Code example #1
def compute_coverage(branch):
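    # Diff the working tree against `branch`, collect the added/changed line numbers
    # for each Python file, and return the percentage of those lines that appear in
    # the recorded coverage data.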
    coverage_data = CoverageData()
    try:
        coverage_data.read_file(project_path.join('.coverage').strpath)
    except misc.CoverageException:
        sys.stderr.write("No coverage data found")

    git_proc = subprocess.Popen(['git', 'diff', '-U0', branch],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
    git_output = git_proc.stdout.read()
    files = git_output.split("diff --git")

    from collections import defaultdict
    file_data = defaultdict(list)

    for the_file in files:
        filenames = re.findall('a/(.*?) b/(.*)', the_file)
        if not filenames:
            continue
        filename = project_path.join(filenames[0][1])
        if '.py' not in filename.strpath:
            continue
        the_file += "git_output_checker"
        the_diffs = re.findall(
            '(@@.*?@@.*?(?=@@|git_output_checker))',
            the_file,
            re.M | re.S,
        )
        for diff in the_diffs:
            diff_args = re.match('@@ -(\d+)(,(\d+))*\s+\+(\d+)(,(\d+))*',
                                 diff).groups()
            if diff_args[5]:
                for extra_line in range(int(diff_args[5])):
                    file_data[filename].append(extra_line + int(diff_args[3]))
            else:
                file_data[filename].append(int(diff_args[3]))

    line_count = 0
    completed_lines = 0
    for file_changed, lines in file_data.iteritems():
        for line in lines:
            line_count += 1
            used_lines = coverage_data.lines(file_changed)
            if not used_lines:
                continue
            if isinstance(used_lines, int):
                used_lines = set([used_lines])
            else:
                used_lines = set(used_lines)
            if line in used_lines:
                completed_lines += 1

    return float(completed_lines) / line_count * 100
Code example #2
File: apidoc.py Project: jkrocil/cfme_tests
def setup(sphinx):
    """Main sphinx entry point, calls sphinx-apidoc"""
    for module in modules_to_document:
        module_path = project_path.join(module).strpath
        tests_exclude_path = project_path.join(module, "tests").strpath
        output_module_path = _doc_modules_path.join(module).strpath

        # Shove stdout into a pipe to suppress the output, but still let stderr out
        args = ["sphinx-apidoc", "-T", "-e", "-o", output_module_path, module_path, tests_exclude_path]
        proc = subprocess.Popen(args, stdout=subprocess.PIPE)
        proc.wait()
    sphinx.add_config_value("clean_autogenerated_docs", False, rebuild="")
    sphinx.connect("build-finished", purge_module_apidoc)
Code example #3
def setup(sphinx):
    """Main sphinx entry point, calls sphinx-apidoc"""
    for module in modules_to_document:
        module_path = project_path.join(module).strpath
        tests_exclude_path = project_path.join(module, 'tests').strpath
        output_module_path = _doc_modules_path.join(module).strpath

        # Shove stdout into a pipe to suppress the output, but still let stderr out
        args = ['sphinx-apidoc', '-T', '-e', '-o', output_module_path, module_path,
            tests_exclude_path]
        proc = subprocess.Popen(args, stdout=subprocess.PIPE)
        proc.wait()
    sphinx.add_config_value('clean_autogenerated_docs', False, rebuild='')
    sphinx.connect('build-finished', purge_module_apidoc)
Code example #4
def compute_coverage(branch):
    coverage_data = CoverageData()
    try:
        coverage_data.read_file(project_path.join('.coverage').strpath)
    except misc.CoverageException:
        sys.stderr.write("No coverage data found")

    git_proc = subprocess.Popen(['git', 'diff', '-U0', branch],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    git_output = git_proc.stdout.read()
    files = git_output.split("diff --git")

    from collections import defaultdict
    file_data = defaultdict(list)

    for the_file in files:
        filenames = re.findall('a/(.*?) b/(.*)', the_file)
        if not filenames:
            continue
        filename = project_path.join(filenames[0][1])
        if '.py' not in filename.strpath:
            continue
        the_file += "git_output_checker"
        the_diffs = re.findall('(@@.*?@@.*?(?=@@|git_output_checker))', the_file, re.M | re.S, )
        for diff in the_diffs:
            diff_args = re.match('@@ -(\d+)(,(\d+))*\s+\+(\d+)(,(\d+))*', diff).groups()
            if diff_args[5]:
                for extra_line in range(int(diff_args[5])):
                    file_data[filename].append(extra_line + int(diff_args[3]))
            else:
                file_data[filename].append(int(diff_args[3]))

    line_count = 0
    completed_lines = 0
    for file_changed, lines in file_data.iteritems():
        for line in lines:
            line_count += 1
            used_lines = coverage_data.lines(file_changed)
            if not used_lines:
                continue
            if isinstance(used_lines, int):
                used_lines = set([used_lines])
            else:
                used_lines = set(used_lines)
            if line in used_lines:
                completed_lines += 1

    return float(completed_lines) / line_count * 100
Code example #5
File: plugin.py Project: jdemon519/cfme_tests
def pytest_configure(config):
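    # Only provision from Sprout when no appliances were passed explicitly and
    # --use-sprout was requested; the provisioned appliance URLs are then pushed
    # into config.option.appliances for the rest of the run.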
    if config.getoption("appliances"):
        return
    if not config.getoption('--use-sprout'):
        return

    provision_request = SproutProvisioningRequest.from_config(config)

    mgr = config._sprout_mgr = SproutManager()
    requested_appliances = mgr.request_appliances(provision_request)
    config.option.appliances[:] = []
    appliances = config.option.appliances
    # Push an appliance to the stack to have proper reference for test collection
    # FIXME: this is a bad hack based on the need for control of collection partitioning
    appliance_stack.push(
        IPAppliance(address=requested_appliances[0]["ip_address"]))
    log.info("Appliances were provided:")
    for appliance in requested_appliances:
        url = "https://{}/".format(appliance["ip_address"])
        appliances.append(url)
        log.info("- %s is %s", url, appliance['name'])

    mgr.reset_timer()
    # Set the base_url for collection purposes on the first appliance
    conf.runtime["env"]["base_url"] = appliances[0]
    # Retrieve and print the template_name for Jenkins to pick up
    template_name = requested_appliances[0]["template_name"]
    conf.runtime["cfme_data"]["basic_info"]["appliance_template"] = template_name
    log.info("appliance_template: %s", template_name)
    with project_path.join('.appliance_template').open('w') as template_file:
        template_file.write('export appliance_template="{}"'.format(template_name))
    log.info("Sprout setup finished.")
Code example #6
def pytest_configure(config):
    global appliance
    global pool_id
    global sprout
    if not config.option.appliances and (config.option.use_sprout and
                                         config.option.sprout_appliances == 1):
        terminal = reporter()
        sprout = SproutClient.from_config()
        terminal.write("Requesting a single appliance from sprout...\n")
        pool_id = sprout.request_appliances(
            config.option.sprout_group,
            count=config.option.sprout_appliances,
            version=config.option.sprout_version,
            date=config.option.sprout_date,
            lease_time=config.option.sprout_timeout)
        terminal.write(
            "Appliance pool {}. Waiting for fulfillment ...\n".format(pool_id))
        at_exit(destroy_the_pool)
        if config.option.sprout_desc is not None:
            sprout.set_pool_description(pool_id,
                                        str(config.option.sprout_desc))
        try:
            result = wait_for(
                lambda: sprout.request_check(pool_id)["fulfilled"],
                num_sec=config.option.sprout_provision_timeout * 60,
                delay=5,
                message="requesting appliance was fulfilled")
        except:
            pool = sprout.request_check(pool_id)
            dump_pool_info(lambda x: terminal.write("{}\n".format(x)), pool)
            terminal.write("Destroying the pool on error.\n")
            sprout.destroy_pool(pool_id)
            raise
        terminal.write("Provisioning took {0:.1f} seconds\n".format(
            result.duration))
        request = sprout.request_check(pool_id)
        ip_address = request["appliances"][0]["ip_address"]
        terminal.write(
            "Appliance requested at address {} ...\n".format(ip_address))
        reset_timer(sprout, pool_id, config.option.sprout_timeout)
        terminal.write("Appliance lease timer is running ...\n")
        appliance = IPAppliance(address=ip_address)
        appliance.push()
        # Retrieve and print the template_name for Jenkins to pick up
        template_name = request["appliances"][0]["template_name"]
        conf.runtime["cfme_data"]["basic_info"][
            "appliance_template"] = template_name
        terminal.write("appliance_template=\"{}\";\n".format(template_name))
        with project_path.join('.appliance_template').open(
                'w') as template_file:
            template_file.write(
                'export appliance_template="{}"'.format(template_name))
        terminal.write("Single appliance Sprout setup finished.\n")
        # And set also the appliances_provider
        provider = request["appliances"][0]["provider"]
        terminal.write("appliance_provider=\"{}\";\n".format(provider))
        conf.runtime["cfme_data"]["basic_info"][
            "appliances_provider"] = provider
    yield
Code example #7
def pytest_configure(config):
    global appliance
    global pool_id
    global sprout
    yield
    if not config.option.appliances and (config.option.use_sprout
            and config.option.sprout_appliances == 1):
        terminal = reporter()
        sprout = SproutClient.from_config()
        terminal.write("Requesting single appliance from sprout...\n")
        pool_id = sprout.request_appliances(
            config.option.sprout_group,
            count=config.option.sprout_appliances,
            version=config.option.sprout_version,
            date=config.option.sprout_date,
            lease_time=config.option.sprout_timeout
        )
        terminal.write("Appliance pool {}. Waiting for fulfillment ...\n".format(pool_id))
        at_exit(destroy_the_pool)
        if config.option.sprout_desc is not None:
            sprout.set_pool_description(pool_id, str(config.option.sprout_desc))
        try:
            result = wait_for(
                lambda: sprout.request_check(pool_id)["fulfilled"],
                num_sec=config.option.sprout_provision_timeout * 60,
                delay=5,
                message="requesting appliance was fulfilled"
            )
        except:
            pool = sprout.request_check(pool_id)
            dump_pool_info(lambda x: terminal.write("{}\n".format(x)), pool)
            terminal.write("Destroying the pool on error.\n")
            sprout.destroy_pool(pool_id)
            raise
        terminal.write("Provisioning took {0:.1f} seconds\n".format(result.duration))
        request = sprout.request_check(pool_id)
        ip_address = request["appliances"][0]["ip_address"]
        terminal.write("Appliance requested at address {} ...\n".format(ip_address))
        reset_timer(sprout, pool_id, config.option.sprout_timeout)
        terminal.write("Appliance lease timer is running ...\n")
        appliance = IPAppliance(address=ip_address)
        # Retrieve and print the template_name for Jenkins to pick up
        template_name = request["appliances"][0]["template_name"]
        conf.runtime["cfme_data"]["basic_info"]["appliance_template"] = template_name
        terminal.write("appliance_template=\"{}\";\n".format(template_name))
        with project_path.join('.appliance_template').open('w') as template_file:
            template_file.write('export appliance_template="{}"'.format(template_name))
        terminal.write("Single appliance Sprout setup finished.\n")
        # And set also the appliances_provider
        provider = request["appliances"][0]["provider"]
        terminal.write("appliance_provider=\"{}\";\n".format(provider))
        conf.runtime["cfme_data"]["basic_info"]["appliances_provider"] = provider
Code example #8
File: node_annotate.py Project: anewmanRH/cfme_tests
def pytest_configure(config):
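    # Parse the configured annotations CSV (if any) and register a MarkFromMap
    # plugin per column so collected tests receive tier/requirement/type marks.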
    path = cfme_data.get("cfme_annotations_path")
    if path:
        to_parse = project_path.join(path)
        parsed = parse(to_parse)
        if not parsed:
            store.terminalreporter.line("no test annotation found in {}".format(to_parse), yellow=True)
    else:
        store.terminalreporter.line("no test annotation found in {}".format(path), yellow=True)
        parsed = []
    config.pluginmanager.register(MarkFromMap.from_parsed_list(parsed, "tier", pytest.mark.tier))
    config.pluginmanager.register(MarkFromMap.from_parsed_list(parsed, "requirement", pytest.mark.requirement))
    config.pluginmanager.register(MarkFromMap.from_parsed_list(parsed, "type", pytest.mark.__getattr__))
Code example #9
File: node_annotate.py Project: vrutkovs/cfme_tests
def pytest_configure(config):
    path = cfme_data.get('cfme_annotations_path')
    if path:
        to_parse = project_path.join(path)
        parsed = parse(to_parse)
        if not parsed:
            store.terminalreporter.line('no test annotation found in %s' % to_parse, yellow=True)
    else:
        store.terminalreporter.line('no test annotation found in %s' % path, yellow=True)
        parsed = []
    config.pluginmanager.register(MarkFromMap.from_parsed_list(
        parsed, 'tier', pytest.mark.tier))
    config.pluginmanager.register(MarkFromMap.from_parsed_list(
        parsed, 'requirement', pytest.mark.requirement))
    config.pluginmanager.register(MarkFromMap.from_parsed_list(parsed, 'type',
                                                               pytest.mark.__getattr__))
Code example #10
def pytest_configure(config):
    global proc
    if not SLAVEID and not proc and isinstance(art_client, ArtifactorClient):
        import subprocess
        path = project_path.join('utils', 'artifactor_start.py')
        cmd = [path.strpath]
        cmd.append('--port')
        cmd.append(str(art_client.port))
        if config.getvalue('run_id'):
            cmd.append('--run-id')
            cmd.append(str(config.getvalue('run_id')))
        proc = subprocess.Popen(cmd)
        wait_for(net_check, func_args=[art_client.port, '127.0.0.1'], func_kwargs={'force': True},
                 num_sec=10, message="wait for artifactor to start")
        config.option.artifactor_port = art_client.port
    elif isinstance(art_client, ArtifactorClient):
        art_client.port = config.option.artifactor_port
Code example #11
def get_template_from_config(template_config_name):
    """
    Convenience function to grab the details for a template from the yamls.
    """

    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]

    script_data = load_data_file(str(project_path.join(template_config['script_file'])),
                                 replacements=template_config['replacements'])

    script_data = script_data.read()

    return CustomizationTemplate(name=template_config['name'],
                                 description=template_config['description'],
                                 image_type=template_config['image_type'],
                                 script_type=template_config['script_type'],
                                 script_data=script_data)
Code example #12
File: pxe.py Project: rrasouli/cfme_tests
def get_template_from_config(template_config_name):
    """
    Convenience function to grab the details for a template from the yamls.
    """

    template_config = conf.cfme_data.get('customization_templates', {})[template_config_name]

    script_data = load_data_file(str(project_path.join(template_config['script_file'])),
                                 replacements=template_config['replacements'])

    script_data = script_data.read()

    return CustomizationTemplate(name=template_config['name'],
                                 description=template_config['description'],
                                 image_type=template_config['image_type'],
                                 script_type=template_config['script_type'],
                                 script_data=script_data)
Code example #13
def pytest_configure(config):
    global proc
    if not SLAVEID and not proc and isinstance(art_client, ArtifactorClient):
        import subprocess
        path = project_path.join('utils', 'artifactor_start.py')
        cmd = [path.strpath]
        cmd.append('--port')
        cmd.append(str(art_client.port))
        if config.getvalue('run_id'):
            cmd.append('--run-id')
            cmd.append(str(config.getvalue('run_id')))
        proc = subprocess.Popen(cmd)
        wait_for(net_check, func_args=[art_client.port, '127.0.0.1'], func_kwargs={'force': True},
                 num_sec=10, message="wait for artifactor to start")
        config.option.artifactor_port = art_client.port
    elif isinstance(art_client, ArtifactorClient):
        art_client.port = config.option.artifactor_port
    art_client.fire_hook('setup_merkyl', ip=appliance_ip_address)
Code example #14
File: node_annotate.py Project: vrutkovs/cfme_tests
def pytest_configure(config):
    path = cfme_data.get('cfme_annotations_path')
    if path:
        to_parse = project_path.join(path)
        parsed = parse(to_parse)
        if not parsed:
            store.terminalreporter.line('no test annotation found in %s' %
                                        to_parse,
                                        yellow=True)
    else:
        store.terminalreporter.line('no test annotation found in %s' % path,
                                    yellow=True)
        parsed = []
    config.pluginmanager.register(
        MarkFromMap.from_parsed_list(parsed, 'tier', pytest.mark.tier))
    config.pluginmanager.register(
        MarkFromMap.from_parsed_list(parsed, 'requirement',
                                     pytest.mark.requirement))
    config.pluginmanager.register(
        MarkFromMap.from_parsed_list(parsed, 'type', pytest.mark.__getattr__))
Code example #15
File: node_annotate.py Project: anewmanRH/cfme_tests
        file_part = caseid[: -needle - 1].replace(".", os.sep)
    else:
        file_part = caseid

    return "{}.py::{}{}".format(file_part, attribute_part, parameter_part)


def _clean(mapping):
    mapping.pop("", "")
    try:
        return {
            "requirement": int(mapping["Requirement"]),
            "tier": int(mapping["TestTier"]),
            "id": generate_nodeid(mapping),
            "type": mapping["TestType"].lower(),
        }
    except (TypeError, ValueError):
        return None


def parse(path):
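    # Read the annotation CSV and return the cleaned row mappings; a missing file
    # yields an empty list.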
    if not path.check():
        return []
    with path.open() as fp:
        return filter(None, map(_clean, csv.DictReader(fp)))


if __name__ == "__main__":
    mapping_file = project_path.join(py.std.sys.argv[1])
    print(yaml.dump(parse(mapping_file), default_flow_style=False))
Code example #16
File: provider.py Project: rananda/cfme_tests
def import_all_modules_of(loc):
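    # Translate the dotted location into a directory under project_path and import
    # every module found directly inside it.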
    path = project_path.join('{}'.format(loc.replace('.', '/'))).strpath
    for _, name, _ in pkgutil.iter_modules([path]):
        importlib.import_module('{}.{}'.format(loc, name))
Code example #17
File: ssh.py Project: rananda/cfme_tests
    def __int__(self):
        # handling int(x)
        return self.rc

    def __cmp__(self, other):
        # Handling comparison to strings or numbers
        if isinstance(other, int):
            return cmp(self.rc, other)
        elif isinstance(other, basestring):
            return cmp(self.output, other)
        else:
            raise ValueError('You can only compare SSHResult with str or int')


_ssh_key_file = project_path.join('.generated_ssh_key')
_ssh_pubkey_file = project_path.join('.generated_ssh_key.pub')

_client_session = []


class SSHClient(paramiko.SSHClient):
    """paramiko.SSHClient wrapper

    Allows copying/overriding and use as a context manager
    Constructor kwargs are handed directly to paramiko.SSHClient.connect()

    If ``container`` param is specified, then it is assumed that the VM hosts a container of CFME.
    The ``container`` param then contains the name of the container.
    """
    def __init__(self, stream_output=False, **connect_kwargs):
Code example #18
File: __init__.py Project: FilipB/cfme_tests
    @contextmanager
    def appliances_ignored_when_renaming(self, *appliances):
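        # Atomically add the given appliances to the "renaming_appliances" set for the
        # duration of the with-block, then remove them again on exit.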
        with self.atomic() as client:
            ignored_appliances = client._get("renaming_appliances")
            if ignored_appliances is None:
                ignored_appliances = set([])
            for appliance in appliances:
                ignored_appliances.add(appliance)
            client._set("renaming_appliances", ignored_appliances)
        yield
        with self.atomic() as client:
            ignored_appliances = client._get("renaming_appliances")
            if ignored_appliances is None:
                ignored_appliances = set([])
            for appliance in appliances:
                try:
                    ignored_appliances.remove(appliance)
                except KeyError:
                    # Something wrong happened, ignore
                    pass
            client._set("renaming_appliances", ignored_appliances)

    @property
    def renaming_appliances(self):
        return self.get("renaming_appliances") or set([])


redis = RedisWrapper(redis_client)
sprout_path = project_path.join("sprout")
Code example #19
def import_all_modules_of(loc):
    path = project_path.join('{}'.format(loc.replace('.', '/'))).strpath
    for _, name, _ in pkgutil.iter_modules([path]):
        importlib.import_module('{}.{}'.format(loc, name))
Code example #20
File: __init__.py Project: patchkez/cfme_tests
    def __init__(self, config):
        self.config = config
        self.session = None
        self.session_finished = False
        self.countfailures = 0
        self.collection = OrderedDict()
        self.sent_tests = 0
        self.log = create_sublogger('master')
        self.maxfail = config.getvalue("maxfail")
        self._failed_collection_errors = {}
        self.terminal = store.terminalreporter
        self.trdist = None
        self.slaves = SlaveDict()
        self.slave_urls = SlaveDict()
        self.slave_tests = defaultdict(set)
        self.test_groups = self._test_item_generator()

        self._pool = []
        self.pool_lock = Lock()
        from utils.conf import cfme_data
        self.provs = sorted(set(cfme_data['management_systems'].keys()),
                            key=len, reverse=True)
        self.slave_allocation = collections.defaultdict(list)
        self.used_prov = set()

        self.failed_slave_test_groups = deque()
        self.slave_spawn_count = 0
        self.sprout_client = None
        self.sprout_timer = None
        self.sprout_pool = None
        if not self.config.option.use_sprout:
            # Without Sprout
            self.appliances = self.config.option.appliances
        else:
            # Using sprout
            self.sprout_client = SproutClient.from_config()
            try:
                if self.config.option.sprout_desc is not None:
                    jenkins_job = re.findall(r"Jenkins.*[^\d+$]", self.config.option.sprout_desc)
                    if jenkins_job:
                        self.terminal.write(
                            "Check if pool already exists for this '{}' Jenkins job\n".format(
                                jenkins_job[0]))
                        jenkins_job_pools = self.sprout_client.find_pools_by_description(
                            jenkins_job[0], partial=True)
                        for pool in jenkins_job_pools:
                            self.terminal.write("Destroying the old pool {} for '{}' job.\n".format(
                                pool, jenkins_job[0]))
                            self.sprout_client.destroy_pool(pool)
            except Exception as e:
                self.terminal.write(
                    "Exception occurred during old pool deletion, this can be ignored"
                    "proceeding to Request new pool")
                self.terminal.write("> The exception was: {}".format(str(e)))

            self.terminal.write(
                "Requesting {} appliances from Sprout at {}\n".format(
                    self.config.option.sprout_appliances, self.sprout_client.api_entry))
            pool_id = self.sprout_client.request_appliances(
                self.config.option.sprout_group,
                count=self.config.option.sprout_appliances,
                version=self.config.option.sprout_version,
                date=self.config.option.sprout_date,
                lease_time=self.config.option.sprout_timeout
            )
            self.println("Pool {}. Waiting for fulfillment ...".format(pool_id))
            self.sprout_pool = pool_id
            at_exit(self.sprout_client.destroy_pool, self.sprout_pool)
            if self.config.option.sprout_desc is not None:
                self.sprout_client.set_pool_description(
                    pool_id, str(self.config.option.sprout_desc))

            def detailed_check():
                try:
                    result = self.sprout_client.request_check(self.sprout_pool)
                except SproutException as e:
                    # TODO: ensure we only exit this way on sprout usage
                    try:
                        self.sprout_client.destroy_pool(pool_id)
                    except Exception:
                        pass
                    self.println(
                        "sprout pool could not be fulfilled\n{}".format(e))
                    pytest.exit(1)

                self.println("[{now:%H:%M}] fulfilled at {progress:2}%".format(
                    now=datetime.now(),
                    progress=result['progress']
                ))
                return result["fulfilled"]
            try:
                result = wait_for(
                    detailed_check,
                    num_sec=self.config.option.sprout_provision_timeout * 60,
                    delay=5,
                    message="requesting appliances was fulfilled"
                )
            except Exception:
                pool = self.sprout_client.request_check(self.sprout_pool)
                dump_pool_info(self.println, pool)
                self.println("Destroying the pool on error.")
                self.sprout_client.destroy_pool(pool_id)
                raise
            else:
                pool = self.sprout_client.request_check(self.sprout_pool)
                dump_pool_info(self.println, pool)
            self.println("Provisioning took {0:.1f} seconds".format(result.duration))
            request = self.sprout_client.request_check(self.sprout_pool)
            self.appliances = []
            # Push an appliance to the stack to have proper reference for test collection
            # FIXME: this is a bad hack based on the need for control of collection partitioning
            appliance_stack.push(
                IPAppliance(address=request["appliances"][0]["ip_address"]))
            self.println("Appliances were provided:")
            for appliance in request["appliances"]:
                url = "https://{}/".format(appliance["ip_address"])
                self.appliances.append(url)
                self.println("- {} is {}".format(url, appliance['name']))
            map(lambda a: "https://{}/".format(a["ip_address"]), request["appliances"])
            self._reset_timer()
            # Set the base_url for collection purposes on the first appliance
            conf.runtime["env"]["base_url"] = self.appliances[0]
            # Retrieve and print the template_name for Jenkins to pick up
            template_name = request["appliances"][0]["template_name"]
            conf.runtime["cfme_data"]["basic_info"]["appliance_template"] = template_name
            self.terminal.write("appliance_template=\"{}\";\n".format(template_name))
            with project_path.join('.appliance_template').open('w') as template_file:
                template_file.write('export appliance_template="{}"'.format(template_name))
            self.println("Parallelized Sprout setup finished.")
            self.slave_appliances_data = {}
            for appliance in request["appliances"]:
                self.slave_appliances_data[appliance["ip_address"]] = (
                    appliance["template_name"], appliance["provider"]
                )

        # set up the ipc socket
        zmq_endpoint = 'tcp://127.0.0.1:{}'.format(random_port())
        ctx = zmq.Context.instance()
        self.sock = ctx.socket(zmq.ROUTER)
        self.sock.bind('{}'.format(zmq_endpoint))

        # clean out old slave config if it exists
        slave_config = conf_path.join('slave_config.yaml')
        slave_config.check() and slave_config.remove()

        # write out the slave config
        conf.runtime['slave_config'] = {
            'args': self.config.args,
            'options': self.config.option.__dict__,
            'zmq_endpoint': zmq_endpoint,
            'sprout': self.sprout_client is not None and self.sprout_pool is not None,
        }
        if hasattr(self, "slave_appliances_data"):
            conf.runtime['slave_config']["appliance_data"] = self.slave_appliances_data
        conf.runtime['slave_config']['options']['use_sprout'] = False  # Slaves don't use sprout
        conf.save('slave_config')

        for i, base_url in enumerate(self.appliances):
            self.slave_urls.add(base_url)

        for slave in sorted(self.slave_urls):
            self.print_message("using appliance {}".format(self.slave_urls[slave]),
                slave, green=True)

        # Start the recv queue
        self._recv_queue = deque()
        recv_queuer = Thread(target=_recv_queue, args=(self,))
        recv_queuer.daemon = True
        recv_queuer.start()
Code example #21
File: ssh.py Project: pavelzag/cfme_tests
from utils import conf, ports, version
from utils.log import logger
from utils.net import net_check
from fixtures.pytest_store import store
from utils.path import project_path
from utils.quote import quote
from utils.timeutil import parsetime


# Default blocking time before giving up on an ssh command execution,
# in seconds (float)
RUNCMD_TIMEOUT = 1200.0
SSHResult = namedtuple("SSHResult", ["rc", "output"])

_ssh_key_file = project_path.join('.generated_ssh_key')
_ssh_pubkey_file = project_path.join('.generated_ssh_key.pub')

_client_session = []


class SSHClient(paramiko.SSHClient):
    """paramiko.SSHClient wrapper

    Allows copying/overriding and use as a context manager
    Constructor kwargs are handed directly to paramiko.SSHClient.connect()

    If ``container`` param is specified, then it is assumed that the VM hosts a container of CFME.
    The ``container`` param then contains the name of the container.
    """
    def __init__(self, stream_output=False, **connect_kwargs):
Code example #22
File: node_annotate.py Project: vrutkovs/cfme_tests
        file_part = caseid[:-needle - 1].replace('.', os.sep)
    else:
        file_part = caseid

    return "%s.py::%s%s" % (file_part, attribute_part, parameter_part)


def _clean(mapping):
    mapping.pop('', '')
    try:
        return {
            'requirement': int(mapping['Requirement']),
            'tier': int(mapping['TestTier']),
            'id': generate_nodeid(mapping),
            'type': mapping['TestType'].lower(),
        }
    except (TypeError, ValueError):
        return None


def parse(path):
    if not path.check():
        return []
    with path.open() as fp:
        return filter(None, map(_clean, csv.DictReader(fp)))


if __name__ == '__main__':
    mapping_file = project_path.join(py.std.sys.argv[1])
    print(yaml.dump(parse(mapping_file), default_flow_style=False))
Code example #23
    @contextmanager
    def appliances_ignored_when_renaming(self, *appliances):
        with self.atomic() as client:
            ignored_appliances = client._get("renaming_appliances")
            if ignored_appliances is None:
                ignored_appliances = set([])
            for appliance in appliances:
                ignored_appliances.add(appliance)
            client._set("renaming_appliances", ignored_appliances)
        yield
        with self.atomic() as client:
            ignored_appliances = client._get("renaming_appliances")
            if ignored_appliances is None:
                ignored_appliances = set([])
            for appliance in appliances:
                try:
                    ignored_appliances.remove(appliance)
                except KeyError:
                    # Something wrong happened, ignore
                    pass
            client._set("renaming_appliances", ignored_appliances)

    @property
    def renaming_appliances(self):
        return self.get("renaming_appliances") or set([])


redis = RedisWrapper(redis_client)
sprout_path = project_path.join("sprout")
Code example #24
        the_diffs = re.findall('(@@.*?@@.*?(?=@@|git_output_checker))', the_file, re.M | re.S, )
        for diff in the_diffs:
            diff_args = re.match('@@ -(\d+)(,(\d+))*\s+\+(\d+)(,(\d+))*', diff).groups()
            if diff_args[5]:
                for extra_line in range(int(diff_args[5])):
                    file_data[filename].append(extra_line + int(diff_args[3]))
            else:
                file_data[filename].append(int(diff_args[3]))

    line_count = 0
    completed_lines = 0
    for file_changed, lines in file_data.iteritems():
        for line in lines:
            line_count += 1
            used_lines = coverage_data.lines(file_changed)
            if not used_lines:
                continue
            if isinstance(used_lines, int):
                used_lines = set([used_lines])
            else:
                used_lines = set(used_lines)
            if line in used_lines:
                completed_lines += 1

    return float(completed_lines) / line_count * 100

if __name__ == "__main__":
    result = compute_coverage(sys.argv[1])
    with open(project_path.join('coverage_result.txt').strpath, "w") as f:
        f.write("{}".format(result))
Code example #25
import json
import os
import os.path
from datetime import datetime

from artifactor.plugins.post_result import test_report
from utils import read_env
from utils.path import project_path
from utils.trackerbot import post_jenkins_result

job_name = os.environ['JOB_NAME']
number = int(os.environ['BUILD_NUMBER'])
date = str(datetime.now())

# reduce returns to bools for easy logic
runner_src = read_env(project_path.join('.jenkins_runner_result'))
runner_return = runner_src.get('RUNNER_RETURN', '1') == '0'
test_return = runner_src.get('TEST_RETURN', '1') == '0'

# 'stream' environ is set by jenkins for all stream test jobs
# but not in the template tester
if job_name not in ('template-tester', 'template-tester-openstack',
                    'template-tester-rhevm', 'template-tester-virtualcenter'):
    # try to pull out the appliance template name
    template_src = read_env(project_path.join('.appliance_template'))
    template = template_src.get('appliance_template', 'Unknown')
    stream = os.environ['stream']
else:
    tester_src = read_env(project_path.join('.template_tester'))
    stream = tester_src['stream']
    template = tester_src['appliance_template']
Code example #26
            diff_args = re.match('@@ -(\d+)(,(\d+))*\s+\+(\d+)(,(\d+))*',
                                 diff).groups()
            if diff_args[5]:
                for extra_line in range(int(diff_args[5])):
                    file_data[filename].append(extra_line + int(diff_args[3]))
            else:
                file_data[filename].append(int(diff_args[3]))

    line_count = 0
    completed_lines = 0
    for file_changed, lines in file_data.iteritems():
        for line in lines:
            line_count += 1
            used_lines = coverage_data.lines(file_changed)
            if not used_lines:
                continue
            if isinstance(used_lines, int):
                used_lines = set([used_lines])
            else:
                used_lines = set(used_lines)
            if line in used_lines:
                completed_lines += 1

    return float(completed_lines) / line_count * 100


if __name__ == "__main__":
    result = compute_coverage(sys.argv[1])
    with open(project_path.join('coverage_result.txt').strpath, "w") as f:
        f.write("{}".format(result))
Code example #27
    def __init__(self, config):
        self.config = config
        self.session = None
        self.session_finished = False
        self.countfailures = 0
        self.collection = OrderedDict()
        self.sent_tests = 0
        self.log = create_sublogger('master')
        self.maxfail = config.getvalue("maxfail")
        self._failed_collection_errors = {}
        self.terminal = store.terminalreporter
        self.trdist = None
        self.slaves = SlaveDict()
        self.slave_urls = SlaveDict()
        self.slave_tests = defaultdict(set)
        self.test_groups = self._test_item_generator()
        self.failed_slave_test_groups = deque()
        self.slave_spawn_count = 0
        self.sprout_client = None
        self.sprout_timer = None
        self.sprout_pool = None
        if not self.config.option.use_sprout:
            # Without Sprout
            self.appliances = self.config.option.appliances
        else:
            # Using sprout
            self.sprout_client = SproutClient.from_config()
            self.terminal.write(
                "Requesting {} appliances from Sprout at {}\n".format(
                    self.config.option.sprout_appliances,
                    self.sprout_client.api_entry))
            pool_id = self.sprout_client.request_appliances(
                self.config.option.sprout_group,
                count=self.config.option.sprout_appliances,
                version=self.config.option.sprout_version,
                date=self.config.option.sprout_date,
                lease_time=self.config.option.sprout_timeout)
            self.terminal.write(
                "Pool {}. Waiting for fulfillment ...\n".format(pool_id))
            self.sprout_pool = pool_id
            at_exit(self.sprout_client.destroy_pool, self.sprout_pool)
            if self.config.option.sprout_desc is not None:
                self.sprout_client.set_pool_description(
                    pool_id, str(self.config.option.sprout_desc))
            try:
                result = wait_for(
                    lambda: self.sprout_client.request_check(self.sprout_pool)[
                        "fulfilled"],
                    num_sec=self.config.option.sprout_provision_timeout * 60,
                    delay=5,
                    message="requesting appliances was fulfilled")
            except:
                pool = self.sprout_client.request_check(self.sprout_pool)
                dump_pool_info(lambda x: self.terminal.write("{}\n".format(x)),
                               pool)
                self.terminal.write("Destroying the pool on error.\n")
                self.sprout_client.destroy_pool(pool_id)
                raise
            else:
                pool = self.sprout_client.request_check(self.sprout_pool)
                dump_pool_info(lambda x: self.terminal.write("{}\n".format(x)),
                               pool)
            self.terminal.write("Provisioning took {0:.1f} seconds\n".format(
                result.duration))
            request = self.sprout_client.request_check(self.sprout_pool)
            self.appliances = []
            # Push an appliance to the stack to have proper reference for test collection
            IPAppliance(address=request["appliances"][0]["ip_address"]).push()
            self.terminal.write("Appliances were provided:\n")
            for appliance in request["appliances"]:
                url = "https://{}/".format(appliance["ip_address"])
                self.appliances.append(url)
                self.terminal.write("- {} is {}\n".format(
                    url, appliance['name']))
            map(lambda a: "https://{}/".format(a["ip_address"]),
                request["appliances"])
            self._reset_timer()
            # Set the base_url for collection purposes on the first appliance
            conf.runtime["env"]["base_url"] = self.appliances[0]
            # Retrieve and print the template_name for Jenkins to pick up
            template_name = request["appliances"][0]["template_name"]
            conf.runtime["cfme_data"]["basic_info"][
                "appliance_template"] = template_name
            self.terminal.write(
                "appliance_template=\"{}\";\n".format(template_name))
            with project_path.join('.appliance_template').open(
                    'w') as template_file:
                template_file.write(
                    'export appliance_template="{}"'.format(template_name))
            self.terminal.write("Parallelized Sprout setup finished.\n")
            self.slave_appliances_data = {}
            for appliance in request["appliances"]:
                self.slave_appliances_data[appliance["ip_address"]] = (
                    appliance["template_name"], appliance["provider"])

        # set up the ipc socket
        zmq_endpoint = 'tcp://127.0.0.1:{}'.format(random_port())
        ctx = zmq.Context.instance()
        self.sock = ctx.socket(zmq.ROUTER)
        self.sock.bind('%s' % zmq_endpoint)

        # clean out old slave config if it exists
        slave_config = conf_path.join('slave_config.yaml')
        slave_config.check() and slave_config.remove()

        # write out the slave config
        conf.runtime['slave_config'] = {
            'args':
            self.config.args,
            'options':
            self.config.option.__dict__,
            'zmq_endpoint':
            zmq_endpoint,
            'sprout':
            self.sprout_client is not None and self.sprout_pool is not None,
        }
        if hasattr(self, "slave_appliances_data"):
            conf.runtime['slave_config'][
                "appliance_data"] = self.slave_appliances_data
        conf.runtime['slave_config']['options'][
            'use_sprout'] = False  # Slaves don't use sprout
        conf.save('slave_config')

        for i, base_url in enumerate(self.appliances):
            self.slave_urls.add(base_url)

        for slave in sorted(self.slave_urls):
            self.print_message("using appliance {}".format(
                self.slave_urls[slave]),
                               slave,
                               green=True)

        # Start the recv queue
        self._recv_queue = deque()
        recv_queuer = Thread(target=_recv_queue, args=(self, ))
        recv_queuer.daemon = True
        recv_queuer.start()
Code example #28
File: __init__.py Project: seandst/cfme_tests
    def __init__(self, config):
        self.config = config
        self.session = None
        self.session_finished = False
        self.countfailures = 0
        self.collection = OrderedDict()
        self.sent_tests = 0
        self.log = create_sublogger('master')
        self.maxfail = config.getvalue("maxfail")
        self._failed_collection_errors = {}
        self.terminal = store.terminalreporter
        self.trdist = None
        self.slaves = SlaveDict()
        self.slave_urls = SlaveDict()
        self.slave_tests = defaultdict(set)
        self.test_groups = self._test_item_generator()
        self.failed_slave_test_groups = deque()
        self.slave_spawn_count = 0
        self.sprout_client = None
        self.sprout_timer = None
        self.sprout_pool = None
        if not self.config.option.use_sprout:
            # Without Sprout
            self.appliances = self.config.option.appliances
        else:
            # Using sprout
            self.sprout_client = SproutClient.from_config()
            self.terminal.write(
                "Requesting {} appliances from Sprout at {}\n".format(
                    self.config.option.sprout_appliances, self.sprout_client.api_entry))
            pool_id = self.sprout_client.request_appliances(
                self.config.option.sprout_group,
                count=self.config.option.sprout_appliances,
                version=self.config.option.sprout_version,
                date=self.config.option.sprout_date,
                lease_time=self.config.option.sprout_timeout
            )
            self.terminal.write("Pool {}. Waiting for fulfillment ...\n".format(pool_id))
            self.sprout_pool = pool_id
            at_exit(self.sprout_client.destroy_pool, self.sprout_pool)
            if self.config.option.sprout_desc is not None:
                self.sprout_client.set_pool_description(
                    pool_id, str(self.config.option.sprout_desc))
            try:
                result = wait_for(
                    lambda: self.sprout_client.request_check(self.sprout_pool)["fulfilled"],
                    num_sec=self.config.option.sprout_provision_timeout * 60,
                    delay=5,
                    message="requesting appliances was fulfilled"
                )
            except:
                pool = self.sprout_client.request_check(self.sprout_pool)
                dump_pool_info(lambda x: self.terminal.write("{}\n".format(x)), pool)
                self.terminal.write("Destroying the pool on error.\n")
                self.sprout_client.destroy_pool(pool_id)
                raise
            else:
                pool = self.sprout_client.request_check(self.sprout_pool)
                dump_pool_info(lambda x: self.terminal.write("{}\n".format(x)), pool)
            self.terminal.write("Provisioning took {0:.1f} seconds\n".format(result.duration))
            request = self.sprout_client.request_check(self.sprout_pool)
            self.appliances = []
            # Push an appliance to the stack to have proper reference for test collection
            IPAppliance(address=request["appliances"][0]["ip_address"]).push()
            self.terminal.write("Appliances were provided:\n")
            for appliance in request["appliances"]:
                url = "https://{}/".format(appliance["ip_address"])
                self.appliances.append(url)
                self.terminal.write("- {} is {}\n".format(url, appliance['name']))
            map(lambda a: "https://{}/".format(a["ip_address"]), request["appliances"])
            self._reset_timer()
            # Set the base_url for collection purposes on the first appliance
            conf.runtime["env"]["base_url"] = self.appliances[0]
            # Retrieve and print the template_name for Jenkins to pick up
            template_name = request["appliances"][0]["template_name"]
            conf.runtime["cfme_data"]["basic_info"]["appliance_template"] = template_name
            self.terminal.write("appliance_template=\"{}\";\n".format(template_name))
            with project_path.join('.appliance_template').open('w') as template_file:
                template_file.write('export appliance_template="{}"'.format(template_name))
            self.terminal.write("Parallelized Sprout setup finished.\n")
            self.slave_appliances_data = {}
            for appliance in request["appliances"]:
                self.slave_appliances_data[appliance["ip_address"]] = (
                    appliance["template_name"], appliance["provider"]
                )

        # set up the ipc socket
        zmq_endpoint = 'tcp://127.0.0.1:{}'.format(random_port())
        ctx = zmq.Context.instance()
        self.sock = ctx.socket(zmq.ROUTER)
        self.sock.bind('%s' % zmq_endpoint)

        # clean out old slave config if it exists
        slave_config = conf_path.join('slave_config.yaml')
        slave_config.check() and slave_config.remove()

        # write out the slave config
        conf.runtime['slave_config'] = {
            'args': self.config.args,
            'options': self.config.option.__dict__,
            'zmq_endpoint': zmq_endpoint,
            'sprout': self.sprout_client is not None and self.sprout_pool is not None,
        }
        if hasattr(self, "slave_appliances_data"):
            conf.runtime['slave_config']["appliance_data"] = self.slave_appliances_data
        conf.runtime['slave_config']['options']['use_sprout'] = False  # Slaves don't use sprout
        conf.save('slave_config')

        for i, base_url in enumerate(self.appliances):
            self.slave_urls.add(base_url)
        # Fire up the workers
        self._slave_audit()

        # Start the recv queue
        self._recv_queue = deque()
        recv_queuer = Thread(target=_recv_queue, args=(self,))
        recv_queuer.daemon = True
        recv_queuer.start()
Code example #29
import json
import os
import os.path
from datetime import datetime

from artifactor.plugins.post_result import test_report
from utils import read_env
from utils.path import project_path
from utils.trackerbot import post_jenkins_result

job_name = os.environ["JOB_NAME"]
number = int(os.environ["BUILD_NUMBER"])
date = str(datetime.now())

# reduce returns to bools for easy logic
runner_src = read_env(project_path.join(".jenkins_runner_result"))
runner_return = runner_src.get("RUNNER_RETURN", "1") == "0"
test_return = runner_src.get("TEST_RETURN", "1") == "0"


# 'stream' environ is set by jenkins for all stream test jobs
# but not in the template tester
if job_name not in (
    "template-tester",
    "template-tester-openstack",
    "template-tester-rhevm",
    "template-tester-virtualcenter",
):
    # try to pull out the appliance template name
    template_src = read_env(project_path.join(".appliance_template"))
    template = template_src.get("appliance_template", "Unknown")
Code example #30
import json
import os
import os.path
from datetime import datetime

from artifactor.plugins.post_result import test_report
from utils import read_env
from utils.path import project_path
from utils.trackerbot import post_jenkins_result

job_name = os.environ['JOB_NAME']
number = int(os.environ['BUILD_NUMBER'])
date = str(datetime.now())

# reduce returns to bools for easy logic
runner_src = read_env(project_path.join('.jenkins_runner_result'))
runner_return = runner_src.get('RUNNER_RETURN', '1') == '0'
test_return = runner_src.get('TEST_RETURN', '1') == '0'


# 'stream' environ is set by jenkins for all stream test jobs
# but not in the template tester
if job_name not in ('template-tester', 'template-tester-openstack',
                    'template-tester-rhevm', 'template-tester-virtualcenter'):
    # try to pull out the appliance template name
    template_src = read_env(project_path.join('.appliance_template'))
    template = template_src.get('appliance_template', 'Unknown')
    stream = os.environ['stream']
else:
    tester_src = read_env(project_path.join('.template_tester'))
    stream = tester_src['stream']