Example #1
    def write_bot(self, args):
        """
        :type args: TestConfig
        """
        docs = self.find_docs()
        message = self.format_title(help_link=docs)
        output = self.format_block()

        if self.messages:
            verified = all((m.confidence or 0) >= 50 for m in self.messages)
        else:
            verified = False

        bot_data = dict(
            verified=verified,
            docs=docs,
            results=[
                dict(
                    message=message,
                    output=output,
                ),
            ],
        )

        path = self.create_path('bot', '.json')

        if args.explain:
            return

        make_dirs(os.path.dirname(path))

        with open(path, 'w') as bot_fd:
            json.dump(bot_data, bot_fd, indent=4, sort_keys=True)
            bot_fd.write('\n')
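
Every example on this page guards filesystem changes behind args.explain (a dry-run mode) and calls make_dirs before writing. The helper itself is not part of any snippet; a minimal sketch of what it presumably does, assuming it only creates missing parent directories and tolerates a path that already exists, might look like this:

import errno
import os


def make_dirs(path):
    # Hypothetical sketch, not the actual implementation used by these snippets:
    # create the directory and any missing parents, ignoring an existing path.
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno != errno.EEXIST:
            raise
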
Example #2
    def __init__(self, args):
        """
        :type args: EnvironmentConfig
        """
        cache_dir = 'test/cache'

        self.key = os.path.join(cache_dir, self.KEY_NAME)
        self.pub = os.path.join(cache_dir, self.PUB_NAME)

        if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
            base_dir = os.path.expanduser('~/.ansible/test/')

            key = os.path.join(base_dir, self.KEY_NAME)
            pub = os.path.join(base_dir, self.PUB_NAME)

            if not args.explain:
                make_dirs(base_dir)

            if not os.path.isfile(key) or not os.path.isfile(pub):
                run_command(args, ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', key])

            if not args.explain:
                shutil.copy2(key, self.key)
                shutil.copy2(pub, self.pub)

        if args.explain:
            self.pub_contents = None
        else:
            with open(self.pub, 'r') as pub_fd:
                self.pub_contents = pub_fd.read().strip()
Example #3
def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, always):
    """
    :param args: IntegrationConfig
    :param test_dir: str
    :param target_names: list[str]
    :param targets_dict: dict[str, IntegrationTarget]
    :param targets_executed: set[str]
    :param always: bool
    """
    for target_name in target_names:
        if not always and target_name in targets_executed:
            continue

        target = targets_dict[target_name]

        if not args.explain:
            # create a fresh test directory for each test target
            remove_tree(test_dir)
            make_dirs(test_dir)

        if target.script_path:
            command_integration_script(args, target)
        else:
            command_integration_role(args, target, None)

        targets_executed.add(target_name)
Example #4
def run_setup_targets(args, test_dir, target_names, targets_dict, targets_executed, always):
    """
    :param args: IntegrationConfig
    :param test_dir: str
    :param target_names: list[str]
    :param targets_dict: dict[str, IntegrationTarget]
    :param targets_executed: set[str]
    :param always: bool
    """
    for target_name in target_names:
        if not always and target_name in targets_executed:
            continue

        target = targets_dict[target_name]

        if not args.explain:
            # create a fresh test directory for each test target
            remove_tree(test_dir)
            make_dirs(test_dir)

        if target.script_path:
            command_integration_script(args, target)
        else:
            command_integration_role(args, target, None)

        targets_executed.add(target_name)
Example #5
    def __init__(self, args):
        """
        :type args: EnvironmentConfig
        """
        cache_dir = 'test/cache'

        self.key = os.path.join(cache_dir, self.KEY_NAME)
        self.pub = os.path.join(cache_dir, self.PUB_NAME)

        if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
            base_dir = os.path.expanduser('~/.ansible/test/')

            key = os.path.join(base_dir, self.KEY_NAME)
            pub = os.path.join(base_dir, self.PUB_NAME)

            if not args.explain:
                make_dirs(base_dir)

            if not os.path.isfile(key) or not os.path.isfile(pub):
                run_command(
                    args,
                    ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', key])

            if not args.explain:
                shutil.copy2(key, self.key)
                shutil.copy2(pub, self.pub)

        if args.explain:
            self.pub_contents = None
        else:
            with open(self.pub, 'r') as pub_fd:
                self.pub_contents = pub_fd.read().strip()
Example #6
    def _save(self):
        """Save instance information."""
        if self.args.explain:
            return

        make_dirs(os.path.dirname(self.path))

        with open(self.path, 'w') as instance_fd:
            instance_fd.write(self.instance_id)
Example #7
    def _save(self):
        """Save instance information."""
        if self.args.explain:
            return

        make_dirs(os.path.dirname(self.path))

        with open(self.path, 'w') as instance_fd:
            instance_fd.write(self.instance_id)
Example #8
    def _save(self):
        """Save instance information."""
        if self.args.explain:
            return

        config = self.save()

        make_dirs(os.path.dirname(self.path))

        with open(self.path, 'w') as instance_fd:
            instance_fd.write(json.dumps(config, indent=4, sort_keys=True))
Example #9
    def _save(self):
        """Save instance information."""
        if self.args.explain:
            return

        config = self.save()

        make_dirs(os.path.dirname(self.path))

        with open(self.path, 'w') as instance_fd:
            instance_fd.write(json.dumps(config, indent=4, sort_keys=True))
Example #10
    def _save(self):
        """Save instance information."""
        if self.args.explain:
            return

        make_dirs(os.path.dirname(self.path))

        with open(self.path, 'w') as instance_fd:
            config = dict(
                instance_id=self.instance_id,
                endpoint=self.endpoint,
            )

            instance_fd.write(json.dumps(config, indent=4, sort_keys=True))
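
Examples #6 through #10 repeat the same dry-run save pattern: return early when args.explain is set, otherwise create the parent directory and write out the state file. Distilled into a standalone sketch (the function name and parameters here are illustrative, not taken from the snippets):

import json
import os


def save_state(path, data, explain=False):
    # Illustrative distillation of the _save pattern shown above.
    if explain:  # dry run: report only, do not touch the filesystem
        return
    make_dirs(os.path.dirname(path))  # a make_dirs helper as sketched under Example #1
    with open(path, 'w') as state_fd:
        state_fd.write(json.dumps(data, indent=4, sort_keys=True))
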
Example #11
    def __init__(self, args):
        """
        :type args: EnvironmentConfig
        """
        cache_dir = os.path.join(data_context().content.root, 'test/cache')

        self.key = os.path.join(cache_dir, self.KEY_NAME)
        self.pub = os.path.join(cache_dir, self.PUB_NAME)

        key_dst = os.path.relpath(self.key, data_context().content.root)
        pub_dst = os.path.relpath(self.pub, data_context().content.root)

        if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
            base_dir = os.path.expanduser('~/.ansible/test/')

            key = os.path.join(base_dir, self.KEY_NAME)
            pub = os.path.join(base_dir, self.PUB_NAME)

            if not args.explain:
                make_dirs(base_dir)

            if not os.path.isfile(key) or not os.path.isfile(pub):
                run_command(args, [
                    'ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '',
                    '-f', key
                ])

            self.key = key
            self.pub = pub

            def ssh_key_callback(
                    files):  # type: (t.List[t.Tuple[str, str]]) -> None
                """Add the SSH keys to the payload file list."""
                files.append((key, key_dst))
                files.append((pub, pub_dst))

            data_context().register_payload_callback(ssh_key_callback)

        if args.explain:
            self.pub_contents = None
        else:
            with open(self.pub, 'r') as pub_fd:
                self.pub_contents = pub_fd.read().strip()
Example #12
    def __init__(self, args):
        """
        :type args: CommonConfig
        """
        tmp = os.path.expanduser('~/.ansible/test/')

        self.key = os.path.join(tmp, 'id_rsa')
        self.pub = os.path.join(tmp, 'id_rsa.pub')

        if not os.path.isfile(self.pub):
            if not args.explain:
                make_dirs(tmp)

            run_command(args, ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', self.key])

        if args.explain:
            self.pub_contents = None
        else:
            with open(self.pub, 'r') as pub_fd:
                self.pub_contents = pub_fd.read().strip()
Example #13
def cloud_init(args, targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    """
    if args.metadata.cloud_config is not None:
        return  # cloud configuration already established prior to delegation

    args.metadata.cloud_config = {}

    results = {}

    for provider in get_cloud_providers(args, targets):
        args.metadata.cloud_config[provider.platform] = {}

        start_time = time.time()
        provider.setup()
        end_time = time.time()

        results[provider.platform] = dict(
            platform=provider.platform,
            setup_seconds=int(end_time - start_time),
            targets=[target.name for target in targets],
        )

    if not args.explain and results:
        results_path = 'test/results/data/%s-%s.json' % (
            args.command,
            re.sub(r'[^0-9]', '-',
                   str(datetime.datetime.utcnow().replace(microsecond=0))))

        data = dict(clouds=results)

        make_dirs(os.path.dirname(results_path))

        with open(results_path, 'w') as results_fd:
            results_fd.write(json.dumps(data, sort_keys=True, indent=4))
Example #14
    def __init__(self, args):
        """
        :type args: EnvironmentConfig
        """
        cache_dir = 'test/cache'

        self.key = os.path.join(cache_dir, self.KEY_NAME)
        self.pub = os.path.join(cache_dir, self.PUB_NAME)

        if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
            base_dir = os.path.expanduser('~/.ansible/test/')

            key = os.path.join(base_dir, self.KEY_NAME)
            pub = os.path.join(base_dir, self.PUB_NAME)

            if not args.explain:
                make_dirs(base_dir)

            if not os.path.isfile(key) or not os.path.isfile(pub):
                run_command(args, ['ssh-keygen', '-m', 'PEM', '-q', '-t', 'rsa', '-N', '', '-f', key])

                # newer ssh-keygen PEM output (such as on RHEL 8.1) is not recognized by paramiko
                with open(key, 'r+') as key_fd:
                    key_contents = key_fd.read()
                    key_contents = re.sub(r'(BEGIN|END) PRIVATE KEY', r'\1 RSA PRIVATE KEY', key_contents)
                    key_fd.seek(0)
                    key_fd.write(key_contents)

            if not args.explain:
                shutil.copy2(key, self.key)
                shutil.copy2(pub, self.pub)

        if args.explain:
            self.pub_contents = None
        else:
            with open(self.pub, 'r') as pub_fd:
                self.pub_contents = pub_fd.read().strip()
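
Example #14 additionally rewrites the PEM header produced by newer ssh-keygen releases (the snippet's comment cites RHEL 8.1) so that paramiko still accepts the private key. The substitution only touches the BEGIN/END marker lines; the same re.sub call in isolation, applied to an invented placeholder key:

import re

key_contents = '-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n'
key_contents = re.sub(r'(BEGIN|END) PRIVATE KEY', r'\1 RSA PRIVATE KEY', key_contents)
# key_contents is now '-----BEGIN RSA PRIVATE KEY-----\n...\n-----END RSA PRIVATE KEY-----\n'
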
Example #15
def integration_test_environment(args, target, inventory_path):
    """
    :type args: IntegrationConfig
    :type target: IntegrationTarget
    :type inventory_path: str
    """
    vars_file = 'integration_config.yml'

    if args.no_temp_workdir or 'no/temp_workdir/' in target.aliases:
        display.warning(
            'Disabling the temp work dir is a temporary debugging feature that may be removed in the future without notice.'
        )

        integration_dir = os.path.abspath('test/integration')
        inventory_path = os.path.abspath(inventory_path)
        ansible_config = os.path.join(integration_dir, '%s.cfg' % args.command)
        vars_file = os.path.join(integration_dir, vars_file)

        yield IntegrationEnvironment(integration_dir, inventory_path,
                                     ansible_config, vars_file)
        return

    root_temp_dir = os.path.expanduser('~/.ansible/test/tmp')

    prefix = '%s-' % target.name
    suffix = u'-\u00c5\u00d1\u015a\u00cc\u03b2\u0141\u00c8'

    if args.no_temp_unicode or 'no/temp_unicode/' in target.aliases:
        display.warning(
            'Disabling unicode in the temp work dir is a temporary debugging feature that may be removed in the future without notice.'
        )
        suffix = '-ansible'

    if isinstance('', bytes):
        suffix = suffix.encode('utf-8')

    if args.explain:
        temp_dir = os.path.join(root_temp_dir, '%stemp%s' % (prefix, suffix))
    else:
        make_dirs(root_temp_dir)
        temp_dir = tempfile.mkdtemp(prefix=prefix,
                                    suffix=suffix,
                                    dir=root_temp_dir)

    try:
        display.info('Preparing temporary directory: %s' % temp_dir,
                     verbosity=2)

        inventory_names = {
            PosixIntegrationConfig: 'inventory',
            WindowsIntegrationConfig: 'inventory.winrm',
            NetworkIntegrationConfig: 'inventory.networking',
        }

        inventory_name = inventory_names[type(args)]

        cache = IntegrationCache(args)

        target_dependencies = sorted(
            [target] + list(cache.dependency_map.get(target.name, set())))

        files_needed = get_files_needed(target_dependencies)

        integration_dir = os.path.join(temp_dir, 'test/integration')
        ansible_config = os.path.join(integration_dir, '%s.cfg' % args.command)

        file_copies = [
            ('test/integration/%s.cfg' % args.command, ansible_config),
            ('test/integration/integration_config.yml',
             os.path.join(integration_dir, vars_file)),
            (inventory_path, os.path.join(integration_dir, inventory_name)),
        ]

        file_copies += [(path, os.path.join(temp_dir, path))
                        for path in files_needed]

        directory_copies = [(os.path.join('test/integration/targets',
                                          target.name),
                             os.path.join(integration_dir, 'targets',
                                          target.name))
                            for target in target_dependencies]

        inventory_dir = os.path.dirname(inventory_path)

        host_vars_dir = os.path.join(inventory_dir, 'host_vars')
        group_vars_dir = os.path.join(inventory_dir, 'group_vars')

        if os.path.isdir(host_vars_dir):
            directory_copies.append(
                (host_vars_dir,
                 os.path.join(integration_dir,
                              os.path.basename(host_vars_dir))))

        if os.path.isdir(group_vars_dir):
            directory_copies.append(
                (group_vars_dir,
                 os.path.join(integration_dir,
                              os.path.basename(group_vars_dir))))

        directory_copies = sorted(set(directory_copies))
        file_copies = sorted(set(file_copies))

        if not args.explain:
            make_dirs(integration_dir)

        for dir_src, dir_dst in directory_copies:
            display.info('Copying %s/ to %s/' % (dir_src, dir_dst),
                         verbosity=2)

            if not args.explain:
                shutil.copytree(dir_src, dir_dst, symlinks=True)

        for file_src, file_dst in file_copies:
            display.info('Copying %s to %s' % (file_src, file_dst),
                         verbosity=2)

            if not args.explain:
                make_dirs(os.path.dirname(file_dst))
                shutil.copy2(file_src, file_dst)

        inventory_path = os.path.join(integration_dir, inventory_name)
        vars_file = os.path.join(integration_dir, vars_file)

        yield IntegrationEnvironment(integration_dir, inventory_path,
                                     ansible_config, vars_file)
    finally:
        if not args.explain:
            shutil.rmtree(temp_dir)
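
Example #15 is a generator that yields an IntegrationEnvironment and removes the temporary directory in its finally block, so at the definition site it is presumably wrapped with contextlib.contextmanager. A hedged usage sketch under that assumption (attribute names are assumed to mirror the constructor arguments):

# assuming integration_test_environment is decorated with @contextlib.contextmanager
with integration_test_environment(args, target, inventory_path) as test_env:
    # the yielded object points inside the freshly populated temporary directory
    display.info('Using inventory: %s' % test_env.inventory_path, verbosity=2)
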
Example #16
def command_integration_filtered(args, targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    """
    found = False

    targets_iter = iter(targets)

    test_dir = os.path.expanduser('~/ansible_testing')

    if not args.explain:
        remove_tree(test_dir)
        make_dirs(test_dir)

    if any('needs/ssh/' in target.aliases for target in targets):
        max_tries = 20
        display.info(
            'SSH service required for tests. Checking to make sure we can connect.'
        )
        for i in range(1, max_tries + 1):
            try:
                run_command(args,
                            ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'],
                            capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError as ex:
                if i == max_tries:
                    raise ex
                seconds = 3
                display.warning(
                    'SSH service not responding. Waiting %d second(s) before checking again.'
                    % seconds)
                time.sleep(seconds)

    start_at_task = args.start_at_task

    for target in targets_iter:
        if args.start_at and not found:
            found = target.name == args.start_at

            if not found:
                continue

        tries = 2 if args.retry_on_error else 1
        verbosity = args.verbosity

        try:
            while tries:
                tries -= 1

                try:
                    if target.script_path:
                        command_integration_script(args, target)
                    else:
                        command_integration_role(args, target, start_at_task)
                        start_at_task = None
                    break
                except SubprocessError:
                    if not tries:
                        raise

                    display.warning(
                        'Retrying test target "%s" with maximum verbosity.' %
                        target.name)
                    display.verbosity = args.verbosity = 6
        except:
            display.notice(
                'To resume at this test target, use the option: --start-at %s'
                % target.name)

            next_target = next(targets_iter, None)

            if next_target:
                display.notice(
                    'To resume after this test target, use the option: --start-at %s'
                    % next_target.name)

            raise
        finally:
            display.verbosity = args.verbosity = verbosity
Example #17
    def test(self, args, targets, python_version):
        """
        :type args: SanityConfig
        :type targets: SanityTargets
        :type python_version: str
        :rtype: TestResult
        """

        #skip_file = 'test/sanity/import/skip.txt'
        skip_file = os.path.join(
            os.path.dirname(ansible_test.__file__),
            'lib/sanity/import/skip.txt'
        )

        skip_paths = read_lines_without_comments(skip_file, remove_blank_lines=True)
        skip_paths_set = set(skip_paths)

        paths = sorted(
            i.path
            for i in targets.include
            if os.path.splitext(i.path)[1] == '.py' and
            (i.path.startswith('lib/ansible/modules/') or i.path.startswith('lib/ansible/module_utils/')) and
            i.path not in skip_paths_set
        )

        if not paths:
            return SanitySkipped(self.name, python_version=python_version)

        env = ansible_environment(args, color=False)

        # create a clean virtual environment to minimize the available imports beyond the python standard library
        virtual_environment_path = os.path.abspath('test/runner/.tox/minimal-py%s' % python_version.replace('.', ''))
        virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')

        remove_tree(virtual_environment_path)

        python = find_python(python_version)

        cmd = [python, '-m', 'virtualenv', virtual_environment_path, '--python', python, '--no-setuptools', '--no-wheel']

        if not args.coverage:
            cmd.append('--no-pip')

        run_command(args, cmd, capture=True)

        # add the importer to our virtual environment so it can be accessed through the coverage injector
        importer_path = os.path.join(virtual_environment_bin, 'importer.py')
        if not args.explain:
            os.symlink(os.path.abspath('test/sanity/import/importer.py'), importer_path)

        # create a minimal python library
        python_path = os.path.abspath('test/runner/.tox/import/lib')
        ansible_path = os.path.join(python_path, 'ansible')
        ansible_init = os.path.join(ansible_path, '__init__.py')
        ansible_link = os.path.join(ansible_path, 'module_utils')

        if not args.explain:
            make_dirs(ansible_path)

            with open(ansible_init, 'w'):
                pass

            if not os.path.exists(ansible_link):
                os.symlink('../../../../../../lib/ansible/module_utils', ansible_link)

        # activate the virtual environment
        env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])
        env['PYTHONPATH'] = python_path

        # make sure coverage is available in the virtual environment if needed
        if args.coverage:
            run_command(args, generate_pip_install(['pip'], 'sanity.import', packages=['setuptools']), env=env)
            run_command(args, generate_pip_install(['pip'], 'sanity.import', packages=['coverage']), env=env)
            run_command(args, ['pip', 'uninstall', '--disable-pip-version-check', '-y', 'setuptools'], env=env)
            run_command(args, ['pip', 'uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env)

        cmd = ['importer.py']

        data = '\n'.join(paths)

        display.info(data, verbosity=4)

        results = []

        virtualenv_python = os.path.join(virtual_environment_bin, 'python')

        try:
            stdout, stderr = intercept_command(args, cmd, self.name, env, capture=True, data=data, python_version=python_version, virtualenv=virtualenv_python)

            if stdout or stderr:
                raise SubprocessError(cmd, stdout=stdout, stderr=stderr)
        except SubprocessError as ex:
            if ex.status != 10 or ex.stderr or not ex.stdout:
                raise

            pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'

            results = parse_to_list_of_dict(pattern, ex.stdout)

            results = [SanityMessage(
                message=r['message'],
                path=r['path'],
                line=int(r['line']),
                column=int(r['column']),
            ) for r in results]

            results = [result for result in results if result.path not in skip_paths_set]

        if results:
            return SanityFailure(self.name, messages=results, python_version=python_version)

        return SanitySuccess(self.name, python_version=python_version)
Example #18
def command_integration_filtered(args, targets, all_targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type all_targets: tuple[IntegrationTarget]
    """
    found = False
    passed = []
    failed = []

    targets_iter = iter(targets)
    all_targets_dict = dict((target.name, target) for target in all_targets)

    setup_errors = []
    setup_targets_executed = set()

    for target in all_targets:
        for setup_target in target.setup_once + target.setup_always:
            if setup_target not in all_targets_dict:
                setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))

    if setup_errors:
        raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))

    test_dir = os.path.expanduser('~/ansible_testing')

    if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
        max_tries = 20
        display.info('SSH service required for tests. Checking to make sure we can connect.')
        for i in range(1, max_tries + 1):
            try:
                run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError:
                if i == max_tries:
                    raise
                seconds = 3
                display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
                time.sleep(seconds)

    start_at_task = args.start_at_task

    results = {}

    for target in targets_iter:
        if args.start_at and not found:
            found = target.name == args.start_at

            if not found:
                continue

        if args.list_targets:
            print(target.name)
            continue

        tries = 2 if args.retry_on_error else 1
        verbosity = args.verbosity

        cloud_environment = get_cloud_environment(args, target)

        original_environment = EnvironmentDescription(args)

        display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)

        try:
            while tries:
                tries -= 1

                try:
                    run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, False)

                    start_time = time.time()

                    run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, True)

                    if not args.explain:
                        # create a fresh test directory for each test target
                        remove_tree(test_dir)
                        make_dirs(test_dir)

                    if target.script_path:
                        command_integration_script(args, target)
                    else:
                        command_integration_role(args, target, start_at_task)
                        start_at_task = None

                    end_time = time.time()

                    results[target.name] = dict(
                        name=target.name,
                        type=target.type,
                        aliases=target.aliases,
                        modules=target.modules,
                        run_time_seconds=int(end_time - start_time),
                        setup_once=target.setup_once,
                        setup_always=target.setup_always,
                        coverage=args.coverage,
                        coverage_label=args.coverage_label,
                        python_version=args.python_version,
                    )

                    break
                except SubprocessError:
                    if cloud_environment:
                        cloud_environment.on_failure(target, tries)

                    if not original_environment.validate(target.name, throw=False):
                        raise

                    if not tries:
                        raise

                    display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
                    display.verbosity = args.verbosity = 6

            original_environment.validate(target.name, throw=True)
            passed.append(target)
        except Exception as ex:
            failed.append(target)

            if args.continue_on_error:
                display.error(ex)
                continue

            display.notice('To resume at this test target, use the option: --start-at %s' % target.name)

            next_target = next(targets_iter, None)

            if next_target:
                display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)

            raise
        finally:
            display.verbosity = args.verbosity = verbosity

    if not args.explain:
        results_path = 'test/results/data/%s-%s.json' % (args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))

        data = dict(
            targets=results,
        )

        with open(results_path, 'w') as results_fd:
            results_fd.write(json.dumps(data, sort_keys=True, indent=4))

    if failed:
        raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
            len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
Example #19
def command_integration_filtered(args, targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    """
    found = False
    passed = []
    failed = []

    targets_iter = iter(targets)

    test_dir = os.path.expanduser('~/ansible_testing')

    if not args.explain and any('needs/ssh/' in target.aliases
                                for target in targets):
        max_tries = 20
        display.info(
            'SSH service required for tests. Checking to make sure we can connect.'
        )
        for i in range(1, max_tries + 1):
            try:
                run_command(args,
                            ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'],
                            capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError:
                if i == max_tries:
                    raise
                seconds = 3
                display.warning(
                    'SSH service not responding. Waiting %d second(s) before checking again.'
                    % seconds)
                time.sleep(seconds)

    start_at_task = args.start_at_task

    for target in targets_iter:
        if args.start_at and not found:
            found = target.name == args.start_at

            if not found:
                continue

        if args.list_targets:
            print(target.name)
            continue

        tries = 2 if args.retry_on_error else 1
        verbosity = args.verbosity

        cloud_environment = get_cloud_environment(args, target)

        original_environment = EnvironmentDescription(args)

        display.info('>>> Environment Description\n%s' % original_environment,
                     verbosity=3)

        try:
            while tries:
                tries -= 1

                if not args.explain:
                    # create a fresh test directory for each test target
                    remove_tree(test_dir)
                    make_dirs(test_dir)

                try:
                    if target.script_path:
                        command_integration_script(args, target)
                    else:
                        command_integration_role(args, target, start_at_task)
                        start_at_task = None
                    break
                except SubprocessError:
                    if cloud_environment:
                        cloud_environment.on_failure(target, tries)

                    if not original_environment.validate(target.name,
                                                         throw=False):
                        raise

                    if not tries:
                        raise

                    display.warning(
                        'Retrying test target "%s" with maximum verbosity.' %
                        target.name)
                    display.verbosity = args.verbosity = 6

            original_environment.validate(target.name, throw=True)
            passed.append(target)
        except Exception as ex:
            failed.append(target)

            if args.continue_on_error:
                display.error(ex)
                continue

            display.notice(
                'To resume at this test target, use the option: --start-at %s'
                % target.name)

            next_target = next(targets_iter, None)

            if next_target:
                display.notice(
                    'To resume after this test target, use the option: --start-at %s'
                    % next_target.name)

            raise
        finally:
            display.verbosity = args.verbosity = verbosity

    if failed:
        raise ApplicationError(
            'The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s'
            % (len(failed), len(passed) + len(failed), '\n'.join(
                target.name for target in failed)))
Example #20
def command_integration_filtered(args, targets, all_targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    :type all_targets: tuple[IntegrationTarget]
    """
    found = False
    passed = []
    failed = []

    targets_iter = iter(targets)
    all_targets_dict = dict((target.name, target) for target in all_targets)

    setup_errors = []
    setup_targets_executed = set()

    for target in all_targets:
        for setup_target in target.setup_once + target.setup_always:
            if setup_target not in all_targets_dict:
                setup_errors.append('Target "%s" contains invalid setup target: %s' % (target.name, setup_target))

    if setup_errors:
        raise ApplicationError('Found %d invalid setup aliases:\n%s' % (len(setup_errors), '\n'.join(setup_errors)))

    test_dir = os.path.expanduser('~/ansible_testing')

    if not args.explain and any('needs/ssh/' in target.aliases for target in targets):
        max_tries = 20
        display.info('SSH service required for tests. Checking to make sure we can connect.')
        for i in range(1, max_tries + 1):
            try:
                run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError:
                if i == max_tries:
                    raise
                seconds = 3
                display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
                time.sleep(seconds)

    start_at_task = args.start_at_task

    results = {}

    for target in targets_iter:
        if args.start_at and not found:
            found = target.name == args.start_at

            if not found:
                continue

        if args.list_targets:
            print(target.name)
            continue

        tries = 2 if args.retry_on_error else 1
        verbosity = args.verbosity

        cloud_environment = get_cloud_environment(args, target)

        original_environment = EnvironmentDescription(args)

        display.info('>>> Environment Description\n%s' % original_environment, verbosity=3)

        try:
            while tries:
                tries -= 1

                try:
                    run_setup_targets(args, test_dir, target.setup_once, all_targets_dict, setup_targets_executed, False)

                    start_time = time.time()

                    run_setup_targets(args, test_dir, target.setup_always, all_targets_dict, setup_targets_executed, True)

                    if not args.explain:
                        # create a fresh test directory for each test target
                        remove_tree(test_dir)
                        make_dirs(test_dir)

                    if target.script_path:
                        command_integration_script(args, target)
                    else:
                        command_integration_role(args, target, start_at_task)
                        start_at_task = None

                    end_time = time.time()

                    results[target.name] = dict(
                        name=target.name,
                        type=target.type,
                        aliases=target.aliases,
                        modules=target.modules,
                        run_time_seconds=int(end_time - start_time),
                        setup_once=target.setup_once,
                        setup_always=target.setup_always,
                        coverage=args.coverage,
                        coverage_label=args.coverage_label,
                        python_version=args.python_version,
                    )

                    break
                except SubprocessError:
                    if cloud_environment:
                        cloud_environment.on_failure(target, tries)

                    if not original_environment.validate(target.name, throw=False):
                        raise

                    if not tries:
                        raise

                    display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
                    display.verbosity = args.verbosity = 6

            original_environment.validate(target.name, throw=True)
            passed.append(target)
        except Exception as ex:
            failed.append(target)

            if args.continue_on_error:
                display.error(ex)
                continue

            display.notice('To resume at this test target, use the option: --start-at %s' % target.name)

            next_target = next(targets_iter, None)

            if next_target:
                display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)

            raise
        finally:
            display.verbosity = args.verbosity = verbosity

    if not args.explain:
        results_path = 'test/results/data/%s-%s.json' % (args.command, re.sub(r'[^0-9]', '-', str(datetime.datetime.utcnow().replace(microsecond=0))))

        data = dict(
            targets=results,
        )

        with open(results_path, 'w') as results_fd:
            results_fd.write(json.dumps(data, sort_keys=True, indent=4))

    if failed:
        raise ApplicationError('The %d integration test(s) listed below (out of %d) failed. See error output above for details:\n%s' % (
            len(failed), len(passed) + len(failed), '\n'.join(target.name for target in failed)))
Example #21
def command_integration_filtered(args, targets):
    """
    :type args: IntegrationConfig
    :type targets: tuple[IntegrationTarget]
    """
    found = False

    targets_iter = iter(targets)

    test_dir = os.path.expanduser('~/ansible_testing')

    if not args.explain:
        remove_tree(test_dir)
        make_dirs(test_dir)

    if any('needs/ssh/' in target.aliases for target in targets):
        max_tries = 20
        display.info('SSH service required for tests. Checking to make sure we can connect.')
        for i in range(1, max_tries + 1):
            try:
                run_command(args, ['ssh', '-o', 'BatchMode=yes', 'localhost', 'id'], capture=True)
                display.info('SSH service responded.')
                break
            except SubprocessError as ex:
                if i == max_tries:
                    raise ex
                seconds = 3
                display.warning('SSH service not responding. Waiting %d second(s) before checking again.' % seconds)
                time.sleep(seconds)

    start_at_task = args.start_at_task

    for target in targets_iter:
        if args.start_at and not found:
            found = target.name == args.start_at

            if not found:
                continue

        tries = 2 if args.retry_on_error else 1
        verbosity = args.verbosity

        try:
            while tries:
                tries -= 1

                try:
                    if target.script_path:
                        command_integration_script(args, target)
                    else:
                        command_integration_role(args, target, start_at_task)
                        start_at_task = None
                    break
                except SubprocessError:
                    if not tries:
                        raise

                    display.warning('Retrying test target "%s" with maximum verbosity.' % target.name)
                    display.verbosity = args.verbosity = 6
        except:
            display.notice('To resume at this test target, use the option: --start-at %s' % target.name)

            next_target = next(targets_iter, None)

            if next_target:
                display.notice('To resume after this test target, use the option: --start-at %s' % next_target.name)

            raise
        finally:
            display.verbosity = args.verbosity = verbosity
Example #22
        return images


if __name__ == '__main__':
    args = parser.parse_args()
    args.train = False
    print_test_args(args)

    # use GPU if available.
    device = torch.device(
        'cuda' if torch.cuda.is_available() and not args.cpu else 'cpu')
    print_('\tUsing device: {}.\n'.format(device))

    # create output directory.
    make_dirs(args.out_dir)

    # load test data.
    img_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean=opt.MEAN, std=opt.STD)
    ])

    dataset = HDRTestDataset(args.test_dir, img_transform)
    iterator_test_set = data.DataLoader(dataset, batch_size=1)
    print_('\tLoaded {} test images.\n'.format(len(dataset)))

    model = SoftConvNotLearnedMaskUNet().to(device)
    load_ckpt(args.weights, [('model', model)])

    print_("Starting prediction...\n\n")
Example #23
    def test(self, args, targets, python_version):
        """
        :type args: SanityConfig
        :type targets: SanityTargets
        :type python_version: str
        :rtype: TestResult
        """
        settings = self.load_processor(args, python_version)

        paths = [target.path for target in targets.include]

        env = ansible_environment(args, color=False)

        # create a clean virtual environment to minimize the available imports beyond the python standard library
        virtual_environment_path = os.path.abspath(
            'test/runner/.tox/minimal-py%s' % python_version.replace('.', ''))
        virtual_environment_bin = os.path.join(virtual_environment_path, 'bin')

        remove_tree(virtual_environment_path)

        python = find_python(python_version)

        cmd = [
            python, '-m', 'virtualenv', virtual_environment_path, '--python',
            python, '--no-setuptools', '--no-wheel'
        ]

        if not args.coverage:
            cmd.append('--no-pip')

        run_command(args, cmd, capture=True)

        # add the importer to our virtual environment so it can be accessed through the coverage injector
        importer_path = os.path.join(virtual_environment_bin, 'importer.py')
        if not args.explain:
            os.symlink(
                os.path.abspath(
                    os.path.join(ANSIBLE_ROOT,
                                 'test/sanity/import/importer.py')),
                importer_path)

        # create a minimal python library
        python_path = os.path.abspath('test/runner/.tox/import/lib')
        ansible_path = os.path.join(python_path, 'ansible')
        ansible_init = os.path.join(ansible_path, '__init__.py')
        ansible_link = os.path.join(ansible_path, 'module_utils')

        if not args.explain:
            remove_tree(ansible_path)

            make_dirs(ansible_path)

            with open(ansible_init, 'w'):
                pass

            os.symlink(os.path.join(ANSIBLE_ROOT, 'lib/ansible/module_utils'),
                       ansible_link)

            if data_context().content.collection:
                # inject just enough Ansible code for the collections loader to work on all supported Python versions
                # the __init__.py files are needed only for Python 2.x
                # the empty modules directory is required for the collection loader to generate the synthetic packages list

                make_dirs(os.path.join(ansible_path, 'utils'))
                with open(os.path.join(ansible_path, 'utils/__init__.py'),
                          'w'):
                    pass

                os.symlink(
                    os.path.join(ANSIBLE_ROOT,
                                 'lib/ansible/utils/collection_loader.py'),
                    os.path.join(ansible_path, 'utils/collection_loader.py'))
                os.symlink(
                    os.path.join(ANSIBLE_ROOT,
                                 'lib/ansible/utils/singleton.py'),
                    os.path.join(ansible_path, 'utils/singleton.py'))

                make_dirs(os.path.join(ansible_path, 'modules'))
                with open(os.path.join(ansible_path, 'modules/__init__.py'),
                          'w'):
                    pass

        # activate the virtual environment
        env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])
        env['PYTHONPATH'] = python_path

        # make sure coverage is available in the virtual environment if needed
        if args.coverage:
            run_command(args,
                        generate_pip_install(['pip'],
                                             'sanity.import',
                                             packages=['setuptools']),
                        env=env)
            run_command(args,
                        generate_pip_install(['pip'],
                                             'sanity.import',
                                             packages=['coverage']),
                        env=env)
            run_command(args, [
                'pip', 'uninstall', '--disable-pip-version-check', '-y',
                'setuptools'
            ],
                        env=env)
            run_command(args, [
                'pip', 'uninstall', '--disable-pip-version-check', '-y', 'pip'
            ],
                        env=env)

        cmd = ['importer.py']

        data = '\n'.join(paths)

        display.info(data, verbosity=4)

        results = []

        virtualenv_python = os.path.join(virtual_environment_bin, 'python')

        try:
            with coverage_context(args):
                stdout, stderr = intercept_command(
                    args,
                    cmd,
                    self.name,
                    env,
                    capture=True,
                    data=data,
                    python_version=python_version,
                    virtualenv=virtualenv_python)

            if stdout or stderr:
                raise SubprocessError(cmd, stdout=stdout, stderr=stderr)
        except SubprocessError as ex:
            if ex.status != 10 or ex.stderr or not ex.stdout:
                raise

            pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'

            results = parse_to_list_of_dict(pattern, ex.stdout)

            results = [
                SanityMessage(
                    message=r['message'],
                    path=r['path'],
                    line=int(r['line']),
                    column=int(r['column']),
                ) for r in results
            ]

        results = settings.process_errors(results, paths)

        if results:
            return SanityFailure(self.name,
                                 messages=results,
                                 python_version=python_version)

        return SanitySuccess(self.name, python_version=python_version)
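
Examples #17 and #23 turn the importer's stdout into SanityMessage objects with a named-group regular expression. parse_to_list_of_dict is not shown, but its effect can be reproduced with the standard library alone; a short illustration using the same pattern on an invented output line:

import re

pattern = r'^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
stdout = 'lib/ansible/modules/example.py:12:1: unable to import a restricted module'
matches = [m.groupdict() for m in re.finditer(pattern, stdout, flags=re.MULTILINE)]
# matches == [{'path': 'lib/ansible/modules/example.py', 'line': '12', 'column': '1',
#              'message': 'unable to import a restricted module'}]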