def command_integration_filter(args, targets, init_callback=None):
    """Filter integration targets by changes, environment, and cloud requirements.

    :type args: IntegrationConfig
    :type targets: collections.Iterable[IntegrationTarget]
    :type init_callback: (IntegrationConfig, tuple[IntegrationTarget]) -> None
    :rtype: tuple[IntegrationTarget]
    """
    visible_targets = tuple(candidate for candidate in targets if 'hidden/' not in candidate.aliases)

    changes = get_changes_filter(args)
    require = (args.require or []) + changes
    exclude = (args.exclude or [])

    internal_targets = walk_internal_targets(visible_targets, args.include, exclude, require)

    environment_exclude = get_integration_filter(args, internal_targets)
    environment_exclude += cloud_filter(args, internal_targets)

    if environment_exclude:
        # the environment ruled out more targets; re-filter with the expanded exclude list
        exclude += environment_exclude
        internal_targets = walk_internal_targets(visible_targets, args.include, exclude, require)

    if not internal_targets:
        raise AllTargetsSkipped()

    if args.start_at and not any(candidate.name == args.start_at for candidate in internal_targets):
        raise ApplicationError('Start at target matches nothing: %s' % args.start_at)

    if init_callback:
        init_callback(args, internal_targets)

    cloud_init(args, internal_targets)

    if args.delegate:
        raise Delegate(require=changes, exclude=exclude)

    install_command_requirements(args)

    return internal_targets
def docker_run(args, image, options):
    """Run a docker image, retrying once on failure.

    :type args: EnvironmentConfig
    :type image: str
    :type options: list[str] | None
    :rtype: str | None, str | None
    """
    options = options or []

    # range(1, 3) yields two attempts
    for _attempt in range(1, 3):
        try:
            return docker_command(args, ['run'] + options + [image], capture=True)
        except SubprocessError as ex:
            display.error(ex)
            display.warning('Failed to run docker image "%s". Waiting a few seconds before trying again.' % image)
            time.sleep(3)

    raise ApplicationError('Failed to run docker image "%s".' % image)
def get_files_needed(target_dependencies):
    """Collect and validate the files declared via "needs/file/*" aliases.

    :type target_dependencies: list[IntegrationTarget]
    :rtype: list[str]
    """
    needed = set()

    for dependency in target_dependencies:
        needed.update(dependency.needs_file)

    files_needed = sorted(needed)

    invalid_paths = [candidate for candidate in files_needed if not os.path.isfile(candidate)]

    if invalid_paths:
        raise ApplicationError('Invalid "needs/file/*" aliases:\n%s' % '\n'.join(invalid_paths))

    return files_needed
def __init__(self, args, git):
    """Analyze the git repository to determine the current branch, fork point, and file change sets.

    :type args: CommonConfig
    :type git: Git
    """
    self.args = args
    self.current_branch = git.get_branch()

    # this analysis only makes sense on a feature branch; refuse to run on an official branch
    if self.is_official_branch(self.current_branch):
        raise InvalidBranch(
            branch=self.current_branch,
            reason='Current branch is not a feature branch.')

    self.fork_branch = None
    self.fork_point = None

    self.local_branches = sorted(git.get_branches())
    self.official_branches = sorted([b for b in self.local_branches if self.is_official_branch(b)])

    # try each official branch in sorted order until one yields a fork point;
    # note the loop variable is the instance attribute, so the matching branch is retained
    for self.fork_branch in self.official_branches:
        try:
            self.fork_point = git.get_branch_fork_point(self.fork_branch)
            break
        except SubprocessError:
            pass

    if self.fork_point is None:
        raise ApplicationError('Unable to auto-detect fork branch and fork point.')

    # tracked files (including unchanged)
    self.tracked = sorted(git.get_file_names(['--cached']))
    # untracked files (except ignored)
    self.untracked = sorted(git.get_file_names(['--others', '--exclude-standard']))
    # tracked changes (including deletions) committed since the branch was forked
    self.committed = sorted(git.get_diff_names([self.fork_point, 'HEAD']))
    # tracked changes (including deletions) which are staged
    self.staged = sorted(git.get_diff_names(['--cached']))
    # tracked changes (including deletions) which are not staged
    self.unstaged = sorted(git.get_diff_names([]))
def command_compile(args):
    """Run compile tests for each supported (or the single requested) Python version.

    :type args: CompileConfig
    """
    changes = get_changes_filter(args)
    require = (args.require or []) + changes
    include, exclude = walk_external_targets(walk_compile_targets(), args.include, args.exclude, require)

    if not include:
        raise AllTargetsSkipped()

    if args.delegate:
        raise Delegate(require=changes)

    install_command_requirements(args)

    # honor --python by restricting the run to the selected version only
    if args.python:
        versions = [v for v in COMPILE_PYTHON_VERSIONS if v == args.python_version]
    else:
        versions = list(COMPILE_PYTHON_VERSIONS)

    tests_run = 0
    failures = []

    for version in versions:
        display.info('Compile with Python %s' % version)

        result = compile_version(args, version, include, exclude)
        result.write(args)

        tests_run += 1

        if isinstance(result, TestFailure):
            failures.append('compile --python %s' % version)

    if not failures:
        return

    message = 'The %d compile test(s) listed below (out of %d) failed. See error output above for details.\n%s' % (
        len(failures), tests_run, '\n'.join(failures))

    if args.failure_ok:
        display.error(message)
    else:
        raise ApplicationError(message)
def delegate_tox(args, exclude, require):
    """Delegate the test run to tox, once per selected Python version.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    """
    if args.python:
        if args.python not in SUPPORTED_PYTHON_VERSIONS:
            raise ApplicationError('tox does not support Python version %s' % args.python)
        versions = args.python,
    else:
        versions = SUPPORTED_PYTHON_VERSIONS

    options = {
        '--tox': args.tox_args,
        '--tox-sitepackages': 0,
    }

    for version in versions:
        tox = ['tox', '-c', 'test/runner/tox.ini', '-e', 'py' + version.replace('.', '')]

        if args.tox_sitepackages:
            tox.append('--sitepackages')

        tox.append('--')

        cmd = generate_command(args, os.path.abspath('test/runner/test.py'), options, exclude, require)

        if not args.python:
            # tell the delegated run which interpreter it is executing under
            cmd += ['--python', version]

        if isinstance(args, TestConfig) and args.coverage and not args.coverage_label:
            # label coverage data per tox environment so results can be told apart
            cmd += ['--coverage-label', 'tox-%s' % version]

        run_command(args, tox + cmd)
def _wait_for_service(self, protocol, acme_host, port, local_part, name):
    """Wait for an endpoint to accept connections."""
    if self.args.explain:
        return

    client = HttpClient(self.args, always=True, insecure=True)
    endpoint = '%s://%s:%d/%s' % (protocol, acme_host, port, local_part)

    # 29 polls, matching range(1, 30) in the original
    for _attempt in range(1, 30):
        display.info('Waiting for %s: %s' % (name, endpoint), verbosity=1)

        try:
            client.get(endpoint)
        except SubprocessError:
            time.sleep(1)
        else:
            return

    raise ApplicationError('Timeout waiting for %s.' % name)
def wait(self):  # type: () -> str
    """Wait for instance to respond to SSH."""
    attempts_remaining = 89  # range(1, 90) in the original: 89 tries

    while attempts_remaining:
        attempts_remaining -= 1

        try:
            stdout = self.ssh('pwd', capture=True)[0]
        except SubprocessError:
            time.sleep(10)
            continue

        if self.core_ci.args.explain:
            # explain mode runs no real command; fabricate a plausible path
            return '/pwd'

        pwd = stdout.strip().splitlines()[-1]

        if not pwd.startswith('/'):
            raise Exception('Unexpected current working directory "%s" from "pwd" command output:\n%s' % (pwd, stdout))

        return pwd

    raise ApplicationError('Timeout waiting for %s/%s instance %s.' % (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
def _wait_for_service(self):
    """Wait for the CloudStack service endpoint to accept connections."""
    if self.args.explain:
        return

    client = HttpClient(self.args, always=True)
    endpoint = self.endpoint

    for _attempt in range(1, 30):
        display.info('Waiting for CloudStack service: %s' % endpoint, verbosity=1)

        try:
            client.get(endpoint)
        except SubprocessError:
            time.sleep(30)
        else:
            return

    raise ApplicationError('Timeout waiting for CloudStack service.')
def initialize_coverage(args):
    """Import and return the coverage module, installing requirements if requested.

    :type args: CoverageConfig
    :rtype: coverage
    """
    if args.delegate:
        raise Delegate()

    if args.requirements:
        install_command_requirements(args)

    try:
        import coverage
    except ImportError:
        coverage = None

    if coverage is None:
        raise ApplicationError('You must install the "coverage" python module to use this command.')

    return coverage
def _get_parallels_endpoints(self):
    """Fetch the list of available Parallels endpoints, retrying on HTTP errors.

    :rtype: tuple[str]
    """
    client = HttpClient(self.args, always=True)
    display.info('Getting available endpoints...', verbosity=1)
    retry_delay = 3
    url = 'https://s3.amazonaws.com/ansible-ci-files/ansible-test/parallels-endpoints.txt'

    for _attempt in range(1, 10):
        response = client.get(url)

        if response.status_code == 200:
            endpoints = tuple(response.response.splitlines())
            listing = '\n'.join(' - %s' % endpoint for endpoint in endpoints)
            display.info('Available endpoints (%d):\n%s' % (len(endpoints), listing), verbosity=1)
            return endpoints

        display.warning('HTTP %d error getting endpoints, trying again in %d seconds.' % (response.status_code, retry_delay))
        time.sleep(retry_delay)

    raise ApplicationError('Unable to get available endpoints.')
def _wait_for_service(self):
    """Wait for the VCenter service endpoint to accept connections."""
    if self.args.explain:
        return

    client = HttpClient(self.args, always=True, insecure=self.insecure)
    endpoint = 'https://%s:%s' % (self.endpoint, self.port)

    for _attempt in range(1, 30):
        display.info('Waiting for VCenter service: %s' % endpoint, verbosity=1)

        try:
            client.get(endpoint)
        except SubprocessError:
            time.sleep(10)
        else:
            return

    raise ApplicationError('Timeout waiting for VCenter service.')
def _get_credentials(self):
    """Wait for the CloudStack simulator to return credentials.

    :rtype: dict[str, str]
    """
    client = HttpClient(self.args, always=True)
    endpoint = '%s/admin.json' % self.endpoint

    remaining = 29  # matches range(1, 30) in the original

    while remaining:
        remaining -= 1
        display.info('Waiting for CloudStack credentials: %s' % endpoint, verbosity=1)

        response = client.get(endpoint)

        if response.status_code == 200:
            try:
                return response.json()
            except HttpError as ex:
                # a 200 response with an unparsable body; report it and keep polling
                display.error(ex)

        time.sleep(30)

    raise ApplicationError('Timeout waiting for CloudStack credentials.')
def _wait_for_service(self, endpoint):
    """Wait for the OpenShift service endpoint to accept connections.

    :type endpoint: str
    """
    if self.args.explain:
        return

    client = HttpClient(self.args, always=True, insecure=True)

    for _attempt in range(1, 30):
        display.info('Waiting for OpenShift service: %s' % endpoint, verbosity=1)

        try:
            client.get(endpoint)
        except SubprocessError:
            time.sleep(10)
        else:
            return

    raise ApplicationError('Timeout waiting for OpenShift service.')
def docker_pull(args, image):
    """Pull a docker image, retrying on failure, unless pulling is disabled.

    :type args: EnvironmentConfig
    :type image: str
    """
    if not args.docker_pull:
        display.warning('Skipping docker pull for "%s". Image may be out-of-date.' % image)
        return

    for _attempt in range(1, 10):
        try:
            docker_command(args, ['pull', image])
        except SubprocessError:
            display.warning('Failed to pull docker image "%s". Waiting a few seconds before trying again.' % image)
            time.sleep(3)
        else:
            return

    raise ApplicationError('Failed to pull docker image "%s".' % image)
def extract_python_module_utils_imports(path, module_utils):
    """Return a list of module_utils imports found in the specified source file.

    :type path: str
    :type module_utils: set[str]
    :rtype: set[str]
    """
    with open(path, 'r') as module_fd:
        code = module_fd.read()

    try:
        tree = ast.parse(code)
    except SyntaxError as ex:
        # Setting the full path to the filename results in only the filename being given for str(ex).
        # As a work-around, set the filename to a UUID and replace it in the final string output with the actual path.
        placeholder = str(uuid.uuid4())
        ex.filename = placeholder
        error = str(ex).replace(placeholder, path)
        raise ApplicationError('AST parse error: %s' % error)

    finder = ModuleUtilFinder(path, module_utils)
    finder.visit(tree)

    return finder.imports
def command_windows_integration(args):
    """Run windows integration tests, provisioning remote instances when --windows is given.

    :type args: WindowsIntegrationConfig
    """
    filename = 'test/integration/inventory.winrm'

    if not args.explain and not args.windows and not os.path.isfile(filename):
        raise ApplicationError('Use the --windows option or provide an inventory file (see %s.template).' % filename)

    internal_targets = command_integration_filter(args, walk_windows_integration_targets())

    if args.windows:
        instances = []  # type: list[lib.thread.WrappedThread]

        # provision one remote instance per requested windows version, in parallel
        for version in args.windows:
            instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version))
            instance.daemon = True
            instance.start()
            instances.append(instance)

        install_command_requirements(args)

        # poll until all provisioning threads have finished
        while any(instance.is_alive() for instance in instances):
            time.sleep(1)

        remotes = [instance.wait_for_result() for instance in instances]
        inventory = windows_inventory(remotes)

        display.info('>>> Inventory: %s\n%s' % (filename, inventory.strip()), verbosity=3)

        if not args.explain:
            with open(filename, 'w') as inventory_fd:
                inventory_fd.write(inventory)
    else:
        install_command_requirements(args)

    # the original wrapped this call in `try: ... finally: pass`, a no-op construct
    # (presumably leftover from removed cleanup code); removed as dead structure
    command_integration_filtered(args, internal_targets)
def data_init():  # type: () -> DataContext
    """Initialize provider plugins."""
    provider_types = (
        'layout',
        'source',
    )

    # import all plugins of each provider type so DataContext can discover them
    for provider_type in provider_types:
        import_plugins('provider/%s' % provider_type)

    try:
        context = DataContext()
    except ProviderNotFoundForPath:
        # the cwd is not inside a recognized Ansible source tree or collection
        raise ApplicationError('''The current working directory must be at or below one of: - Ansible source: %s/ - Ansible collection: {...}/ansible_collections/{namespace}/{collection}/ Current working directory: %s''' % (ANSIBLE_ROOT, os.getcwd()))

    return context
def docker_pull(args, image):
    """Pull the specified docker image unless it is already present or pulling is disabled.

    :type args: EnvironmentConfig
    :type image: str
    """
    if ('@' in image or ':' in image) and docker_images(args, image):
        # an image referenced by tag or digest that already exists locally need not be pulled
        display.info('Skipping docker pull of existing image with tag or digest: %s' % image, verbosity=2)
        return

    if not args.docker_pull:
        display.warning('Skipping docker pull for "%s". Image may be out-of-date.' % image)
        return

    for _attempt in range(1, 10):
        try:
            docker_command(args, ['pull', image])
        except SubprocessError:
            display.warning('Failed to pull docker image "%s". Waiting a few seconds before trying again.' % image)
            time.sleep(3)
        else:
            return

    raise ApplicationError('Failed to pull docker image "%s".' % image)
def wait(self):
    """Wait for instance to respond to ansible ping."""
    extra_vars = [
        'ansible_host=%s' % self.core_ci.connection.hostname,
        'ansible_port=%s' % self.core_ci.connection.port,
        'ansible_connection=local',
        'ansible_ssh_private_key_file=%s' % self.core_ci.ssh_key.key,
    ]

    name = '%s-%s' % (self.core_ci.platform, self.core_ci.version.replace('.', '-'))
    env = ansible_environment(self.core_ci.args)

    cmd = [
        'ansible',
        '-m', '%s_command' % self.core_ci.platform,
        '-a', 'commands=?',
        '-u', self.core_ci.connection.username,
        '-i', '%s,' % name,
        '-e', ' '.join(extra_vars),
        name,
    ]

    for _attempt in range(1, 90):
        try:
            intercept_command(self.core_ci.args, cmd, 'ping', env=env)
        except SubprocessError:
            sleep(10)
        else:
            return

    raise ApplicationError('Timeout waiting for %s/%s instance %s.' % (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
def get_docker_container_id():
    """Return the docker container id for the current process, or None when not in a container.

    :rtype: str | None
    """
    cgroup_file = '/proc/self/cgroup'

    if not os.path.exists(cgroup_file):
        return None

    with open(cgroup_file) as cgroup_fd:
        contents = cgroup_fd.read()

    # the third colon-separated field of each cgroup line is the hierarchy path
    cgroup_paths = [line.split(':')[2] for line in contents.splitlines()]
    container_ids = set(p.split('/')[2] for p in cgroup_paths if p.startswith('/docker/'))

    if not container_ids:
        return None

    if len(container_ids) == 1:
        return container_ids.pop()

    raise ApplicationError('Found multiple container_id candidates: %s\n%s' % (sorted(container_ids), contents))
def __init__(self, lines):
    """Parse the given diff lines by feeding each one through the current state handler.

    :type lines: list[str]
    """
    self.lines = lines
    self.files = []  # type: t.List[FileDiff]

    # parser state: `action` is the handler for the next line; the handlers
    # themselves advance the state machine by reassigning it
    self.action = self.process_start
    self.line_number = 0
    self.previous_line = None  # type: t.Optional[str]
    self.line = None  # type: t.Optional[str]
    self.file = None  # type: t.Optional[FileDiff]

    for self.line in self.lines:
        self.line_number += 1

        try:
            self.action()
        except Exception as ex:
            # wrap any handler failure with parser context (line number, current
            # and previous lines, traceback) to make malformed diffs debuggable
            message = textwrap.dedent(''' %s Line: %d Previous: %s Current: %s %s ''').strip() % (
                ex,
                self.line_number,
                self.previous_line or '',
                self.line or '',
                traceback.format_exc(),
            )

            raise ApplicationError(message.strip())

        self.previous_line = self.line

    self.complete_file()
def ansible_environment(args, color=True):
    """Return a dict of environment variables for running ansible commands.

    :type args: CommonConfig
    :type color: bool
    :rtype: dict[str, str]
    """
    env = common_environment()
    path = env['PATH']

    # make sure the repo's bin directory comes first on PATH so the local ansible is used
    ansible_path = os.path.join(os.getcwd(), 'bin')

    if not path.startswith(ansible_path + os.pathsep):
        path = ansible_path + os.pathsep + path

    if isinstance(args, IntegrationConfig):
        ansible_config = 'test/integration/%s.cfg' % args.command
    else:
        ansible_config = 'test/%s/ansible.cfg' % args.command

    if not os.path.exists(ansible_config):
        raise ApplicationError('Configuration not found: %s' % ansible_config)

    force_color = 'true' if args.color and color else 'false'

    ansible = dict(
        ANSIBLE_FORCE_COLOR=force_color,
        ANSIBLE_DEPRECATION_WARNINGS='false',
        ANSIBLE_HOST_KEY_CHECKING='false',
        ANSIBLE_CONFIG=os.path.abspath(ansible_config),
        PYTHONPATH=os.path.abspath('lib'),
        PAGER='/bin/cat',
        PATH=path,
    )

    env.update(ansible)

    if args.debug:
        env.update(dict(ANSIBLE_DEBUG='true'))

    return env
def disable_pendo(self):
    """Disable Pendo tracking."""
    display.info('Disable Pendo tracking')

    config = TowerConfig.parse(self.config_path)

    # tower-cli does not recognize TOWER_ environment variables
    cmd = ['tower-cli', 'setting', 'modify', 'PENDO_TRACKING_STATE', 'off',
           '-h', config.host, '-u', config.username, '-p', config.password]

    attempts = 60

    while attempts:
        attempts -= 1

        try:
            run_command(self.args, cmd, capture=True)
            return
        except SubprocessError as ex:
            if not attempts:
                raise ApplicationError('Timed out trying to disable Pendo tracking:\n%s' % ex)

            time.sleep(5)
def add_import(self, name, line_number):
    """Record a module_utils import, trimming the dotted name until a known module matches.

    :type name: str
    :type line_number: int
    """
    import_name = name
    prefix_length = len('ansible.module_utils.')

    while len(name) > prefix_length:
        if name in self.module_utils:
            # duplicate imports are ignored
            if name not in self.imports:
                display.info('%s:%d imports module_utils: %s' % (self.path, line_number, name), verbosity=5)
                self.imports.add(name)

            return

        # drop the last dotted component and retry
        name = '.'.join(name.split('.')[:-1])

    if self.path.startswith('test/'):
        # invalid imports in tests are ignored
        return

    raise ApplicationError('%s:%d Invalid module_utils import: %s' % (self.path, line_number, import_name))
def parse(path):
    """Load and validate a Tower configuration file.

    :type path: str
    :rtype: TowerConfig
    """
    parser = ConfigParser()
    parser.read(path)

    keys = (
        'version',
        'host',
        'username',
        'password',
    )

    values = dict((key, parser.get('default', key)) for key in keys)
    config = TowerConfig(values)

    # every key must be present and non-empty
    missing = [key for key in keys if not values.get(key)]

    if missing:
        raise ApplicationError('Missing or empty Tower configuration value(s): %s' % ', '.join(missing))

    return config
def command_sanity(args):
    """Run the selected sanity tests, honoring test selection and skip options.

    :type args: SanityConfig
    """
    changes = get_changes_filter(args)
    require = (args.require or []) + changes
    targets = SanityTargets(args.include, args.exclude, require)

    if not targets.include:
        raise AllTargetsSkipped()

    if args.delegate:
        raise Delegate(require=changes)

    install_command_requirements(args)

    tests = sanity_get_tests()

    if args.test:
        tests = [t for t in tests if t.name in args.test]
    else:
        # tests disabled by default are skipped unless --allow-disabled is given
        disabled = [
            t.name for t in tests
            if not t.enabled and not args.allow_disabled
        ]
        tests = [t for t in tests if t.enabled or args.allow_disabled]

        if disabled:
            display.warning(
                'Skipping tests disabled by default without --allow-disabled: %s'
                % ', '.join(sorted(disabled)))

    if args.skip_test:
        tests = [t for t in tests if t.name not in args.skip_test]

    total = 0
    failed = []

    for test in tests:
        if args.list_tests:
            display.info(test.name)
            continue

        # multi-version tests run once per supported python version; others run once
        if isinstance(test, SanityMultipleVersion):
            versions = SUPPORTED_PYTHON_VERSIONS
        else:
            versions = (None, )

        for version in versions:
            # honor --python for version-specific runs
            if args.python and version and version != args.python_version:
                continue

            display.info(
                'Sanity check using %s%s'
                % (test.name, ' with Python %s' % version if version else ''))

            options = ''

            if isinstance(test, SanityCodeSmellTest):
                result = test.test(args, targets)
            elif isinstance(test, SanityMultipleVersion):
                result = test.test(args, targets, python_version=version)
                # record the version so failures can be reproduced exactly
                options = ' --python %s' % version
            elif isinstance(test, SanitySingleVersion):
                result = test.test(args, targets)
            else:
                raise Exception('Unsupported test type: %s' % type(test))

            result.write(args)

            total += 1

            if isinstance(result, SanityFailure):
                failed.append(result.test + options)

    if failed:
        message = 'The %d sanity test(s) listed below (out of %d) failed. See error output above for details.\n%s' % (
            len(failed), total, '\n'.join(failed))
        if args.failure_ok:
            display.error(message)
        else:
            raise ApplicationError(message)
def __init__(self, args, platform, version, stage='prod', persist=True, name=None):
    """Set up a core CI instance request, reusing a persisted instance when still valid.

    :type args: EnvironmentConfig
    :type platform: str
    :type version: str
    :type stage: str
    :type persist: bool
    :type name: str
    """
    self.args = args
    self.platform = platform
    self.version = version
    self.stage = stage
    self.client = HttpClient(args)
    self.connection = None
    self.instance_id = None
    self.endpoint = None
    self.max_threshold = 1
    self.name = name if name else '%s-%s' % (self.platform, self.version)
    self.ci_key = os.path.expanduser('~/.ansible-core-ci.key')

    aws_platforms = (
        'aws',
        'azure',
        'windows',
        'freebsd',
        'rhel',
        'vyos',
        'junos',
        'ios',
    )

    osx_platforms = (
        'osx',
    )

    if self.platform in aws_platforms:
        if args.remote_aws_region:
            # permit command-line override of region selection
            region = args.remote_aws_region
            # use a dedicated CI key when overriding the region selection
            self.ci_key += '.%s' % args.remote_aws_region
        elif is_shippable():
            # split Shippable jobs across multiple regions to maximize use of launch credits
            if self.platform == 'windows':
                region = 'us-east-2'
            else:
                region = 'us-east-1'
        else:
            # send all non-Shippable jobs to us-east-1 to reduce api key maintenance
            region = 'us-east-1'

        self.endpoints = AWS_ENDPOINTS[region],

        if self.platform == 'windows':
            # windows uses WinRM over HTTPS rather than SSH
            self.ssh_key = None
            self.port = 5986
        else:
            self.ssh_key = SshKey(args)
            self.port = 22
    elif self.platform in osx_platforms:
        self.endpoints = self._get_parallels_endpoints()
        self.max_threshold = 6

        self.ssh_key = SshKey(args)
        self.port = None
    else:
        raise ApplicationError('Unsupported platform: %s' % platform)

    self.path = os.path.expanduser('~/.ansible/test/instances/%s-%s' % (self.name, self.stage))

    if persist and self._load():
        try:
            # verify the persisted instance is still valid on the server
            display.info('Checking existing %s/%s instance %s.' % (self.platform, self.version, self.instance_id), verbosity=1)

            self.connection = self.get(always_raise_on=[404])

            display.info('Loaded existing %s/%s from: %s' % (self.platform, self.version, self._uri), verbosity=1)
        except HttpError as ex:
            if ex.status != 404:
                raise

            # a 404 means the persisted instance no longer exists; discard it
            self._clear()

            display.info('Cleared stale %s/%s instance %s.' % (self.platform, self.version, self.instance_id), verbosity=1)

            self.instance_id = None
            self.endpoint = None
    else:
        self.instance_id = None
        self.endpoint = None
        self._clear()

    if self.instance_id:
        self.started = True
    else:
        # no usable persisted instance; allocate a fresh id for a new one
        self.started = False
        self.instance_id = str(uuid.uuid4())
        self.endpoint = None
def test(self, args, targets):
    """Run this code smell test against the selected targets and report the result.

    :type args: SanityConfig
    :type targets: SanityTargets
    :rtype: TestResult
    """
    if self.path.endswith('.py'):
        cmd = [args.python_executable, self.path]
    else:
        cmd = [self.path]

    env = ansible_environment(args, color=False)

    pattern = None
    data = None

    if self.config:
        output = self.config.get('output')
        extensions = self.config.get('extensions')
        prefixes = self.config.get('prefixes')
        files = self.config.get('files')
        always = self.config.get('always')
        text = self.config.get('text')

        if output == 'path-line-column-message':
            pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
        elif output == 'path-message':
            pattern = '^(?P<path>[^:]*): (?P<message>.*)$'
        else:
            # bug fix: the original assigned the ApplicationError instance to `pattern`
            # instead of raising it, silently accepting unsupported output types and
            # later failing obscurely when the exception object was used as a regex
            raise ApplicationError('Unsupported output type: %s' % output)

        paths = sorted(i.path for i in targets.include)

        if always:
            paths = []

        # short-term work-around for paths being str instead of unicode on python 2.x
        if sys.version_info[0] == 2:
            paths = [p.decode('utf-8') for p in paths]

        if text is not None:
            if text:
                paths = [p for p in paths if not is_binary_file(p)]
            else:
                paths = [p for p in paths if is_binary_file(p)]

        if extensions:
            paths = [p for p in paths if os.path.splitext(p)[1] in extensions or (p.startswith('bin/') and '.py' in extensions)]

        if prefixes:
            paths = [p for p in paths if any(p.startswith(pre) for pre in prefixes)]

        if files:
            paths = [p for p in paths if os.path.basename(p) in files]

        if not paths and not always:
            return SanitySkipped(self.name)

        data = '\n'.join(paths)

        if data:
            display.info(data, verbosity=4)

    try:
        stdout, stderr = run_command(args, cmd, data=data, env=env, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status

    if stdout and not stderr:
        if pattern:
            # structured output: parse each line into a SanityMessage
            matches = [parse_to_dict(pattern, line) for line in stdout.splitlines()]

            messages = [SanityMessage(
                message=m['message'],
                path=m['path'],
                line=int(m.get('line', 0)),
                column=int(m.get('column', 0)),
            ) for m in matches]

            return SanityFailure(self.name, messages=messages)

    if stderr or status:
        # unstructured failure: report the raw subprocess outcome
        summary = u'%s' % SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
        return SanityFailure(self.name, summary=summary)

    return SanitySuccess(self.name)
def command_sanity_pep8(args, targets):
    """Run pep8 over the selected python files, honoring skip/legacy lists.

    :type args: SanityConfig
    :type targets: SanityTargets
    """
    skip_path = 'test/sanity/pep8/skip.txt'
    legacy_path = 'test/sanity/pep8/legacy-files.txt'

    # files excluded from checking entirely
    with open(skip_path, 'r') as skip_fd:
        skip_paths = set(skip_fd.read().splitlines())

    # files grandfathered in under a relaxed rule set
    with open(legacy_path, 'r') as legacy_fd:
        legacy_paths = set(legacy_fd.read().splitlines())

    # codes tolerated (but reported) for legacy files
    with open('test/sanity/pep8/legacy-ignore.txt', 'r') as ignore_fd:
        legacy_ignore = set(ignore_fd.read().splitlines())

    # codes ignored for all files
    with open('test/sanity/pep8/current-ignore.txt', 'r') as ignore_fd:
        current_ignore = sorted(ignore_fd.read().splitlines())

    paths = sorted(
        i.path for i in targets.include
        if os.path.splitext(i.path)[1] == '.py' and i.path not in skip_paths)

    if not paths:
        display.info('No tests applicable.', verbosity=1)
        return

    cmd = [
        'pep8',
        '--max-line-length', '160',
        '--config', '/dev/null',
        '--ignore', ','.join(sorted(current_ignore)),
    ] + paths

    try:
        stdout, stderr = run_command(args, cmd, capture=True)
        status = 0
    except SubprocessError as ex:
        stdout = ex.stdout
        stderr = ex.stderr
        status = ex.status

    if stderr:
        # anything on stderr means pep8 itself failed, not that style issues were found
        raise SubprocessError(cmd=cmd, status=status, stderr=stderr)

    # pep8 report lines: path:line:column: CODE message
    pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<code>[A-Z0-9]{4}) (?P<message>.*)$'

    results = [re.search(pattern, line).groupdict() for line in stdout.splitlines()]

    for result in results:
        for key in 'line', 'column':
            result[key] = int(result[key])

    failed_result_paths = set([result['path'] for result in results])
    passed_legacy_paths = set([
        path for path in paths
        if path in legacy_paths and path not in failed_result_paths
    ])

    errors = []
    summary = {}

    for path in sorted(passed_legacy_paths):
        # Keep files out of the list which no longer require the relaxed rule set.
        errors.append('PEP 8: %s: Passes current rule set. Remove from legacy list (%s).' % (path, legacy_path))

    for path in sorted(skip_paths):
        if not os.path.exists(path):
            # Keep files out of the list which no longer exist in the repo.
            errors.append('PEP 8: %s: Does not exist. Remove from skip list (%s).' % (path, skip_path))

    for path in sorted(legacy_paths):
        if not os.path.exists(path):
            # Keep files out of the list which no longer exist in the repo.
            errors.append('PEP 8: %s: Does not exist. Remove from legacy list (%s).' % (path, legacy_path))

    for result in results:
        path = result['path']
        line = result['line']
        column = result['column']
        code = result['code']
        message = result['message']

        msg = 'PEP 8: %s:%s:%s: %s %s' % (path, line, column, code, message)

        if path in legacy_paths:
            msg += ' (legacy)'
        else:
            msg += ' (current)'

        if path in legacy_paths and code in legacy_ignore:
            # Files on the legacy list are permitted to have errors on the legacy ignore list.
            # However, we want to report on their existence to track progress towards eliminating these exceptions.
            display.info(msg, verbosity=3)

            # normalize digits so messages differing only in numbers aggregate together
            key = '%s %s' % (code, re.sub('[0-9]+', 'NNN', message))

            if key not in summary:
                summary[key] = 0

            summary[key] += 1
        else:
            # Files not on the legacy list and errors not on the legacy ignore list are PEP 8 policy errors.
            errors.append(msg)

    for error in errors:
        display.error(error)

    if summary:
        lines = []
        count = 0

        for key in sorted(summary):
            count += summary[key]
            lines.append('PEP 8: %5d %s' % (summary[key], key))

        display.info('PEP 8: There were %d different legacy issues found (%d total):' % (len(summary), count), verbosity=1)
        display.info('PEP 8: Count Code Message', verbosity=1)

        for line in lines:
            display.info(line, verbosity=1)

    if errors:
        raise ApplicationError('PEP 8: There are %d issues which need to be resolved.' % len(errors))