from catkin_tools.terminal_color import fmt
from catkin_tools.terminal_color import sanitize


def colorize_cmake(line):
    """Colorizes output from CMake.

    :param line: one newline-terminated line from `cmake` which needs coloring
    :type line: str
    """
    cline = sanitize(line)
    if line.startswith('-- '):
        cline = '@{cf}-- @|' + cline[len('-- '):]
        if ':' in cline:
            split_cline = cline.split(':')
            cline = split_cline[0] + ':@{yf}' + ':'.join(split_cline[1:]) + '@|'
    if line.lower().startswith('warning'):
        # WARNING
        cline = fmt('@{yf}') + cline
    if line.startswith('CMake Warning'):
        # CMake Warning...
        cline = cline.replace('CMake Warning', '@{yf}@!CMake Warning@|')
    if line.startswith('ERROR:'):
        # ERROR:
        cline = cline.replace('ERROR:', '@!@{rf}ERROR:@|')
    if line.startswith('CMake Error'):
        # CMake Error...
        cline = cline.replace('CMake Error', '@{rf}@!CMake Error@|')
    if line.startswith('Call Stack (most recent call first):'):
        # CMake Call Stack
        cline = cline.replace('Call Stack (most recent call first):',
                              '@{cf}@_Call Stack (most recent call first):@|')
    return fmt(cline)
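# --- Usage sketch (not part of the original source) ---
# A minimal, hypothetical driver showing how colorize_cmake above could be
# applied: pipe cmake's output through it line by line. The subprocess
# invocation and function name here are assumptions for illustration only.
import subprocess
import sys


def run_cmake_colorized(cmake_args):
    # Merge stderr into stdout so warnings and errors get colorized too
    proc = subprocess.Popen(
        ['cmake'] + cmake_args,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        universal_newlines=True)
    for line in proc.stdout:
        # colorize_cmake expects a newline-terminated line
        sys.stdout.write(colorize_cmake(line))
    return proc.wait()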
def colorize_cmake(self, line):
    """Colorizes output from CMake.

    This also prepends the source path to the locations of warnings and errors.

    :param line: one newline-terminated line from `cmake` which needs coloring
    :type line: str
    """
    cline = sanitize(line)
    if len(cline.strip()) == 0:
        return cline
    if line.startswith('-- '):
        cline = '@{cf}--@| ' + cline[len('-- '):]
        if ':' in cline:
            split_cline = cline.split(':', 1)
            if len(split_cline[1].strip()) > 0:
                cline = split_cline[0] + (':@{yf}%s@|' % split_cline[1])
    elif line.lower().startswith('warning'):
        # WARNING
        cline = fmt('@{yf}', reset=False) + cline
    elif line.startswith('CMake Warning at '):
        # CMake Warning at...
        cline = cline.replace(
            'CMake Warning at ',
            '@{yf}@!CMake Warning@| at ' + self.source_path + os.path.sep)
    elif line.startswith('CMake Warning (dev) at '):
        # CMake Warning (dev) at...
        cline = cline.replace(
            'CMake Warning (dev) at ',
            '@{yf}@!CMake Warning (dev)@| at ' + self.source_path + os.path.sep)
    elif line.startswith('CMake Warning'):
        # CMake Warning...
        cline = cline.replace('CMake Warning', '@{yf}@!CMake Warning@|')
    elif line.startswith('ERROR:'):
        # ERROR:
        cline = cline.replace('ERROR:', '@!@{rf}ERROR:@|')
    elif line.startswith('CMake Error at '):
        # CMake Error at...
        cline = cline.replace(
            'CMake Error at ',
            '@{rf}@!CMake Error@| at ' + self.source_path + os.path.sep)
    elif line.startswith('CMake Error'):
        # CMake Error...
        cline = cline.replace('CMake Error', '@{rf}@!CMake Error@|')
    elif line.startswith('Call Stack (most recent call first):'):
        # CMake Call Stack
        cline = cline.replace('Call Stack (most recent call first):',
                              '@{cf}@_Call Stack (most recent call first):@|')
    return fmt(cline, reset=False)
from catkin_tools.terminal_color import fmt


def clr(key):
    """Returns a colorized version of the given string.

    This is accomplished by either returning a hit from the color translation
    map or by calling :py:func:`fmt` on the string and returning the result.

    :param key: string to be colorized
    :type key: str
    """
    global _color_translation_map, _color_on
    if not _color_on:
        return fmt(key)
    val = _color_translation_map.get(key, None)
    if val is None:
        return fmt(key)
    return val
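# --- Illustration (hypothetical, not from the original source) ---
# How clr() resolves a key: an exact hit in _color_translation_map returns the
# pre-rendered colorized template; any other string falls through to fmt().
# The example keys below assume the translation map defined elsewhere in this
# module.
#
#     clr('Starting >>> {:<{}}').format('my_package', 24)  # map hit
#     clr('@{rf}no such key@|')                            # falls through to fmt()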
def main(opts):
    try:
        # Load a context with initialization
        ctx = Context.load(opts.workspace, strict=True)

        # Initialize the workspace if necessary
        if ctx:
            print('Catkin workspace `%s` is already initialized. No action taken.' % ctx.workspace)
        else:
            print('Initializing catkin workspace in `%s`.' % (opts.workspace or os.getcwd()))
            # initialize the workspace
            init_metadata_root(opts.workspace or os.getcwd(), opts.reset)
            ctx = Context.load(opts.workspace)

        print(ctx.summary())

    except IOError as exc:
        # Usually happens if workspace is already underneath another catkin_tools workspace
        print(fmt('[init] @!@{rf}Error:@| Could not initialize catkin workspace: %s' % str(exc)))
        return 1

    return 0
def colorize_run_tests(self, line):
    cline = sanitize(line).rstrip()
    # Apply every matching (pattern, fmt-template) filter to the sanitized line
    for p, r in self.filters:
        if p.match(cline):
            lines = [fmt(r).format(line) for line in cline.splitlines()]
            cline = '\n'.join(lines)
    return cline + '\n'
import os
import sys

from catkin_tools.terminal_color import fmt


def expand_one_verb_alias(sysargs, verb_aliases, used_aliases):
    """Iterate through sysargs looking for expandable verb aliases.

    When a verb alias is found, sysargs is modified to effectively expand the alias.
    The alias is removed from verb_aliases and added to used_aliases.
    After finding and expanding an alias, this function returns True.
    If no alias is found to be expanded, this function returns False.
    """
    cmd = os.path.basename(sys.argv[0])
    for index, arg in enumerate(sysargs):
        if arg.startswith("-"):
            # Not a verb, continue through the arguments
            continue
        if arg in used_aliases:
            print(
                fmt(
                    "@!@{gf}==>@| Expanding alias '@!@{yf}"
                    + arg
                    + "@|' was previously expanded, ignoring this time to prevent infinite recursion."
                )
            )
        if arg in verb_aliases:
            before = [] if index == 0 else sysargs[:index]
            after = [] if index == len(sysargs) else sysargs[index + 1:]
            sysargs[:] = before + verb_aliases[arg].split() + after
            print(
                fmt(
                    "@!@{gf}==>@| Expanding alias "
                    "'@!@{yf}{alias}@|' "
                    "from '@{yf}{before} @!{alias}@{boldoff}{after}@|' "
                    "to '@{yf}{before} @!{expansion}@{boldoff}{after}@|'"
                ).format(
                    alias=arg,
                    expansion=verb_aliases[arg],
                    before=" ".join([cmd] + before),
                    after=(" ".join([""] + after) if after else ""),
                )
            )
            # Prevent the alias from being used again, to prevent infinite recursion
            used_aliases.append(arg)
            del verb_aliases[arg]
            # Return True since one has been found
            return True
    # Return False since no verb alias was found
    return False
import os
import sys
from shlex import quote as cmd_quote

from catkin_tools.terminal_color import fmt


def expand_one_verb_alias(sysargs, verb_aliases, used_aliases):
    """Iterate through sysargs looking for expandable verb aliases.

    When a verb alias is found, sysargs is modified to effectively expand the alias.
    The alias is removed from verb_aliases and added to used_aliases.
    After finding and expanding an alias, this function returns True.
    If no alias is found to be expanded, this function returns False.
    """
    cmd = os.path.basename(sys.argv[0])
    for index, arg in enumerate(sysargs):
        if arg.startswith('-'):
            # Not a verb, continue through the arguments
            continue
        if arg in used_aliases:
            print(fmt(
                "@!@{gf}==>@| Expanding alias '@!@{yf}" + arg +
                "@|' was previously expanded, ignoring this time to prevent infinite recursion."
            ))
        if arg in verb_aliases:
            before = [] if index == 0 else sysargs[:index]
            after = [] if index == len(sysargs) else sysargs[index + 1:]
            sysargs[:] = before + verb_aliases[arg] + after
            print(fmt(
                "@!@{gf}==>@| Expanding alias "
                "'@!@{yf}{alias}@|' "
                "from '@{yf}{before} @!{alias}@{boldoff}{after}@|' "
                "to '@{yf}{before} @!{expansion}@{boldoff}{after}@|'"
            ).format(
                alias=arg,
                expansion=' '.join([cmd_quote(aarg) for aarg in verb_aliases[arg]]),
                before=' '.join([cmd] + before),
                after=(' '.join([''] + after) if after else '')
            ))
            # Prevent the alias from being used again, to prevent infinite recursion
            used_aliases.append(arg)
            del verb_aliases[arg]
            # Return True since one has been found
            return True
    # Return False since no verb alias was found
    return False
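# --- Sketch (assumed, not from the original file) ---
# A driver that applies expand_one_verb_alias until a fixed point: each call
# expands at most one alias in place and returns True, so looping until it
# returns False expands nested aliases while used_aliases guards against
# infinite recursion.
def expand_verb_aliases(sysargs, verb_aliases):
    used_aliases = []
    while expand_one_verb_alias(sysargs, verb_aliases, used_aliases):
        pass
    return sysargs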
def on_stdout_received(self, data):
    lines = data.decode().splitlines()
    clines = []
    for line in lines:
        # Recolor catkin_test_results summary lines of the form:
        #   <label>: N tests, N errors, N failures, N skipped
        match = re.match(r'(.*): (\d+) tests, (\d+) errors, (\d+) failures, (\d+) skipped', line)
        if match:
            line = fmt('@!{}@|: {} tests, @{rf}{} errors@|, @{rf}{} failures@|, @{kf}{} skipped@|')
            line = line.format(*match.groups())
        clines.append(line)
    cdata = '\n'.join(clines) + '\n'
    super(CatkinTestResultsIOBufferProtocol, self).on_stdout_received(cdata.encode())
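# --- Worked example (hypothetical input, not from the original source) ---
# The summary regex above captures the label and the four counters, which are
# then re-injected into the colorized fmt template.
import re

m = re.match(
    r'(.*): (\d+) tests, (\d+) errors, (\d+) failures, (\d+) skipped',
    'gtest-results: 10 tests, 0 errors, 1 failures, 2 skipped')
assert m.groups() == ('gtest-results', '10', '0', '1', '2')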
def create_package_job(context, package, package_path, deps):
    docs_space = os.path.join(context.build_space_abs, '..', 'docs', package.name)
    docs_build_space = os.path.join(context.build_space_abs, 'docs', package.name)
    package_path_abs = os.path.join(context.source_space_abs, package_path)

    # Load rosdoc config, if it exists.
    rosdoc_yaml_path = os.path.join(package_path_abs, 'rosdoc.yaml')
    if os.path.exists(rosdoc_yaml_path):
        with open(rosdoc_yaml_path) as f:
            rosdoc_conf = yaml.safe_load(f)
    else:
        if os.path.exists(os.path.join(package_path_abs, 'src')) or \
                os.path.exists(os.path.join(package_path_abs, 'include')):
            rosdoc_conf = [{'builder': 'doxygen'}]
        else:
            rosdoc_conf = []

    stages = []

    # Create package docs spaces.
    stages.append(FunctionStage('mkdir_docs_build_space', makedirs, path=docs_build_space))

    # Generate msg/srv/action docs with package summary page.
    stages.append(FunctionStage(
        'generate_messages', generate_messages,
        package=package, package_path=package_path, output_path=docs_build_space))
    stages.append(FunctionStage(
        'generate_services', generate_services,
        package=package, package_path=package_path, output_path=docs_build_space))
    stages.append(FunctionStage(
        'generate_package_summary', generate_package_summary,
        package=package, package_path=package_path_abs,
        rosdoc_conf=rosdoc_conf, output_path=docs_build_space))

    # Add steps to run native doc generators, as appropriate. This has to happen after
    # the package summary generates, as we're going to override the subdirectory index.html
    # files generated by that sphinx run.
    for conf in rosdoc_conf:
        try:
            stages.extend(getattr(builders, conf['builder'])(
                conf, package, deps, docs_space, package_path_abs, docs_build_space))
        except AttributeError:
            log(fmt("[document] @!@{yf}Warning:@| Skipping unrecognized rosdoc builder [%s] for package [%s]" %
                    (conf['builder'], package.name)))

    return Job(jid=package.name, deps=deps, env={}, stages=stages)
def colorize_cmake(self, line):
    """Colorizes output from CMake.

    This also prepends the source path to the locations of warnings and errors.

    :param line: one newline-terminated line from `cmake` which needs coloring
    :type line: str
    """
    cline = sanitize(line).rstrip()
    if len(cline.strip()) > 0:
        # Apply the first matching filter: colorized template, then optionally
        # transform the captured groups before substituting them
        for p, r, f in self.filters:
            match = p.match(cline)
            if match is not None:
                cline = fmt(r, reset=False)
                if f is not None:
                    cline = cline.format(*f(match.groups()))
                else:
                    cline = cline.format(*match.groups())
                break
    return cline + '\r\n'
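# --- Sketch of a possible filters table (assumed shape, inferred from the
# loop above; these patterns and templates are illustrative, not the real
# ones) ---
# Each entry is (compiled pattern, fmt template, optional group transformer).
import re

example_filters = [
    # Plain status lines: recolor the '--' prefix
    (re.compile(r'^-- (.+)$'), '@{cf}--@| {}', None),
    # Error locations: a transformer can rewrite the captured groups, e.g. to
    # prepend the source path to the reported location
    (re.compile(r'^CMake Error at (.+)$'), '@{rf}@!CMake Error@| at {}',
     lambda groups: tuple(g.strip() for g in groups)),
]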
from catkin_tools.common import terminal_width
from catkin_tools.common import wide_log
from catkin_tools.notifications import notify
from catkin_tools.terminal_color import fmt
from catkin_tools.terminal_color import sanitize
from catkin_tools.terminal_color import ColorMapper
from catkin_tools.execution import job_server

# This map translates more human-readable format strings into colorized versions
_color_translation_map = {
    # 'output': 'colorized_output'
    '': fmt('@!' + sanitize('') + '@|'),

    # Job starting
    "Starting >>> {:<{}}":
    fmt("Starting @!@{gf}>>>@| @!@{cf}{:<{}}@|"),

    # Job finishing
    "Finished <<< {:<{}} [ {} ]":
    fmt("@!@{kf}Finished@| @{gf}<<<@| @{cf}{:<{}}@| [ @{yf}{}@| ]"),

    "Failed <<< {:<{}} [ {} ]":
    fmt("@!@{rf}Failed@| @{rf}<<<@| @{cf}{:<{}}@| [ @{yf}{}@| ]"),

    # Job abandoning
    "Abandoned <<< {:<{}} [ {} ]":
    fmt("@!@{rf}Abandoned@| @{rf}<<<@| @{cf}{:<{}}@| [ @{yf}{}@| ]"),

    "Depends on failed job {}":
def main(sysargs=None):
    # Initialize config
    try:
        initialize_config()
    except RuntimeError as exc:
        sys.exit("Failed to initialize config: {0}".format(exc))

    # Create a top-level parser
    parser = argparse.ArgumentParser(
        description="catkin command",
        formatter_class=argparse.RawDescriptionHelpFormatter)
    add = parser.add_argument
    add('-a', '--list-aliases', action="store_true", default=False,
        help="lists the current verb aliases and then quits, all other arguments are ignored")
    add('--test-colors', action='store_true', default=False,
        help="prints a color test pattern to the screen and then quits, all other arguments are ignored")

    # Generate a list of verbs available
    verbs = list_verbs()

    # Create the subparsers for each verb and collect the argument preprocessors
    argument_preprocessors = create_subparsers(parser, verbs)

    # Get verb aliases
    verb_aliases = get_verb_aliases()

    # Setup sysargs
    sysargs = sys.argv[1:] if sysargs is None else sysargs
    cmd = os.path.basename(sys.argv[0])

    # Check for --test-colors
    for arg in sysargs:
        if arg == '--test-colors':
            test_colors()
            sys.exit(0)
        if not arg.startswith('-'):
            break

    # Check for --list-aliases
    for arg in sysargs:
        if arg == '--list-aliases' or arg == '-a':
            for alias in sorted(list(verb_aliases.keys())):
                print("{0}: {1}".format(alias, verb_aliases[alias]))
            sys.exit(0)
        if not arg.startswith('-'):
            break

    # Do alias expansion
    expanding_verb_aliases = True
    used_aliases = []
    while expanding_verb_aliases:
        expanding_verb_aliases = False
        for index, arg in enumerate(sysargs):
            if not arg.startswith('-'):
                if arg in used_aliases:
                    print(fmt(
                        "@!@{gf}==>@| Expanding alias '@!@{yf}" + arg +
                        "@|' was previously expanded, ignoring this time to prevent infinite recursion."
                    ))
                if arg in verb_aliases:
                    before = [] if index == 0 else sysargs[:index]
                    after = [] if index == len(sysargs) else sysargs[index + 1:]
                    sysargs = before + verb_aliases[arg].split() + after
                    print(fmt(
                        "@!@{gf}==>@| Expanding alias "
                        "'@!@{yf}{alias}@|' "
                        "from '@{yf}{before} @!{alias}@{boldoff}{after}@|' "
                        "to '@{yf}{before} @!{expansion}@{boldoff}{after}@|'"
                    ).format(
                        alias=arg,
                        expansion=verb_aliases[arg],
                        before=' '.join([cmd] + before),
                        after=(' '.join([''] + after) if after else '')
                    ))
                    expanding_verb_aliases = True
                    # Prevent the alias from being used again, to prevent infinite recursion
                    used_aliases.append(arg)
                    del verb_aliases[arg]
                break

    # Determine the verb, splitting arguments into pre- and post-verb
    verb = None
    pre_verb_args = []
    post_verb_args = []
    for index, arg in enumerate(sysargs):
        # If the arg does not start with a `-` then it is a positional argument;
        # the first positional argument must be the verb
        if not arg.startswith('-'):
            verb = arg
            post_verb_args = sysargs[index + 1:]
            break
        # If the `-h` or `--help` option comes before the verb, parse_args
        if arg in ['-h', '--help']:
            parser.parse_args(sysargs)
        # Otherwise it is a pre-verb option
        pre_verb_args.append(arg)

    # Error on no verb provided
    if verb is None:
        print(parser.format_usage())
        sys.exit("Error: No verb provided.")
    # Error on unknown verb provided
    if verb not in verbs:
        print(parser.format_usage())
        sys.exit("Error: Unknown verb '{0}' provided.".format(verb))

    # First allow the verb's argument preprocessor to strip any args
    # and return any "extra" information it wants as a dict
    processed_post_verb_args, extras = argument_preprocessors[verb](post_verb_args)
    # Then allow argparse to process the left-over post-verb arguments along
    # with the pre-verb arguments and the verb itself
    args = parser.parse_args(pre_verb_args + [verb] + processed_post_verb_args)
    # Extend the argparse result with the extras from the preprocessor
    for key, value in extras.items():
        setattr(args, key, value)

    # Finally call the subparser's main function with the processed args
    # and the extras which the preprocessor may have returned
    sys.exit(args.main(args) or 0)
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This module implements many of the colorization functions used by catkin build"""

from catkin_tools.terminal_color import ansi
from catkin_tools.terminal_color import fmt
from catkin_tools.terminal_color import sanitize
from catkin_tools.terminal_color import ColorMapper

# This map translates more human-readable format strings into colorized versions
_color_translation_map = {
    # 'output': 'colorized_output'
    '': fmt('@!' + sanitize('') + '@|'),

    "[{package}] ==> '{cmd.cmd_str}' in '{location}'":
    fmt("[@{cf}{package}@|] @!@{bf}==>@| '@!{cmd.cmd_str}@|' @{kf}@!in@| '@!{location}@|'"),

    "Starting ==> {package}":
    fmt("Starting @!@{gf}==>@| @!@{cf}{package}@|"),

    "[{package}] {msg}":
    fmt("[@{cf}{package}@|] {msg}"),

    "[{package}] <== '{cmd.cmd_str}' failed with return code '{retcode}'":
    fmt("[@{cf}{package}@|] @!@{rf}<==@| '@!{cmd.cmd_str}@|' @{rf}failed with return code@| '@!{retcode}@|'"),

    "[{package}] <== '{cmd.cmd_str}' finished with return code '{retcode}'":
    fmt("[@{cf}{package}@|] @{gf}<==@| '@!{cmd.cmd_str}@|' finished with return code '@!{retcode}@|'"),
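# --- Illustration (hypothetical, not from the original source) ---
# Consuming the map above: clr() returns the colorized template for an exact
# key, and ordinary str.format then fills in the named placeholders.
#
#     line = clr("Starting ==> {package}").format(package='foo_pkg')
#     line = clr("[{package}] {msg}").format(package='foo_pkg', msg='done')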
def document_workspace(
    context,
    packages=None,
    start_with=None,
    no_deps=False,
    n_jobs=None,
    force_color=False,
    quiet=False,
    interleave_output=False,
    no_status=False,
    limit_status_rate=10.0,
    no_notify=False,
    continue_on_failure=False,
    summarize_build=None
):
    pre_start_time = time.time()

    # Get all the packages in the context source space
    # Suppress warnings since this is a utility function
    workspace_packages = find_packages(context.source_space_abs, exclude_subspaces=True, warnings=[])

    # If no_deps is given, ensure packages to build are provided
    if no_deps and packages is None:
        log(fmt("[document] @!@{rf}Error:@| With no_deps, you must specify packages to build."))
        return

    # Find the list of packages in the workspace
    packages_to_be_documented, packages_to_be_documented_deps, all_packages = determine_packages_to_be_built(
        packages, context, workspace_packages)

    if not no_deps:
        # Extend packages to be documented to include their deps
        packages_to_be_documented.extend(packages_to_be_documented_deps)

    # Also re-sort
    try:
        packages_to_be_documented = topological_order_packages(dict(packages_to_be_documented))
    except AttributeError:
        log(fmt("[document] @!@{rf}Error:@| The workspace packages have a circular "
                "dependency, and cannot be documented. Please run `catkin list "
                "--deps` to determine the problematic package(s)."))
        return

    # Check the number of packages to be documented
    if len(packages_to_be_documented) == 0:
        log(fmt('[document] No packages to be documented.'))

    # Assert start_with package is in the workspace
    verify_start_with_option(
        start_with, packages, all_packages,
        packages_to_be_documented + packages_to_be_documented_deps)

    # Remove packages before start_with
    if start_with is not None:
        for path, pkg in list(packages_to_be_documented):
            if pkg.name != start_with:
                wide_log(fmt("@!@{pf}Skipping@| @{gf}---@| @{cf}{}@|").format(pkg.name))
                packages_to_be_documented.pop(0)
            else:
                break

    # Get the names of all packages to be documented
    packages_to_be_documented_names = [p.name for _, p in packages_to_be_documented]
    packages_to_be_documented_deps_names = [p.name for _, p in packages_to_be_documented_deps]

    jobs = []

    # Construct jobs
    for pkg_path, pkg in all_packages:
        if pkg.name not in packages_to_be_documented_names:
            continue

        # Get actual execution deps
        deps = [
            p.name for _, p in
            get_cached_recursive_build_depends_in_workspace(pkg, packages_to_be_documented)
        ]

        jobs.append(create_package_job(context, pkg, pkg_path, deps))

    # Special job for the post-job summary sphinx step.
    jobs.append(create_summary_job(context, package_names=packages_to_be_documented_names))

    # Queue for communicating status
    event_queue = Queue()

    try:
        # Spin up status output thread
        status_thread = ConsoleStatusController(
            'document',
            ['package', 'packages'],
            jobs,
            n_jobs,
            [pkg.name for _, pkg in context.packages],
            [p for p in context.whitelist],
            [p for p in context.blacklist],
            event_queue,
            show_notifications=not no_notify,
            show_active_status=not no_status,
            show_buffered_stdout=not quiet and not interleave_output,
            show_buffered_stderr=not interleave_output,
            show_live_stdout=interleave_output,
            show_live_stderr=interleave_output,
            show_stage_events=not quiet,
            show_full_summary=(summarize_build is True),
            pre_start_time=pre_start_time,
            active_status_rate=limit_status_rate)
        status_thread.start()

        # Block while running N jobs asynchronously
        try:
            all_succeeded = run_until_complete(execute_jobs(
                'document',
                jobs,
                None,
                event_queue,
                context.log_space_abs,
                max_toplevel_jobs=n_jobs,
                continue_on_failure=continue_on_failure,
                continue_without_deps=False))
        except Exception:
            status_thread.keep_running = False
            all_succeeded = False
            status_thread.join(1.0)
            wide_log(str(traceback.format_exc()))

        status_thread.join(1.0)

        return 0 if all_succeeded else 1

    except KeyboardInterrupt:
        wide_log("[document] Interrupted by user!")
        event_queue.put(None)
        return 130  # 128 + SIGINT
def clean_packages(context, names_of_packages_to_be_cleaned, clean_dependents, verbose, dry_run):
    pre_start_time = time.time()

    # Update the names of packages to be cleaned with dependents
    packages_to_be_cleaned = determine_packages_to_be_cleaned(
        context, clean_dependents, names_of_packages_to_be_cleaned)

    # Construct jobs
    jobs = []
    for path, pkg in packages_to_be_cleaned:

        # Get all build type plugins
        clean_job_creators = {
            ep.name: ep.load()['create_clean_job']
            for ep in pkg_resources.iter_entry_points(group='catkin_tools.jobs')
        }

        # It's a problem if there aren't any build types available
        if len(clean_job_creators) == 0:
            sys.exit(fmt(
                '[clean] @!@{rf}Error:@| No build types available. Please check your catkin_tools installation.'))

        # Determine the job parameters
        clean_job_kwargs = dict(
            context=context,
            package=pkg,
            package_path=path,
            dependencies=[],  # Unused because clean jobs are not parallelized
            dry_run=dry_run,
            clean_build=True,
            clean_devel=True,
            clean_install=True)

        # Create the job based on the build type
        build_type = pkg.get_build_type()
        if build_type in clean_job_creators:
            jobs.append(clean_job_creators[build_type](**clean_job_kwargs))

    if len(jobs) == 0:
        print("[clean] There are no products from the given packages to clean.")
        return False

    # Queue for communicating status
    event_queue = Queue()

    # Spin up status output thread
    status_thread = ConsoleStatusController(
        'clean',
        ['package', 'packages'],
        jobs,
        1,
        [pkg.name for _, pkg in context.packages],
        [p for p in context.buildlist],
        [p for p in context.skiplist],
        event_queue,
        show_notifications=False,
        show_active_status=False,
        show_buffered_stdout=verbose or False,
        show_buffered_stderr=True,
        show_live_stdout=False,
        show_live_stderr=False,
        show_stage_events=False,
        show_full_summary=False,
        pre_start_time=pre_start_time,
        active_status_rate=10.0)
    status_thread.start()

    # Initialize locks (none need to be configured here)
    locks = {}

    # Block while running N jobs asynchronously
    try:
        ej = execute_jobs(
            'clean',
            jobs,
            locks,
            event_queue,
            context.log_space_abs,
            max_toplevel_jobs=1,
            continue_on_failure=True,
            continue_without_deps=False)
        all_succeeded = run_until_complete(ej)
    except Exception:
        status_thread.keep_running = False
        all_succeeded = False
        status_thread.join(1.0)
        wide_log(str(traceback.format_exc()))

    status_thread.join(1.0)

    return all_succeeded
def run(self):
    queued_jobs = []
    active_jobs = []
    completed_jobs = {}
    failed_jobs = []
    warned_jobs = []

    cumulative_times = dict()
    start_times = dict()
    active_stages = dict()

    start_time = self.pre_start_time or time.time()
    last_update_time = time.time()

    # If the status rate is too low, just disable it
    if self.active_status_rate < 1E-3:
        self.show_active_status = False
    else:
        update_duration = 1.0 / self.active_status_rate

    # Disable the wide log padding if the status is disabled
    if not self.show_active_status:
        disable_wide_log()

    while True:
        # Check if we should stop
        if not self.keep_running:
            wide_log(clr('[{}] An internal error occurred!').format(self.label))
            return

        # Write a continuously-updated status line
        if self.show_active_status:

            # Try to get an event from the queue (non-blocking)
            try:
                event = self.event_queue.get(False)
            except Empty:
                # Determine if the status should be shown based on the desired
                # status rate
                elapsed_time = time.time() - last_update_time
                show_status_now = elapsed_time > update_duration

                if show_status_now:
                    # Print live status (overwrites last line)
                    status_line = clr('[{} {} s] [{}/{} complete] [{}/{} jobs] [{} queued]').format(
                        self.label,
                        format_time_delta_short(time.time() - start_time),
                        len(completed_jobs),
                        len(self.jobs),
                        job_server.running_jobs(),
                        job_server.max_jobs(),
                        len(queued_jobs) + len(active_jobs) - len(active_stages))

                    # Show failed jobs
                    if len(failed_jobs) > 0:
                        status_line += clr(' [@!@{rf}{}@| @{rf}failed@|]').format(len(failed_jobs))

                    # Check load / mem
                    if not job_server.load_ok():
                        status_line += clr(' [@!@{rf}High Load@|]')
                    if not job_server.mem_ok():
                        status_line += clr(' [@!@{rf}Low Memory@|]')

                    # Add active jobs
                    if len(active_jobs) == 0:
                        status_line += clr(' @/@!@{kf}Waiting for jobs...@|')
                    else:
                        active_labels = []

                        for j, (s, t, p) in active_stages.items():
                            d = format_time_delta_short(cumulative_times[j] + time.time() - t)
                            if p == '':
                                active_labels.append(clr('[{}:{} - {}]').format(j, s, d))
                            else:
                                active_labels.append(clr('[{}:{} ({}%) - {}]').format(j, s, p, d))

                        status_line += ' ' + ' '.join(active_labels)

                    # Print the status line
                    # wide_log(status_line)
                    wide_log(status_line, rhs='', end='\r')
                    sys.stdout.flush()

                    # Store this update time
                    last_update_time = time.time()
                else:
                    time.sleep(max(0.0, min(update_duration - elapsed_time, 0.01)))

                # Only continue when no event was received
                continue
        else:
            # Try to get an event from the queue (blocking)
            try:
                event = self.event_queue.get(True)
            except Empty:
                break

        # A `None` event is a signal to terminate
        if event is None:
            break

        # Handle the received events
        eid = event.event_id

        if 'JOB_STATUS' == eid:
            queued_jobs = event.data['queued']
            active_jobs = event.data['active']
            completed_jobs = event.data['completed']

            # Check if all jobs have finished in some way
            if all([len(event.data[t]) == 0 for t in ['pending', 'queued', 'active']]):
                break

        elif 'STARTED_JOB' == eid:
            cumulative_times[event.data['job_id']] = 0.0
            wide_log(clr('Starting >>> {:<{}}').format(
                event.data['job_id'],
                self.max_jid_length))

        elif 'FINISHED_JOB' == eid:
            duration = format_time_delta(cumulative_times[event.data['job_id']])

            if event.data['succeeded']:
                wide_log(clr('Finished <<< {:<{}} [ {} ]').format(
                    event.data['job_id'],
                    self.max_jid_length,
                    duration))
            else:
                failed_jobs.append(event.data['job_id'])
                wide_log(clr('Failed <<< {:<{}} [ {} ]').format(
                    event.data['job_id'],
                    self.max_jid_length,
                    duration))

        elif 'ABANDONED_JOB' == eid:
            # Create a human-readable reason string
            if 'DEP_FAILED' == event.data['reason']:
                direct = event.data['dep_job_id'] == event.data['direct_dep_job_id']
                if direct:
                    reason = clr('Depends on failed job {}').format(event.data['dep_job_id'])
                else:
                    reason = clr('Depends on failed job {} via {}').format(
                        event.data['dep_job_id'],
                        event.data['direct_dep_job_id'])
            elif 'PEER_FAILED' == event.data['reason']:
                reason = clr('Unrelated job failed')
            elif 'MISSING_DEPS' == event.data['reason']:
                reason = clr('Depends on unknown jobs: {}').format(
                    ', '.join([fmt('@{boldon}{}@{boldoff}', reset=False).format(jid)
                               for jid in event.data['dep_ids']]))

            wide_log(clr('Abandoned <<< {:<{}} [ {} ]').format(
                event.data['job_id'],
                self.max_jid_length,
                reason))

        elif 'STARTED_STAGE' == eid:
            active_stages[event.data['job_id']] = [event.data['stage_label'], event.time, '']
            start_times[event.data['job_id']] = event.time

            if self.show_stage_events:
                wide_log(clr('Starting >> {}:{}').format(
                    event.data['job_id'],
                    event.data['stage_label']))

        elif 'STAGE_PROGRESS' == eid:
            active_stages[event.data['job_id']][2] = event.data['percent']

        elif 'SUBPROCESS' == eid:
            if self.show_stage_events:
                wide_log(clr('Subprocess > {}:{} `{}`').format(
                    event.data['job_id'],
                    event.data['stage_label'],
                    event.data['stage_repro']))

        elif 'FINISHED_STAGE' == eid:
            # Get the stage duration
            duration = event.time - start_times[event.data['job_id']]
            cumulative_times[event.data['job_id']] += duration

            # This is no longer the active stage for this job
            del active_stages[event.data['job_id']]

            header_border = None
            header_border_file = sys.stdout
            header_title = None
            header_title_file = sys.stdout
            lines = []
            footer_title = None
            footer_title_file = sys.stdout
            footer_border = None
            footer_border_file = sys.stdout

            # Generate headers / borders for output
            if event.data['succeeded']:
                footer_title = clr('Finished << {}:{}').format(
                    event.data['job_id'],
                    event.data['stage_label'])

                if len(event.data['stderr']) > 0:
                    # Mark that this job warned about something
                    if event.data['job_id'] not in warned_jobs:
                        warned_jobs.append(event.data['job_id'])

                    # Output contains warnings
                    header_border = clr('@!@{yf}' + '_' * (terminal_width() - 1) + '@|')
                    header_border_file = sys.stderr
                    header_title = clr('Warnings << {}:{} {}').format(
                        event.data['job_id'],
                        event.data['stage_label'],
                        event.data['logfile_filename'])
                    header_title_file = sys.stderr
                    footer_border = clr('@{yf}' + '.' * (terminal_width() - 1) + '@|')
                    footer_border_file = sys.stderr
                else:
                    # Normal output, no warnings
                    header_title = clr('Output << {}:{} {}').format(
                        event.data['job_id'],
                        event.data['stage_label'],
                        event.data['logfile_filename'])

                # Don't print footer title
                if not self.show_stage_events:
                    footer_title = None
            else:
                # Output contains errors
                header_border = clr('@!@{rf}' + '_' * (terminal_width() - 1) + '@|')
                header_border_file = sys.stderr
                header_title = clr('Errors << {}:{} {}').format(
                    event.data['job_id'],
                    event.data['stage_label'],
                    event.data['logfile_filename'])
                header_title_file = sys.stderr
                footer_border = clr('@{rf}' + '.' * (terminal_width() - 1) + '@|')
                footer_border_file = sys.stderr

                footer_title = clr('Failed << {}:{:<{}} [ Exited with code {} ]').format(
                    event.data['job_id'],
                    event.data['stage_label'],
                    max(0, self.max_jid_length - len(event.data['job_id'])),
                    event.data['retcode'])
                footer_title_file = sys.stderr

            lines_target = sys.stdout
            if self.show_buffered_stdout:
                if len(event.data['interleaved']) > 0:
                    lines = [
                        line + '\n'
                        for line in event.data['interleaved'].splitlines()
                        if (self.show_compact_io is False or len(line.strip()) > 0)
                    ]
                else:
                    header_border = None
                    header_title = None
                    footer_border = None
            elif self.show_buffered_stderr:
                if len(event.data['stderr']) > 0:
                    lines = [
                        line + '\n'
                        for line in event.data['stderr'].splitlines()
                        if (self.show_compact_io is False or len(line.strip()) > 0)
                    ]
                    lines_target = sys.stderr
                else:
                    header_border = None
                    header_title = None
                    footer_border = None

            if len(lines) > 0:
                if self.show_repro_cmd:
                    if event.data['repro'] is not None:
                        lines.append(clr('@!@{kf}{}@|\n').format(event.data['repro']))

                # Print the output
                if header_border:
                    wide_log(header_border, file=header_border_file)
                if header_title:
                    wide_log(header_title, file=header_title_file)
                if len(lines) > 0:
                    wide_log(''.join(lines), end='\n', file=lines_target)
                if footer_border:
                    wide_log(footer_border, file=footer_border_file)
                if footer_title:
                    wide_log(footer_title, file=footer_title_file)

        elif 'STDERR' == eid:
            if self.show_live_stderr and len(event.data['data']) > 0:
                wide_log(self.format_interleaved_lines(event.data), file=sys.stderr)

        elif 'STDOUT' == eid:
            if self.show_live_stdout and len(event.data['data']) > 0:
                wide_log(self.format_interleaved_lines(event.data))

        elif 'MESSAGE' == eid:
            wide_log(event.data['msg'])

    # Print the full summary
    if self.show_full_summary:
        self.print_exec_summary(completed_jobs, warned_jobs, failed_jobs)

    # Print a compact summary
    if self.show_summary or self.show_full_summary:
        self.print_compact_summary(completed_jobs, warned_jobs, failed_jobs)

    # Print final runtime
    wide_log(clr('[{}] Runtime: {} total.').format(
        self.label,
        format_time_delta(time.time() - start_time)))
def create_package_job(context, package, package_path, deps, doc_deps):
    docs_space = os.path.join(context.docs_space_abs, package.name)
    docs_build_space = os.path.join(context.build_space_abs, 'docs', package.name)
    package_path_abs = os.path.join(context.source_space_abs, package_path)
    package_meta_path = context.package_metadata_path(package)

    # Load rosdoc config, if it exists.
    rosdoc_yaml_path = os.path.join(package_path_abs, 'rosdoc.yaml')
    for export in package.exports:
        if export.tagname == "rosdoc":
            config = export.attributes.get('config', '')
            if config:
                rosdoc_yaml_path_temp = os.path.join(package_path_abs, config)
                if os.path.isfile(rosdoc_yaml_path_temp):
                    # Stop at the first exported configuration file that exists
                    rosdoc_yaml_path = rosdoc_yaml_path_temp
                    break
    if os.path.isfile(rosdoc_yaml_path):
        with open(rosdoc_yaml_path) as f:
            rosdoc_conf = yaml.full_load(f)
    else:
        if os.path.isdir(os.path.join(package_path_abs, 'src')) or \
                os.path.isdir(os.path.join(package_path_abs, 'include')):
            rosdoc_conf = [{'builder': 'doxygen'}]
        else:
            rosdoc_conf = []

    stages = []

    # Create package docs spaces.
    stages.append(FunctionStage('mkdir_docs_build_space', makedirs, path=docs_build_space))

    # Generate msg/srv/action docs with package summary page.
    stages.append(FunctionStage(
        'generate_messages', generate_messages,
        package=package, package_path=package_path, output_path=docs_build_space))
    stages.append(FunctionStage(
        'generate_services', generate_services,
        package=package, package_path=package_path, output_path=docs_build_space))
    stages.append(FunctionStage(
        'generate_package_summary', generate_package_summary,
        package=package, package_path=package_path_abs,
        rosdoc_conf=rosdoc_conf, output_path=docs_build_space))

    # Cache document config
    stages.append(FunctionStage(
        'cache_rosdoc_config', yaml_dump_file,
        contents=rosdoc_conf,
        dest_path=os.path.join(package_meta_path, 'rosdoc.yaml')))

    job_env = {}

    # Add steps to run native doc generators, as appropriate. This has to happen after
    # the package summary generates, as we're going to override the subdirectory index.html
    # files generated by that sphinx run.
    for conf in rosdoc_conf:
        try:
            builder = conf['builder']
            if builder == 'doxygen':
                docs_space = os.path.realpath(docs_space)
                docs_build_space = os.path.realpath(docs_build_space)
                package_path_abs = os.path.realpath(package_path_abs)
            stages.extend(getattr(builders, builder)(
                conf, package, deps, doc_deps, docs_space,
                package_path_abs, docs_build_space, job_env))
        except AttributeError:
            log(fmt("[document] @!@{yf}Warning:@| Skipping unrecognized rosdoc builder [%s] for package [%s]" %
                    (conf['builder'], package.name)))

    return Job(jid=package.name, deps=deps, env=job_env, stages=stages)
def main(opts):
    try:
        sysargs = sys.argv[1:]

        # Deprecated options
        deprecated_args = [
            ('--blacklist', '--skiplist'),
            ('--no-blacklist', '--no-skiplist'),
            ('--whitelist', '--buildlist'),
            ('--no-whitelist', '--no-buildlist')]
        used_deprecated_args = [(old, new) for old, new in deprecated_args if old in sysargs]
        if any(used_deprecated_args):
            print(fmt('@!@{rf}WARNING:@| Some arguments are deprecated and will be'
                      ' removed in a future release.\n'))
            print('Please switch to using their replacements as follows:')
            for old_arg, new_arg in used_deprecated_args:
                print(" - '{}' is deprecated, use '{}' instead".format(old_arg, new_arg))
            print()

        # Determine if the user is trying to perform some action, in which
        # case, the workspace should be automatically initialized
        ignored_opts = ['main', 'verb']
        actions = [v for k, v in vars(opts).items() if k not in ignored_opts]
        no_action = not any(actions)

        # Handle old argument names necessary for Context.load
        if opts.buildlist is not None:
            opts.whitelist = opts.buildlist
            del opts.buildlist
        if opts.skiplist is not None:
            opts.blacklist = opts.skiplist
            del opts.skiplist

        # Try to find a metadata directory to get context defaults;
        # otherwise use the specified directory
        context = Context.load(
            opts.workspace,
            opts.profile,
            opts,
            append=opts.append_args,
            remove=opts.remove_args)

        do_init = opts.init or not no_action
        summary_notes = []

        if not context.initialized() and do_init:
            summary_notes.append(clr('@!@{cf}Initialized new catkin workspace in `{}`@|').format(context.workspace))

        if context.initialized() or do_init:
            Context.save(context)

        if opts.mkdirs and not context.source_space_exists():
            os.makedirs(context.source_space_abs)

        print(context.summary(notes=summary_notes))

    except IOError as exc:
        print(clr("@!@{rf}Error:@| Could not configure catkin workspace: {}").format(exc), file=sys.stderr)
        return 1

    return 0
def test_workspace(
    context,
    packages=None,
    tests=None,
    list_tests=False,
    start_with=None,
    n_jobs=None,
    force_color=False,
    quiet=False,
    interleave_output=False,
    no_status=False,
    limit_status_rate=10.0,
    no_notify=False,
    summarize_build=None
):
    pre_start_time = time.time()

    # Get our list of packages based on what's in the source space and our
    # command line switches.
    packages_to_test = get_packages_to_test(context, packages)
    if len(packages_to_test) == 0:
        log(fmt('[test] No tests in the available packages.'))

    # Get the full list of tests available in those packages, as configured.
    packages_tests = get_packages_tests(context, packages_to_test)

    if list_tests:
        # Don't build or run, just list available targets.
        log(fmt('[test] Tests available in workspace packages:'))
        for package, tests in sorted(packages_tests):
            log(fmt('[test] * %s' % package.name))
            for test in sorted(tests):
                log(fmt('[test] - %s' % test))
        return 0

    else:
        jobs = []

        # Construct jobs for running tests.
        for package, package_tests in packages_tests:
            jobs.append(create_package_job(context, package, package_tests))

        package_names = [p[0].name for p in packages_tests]
        jobs.append(create_results_check_job(context, package_names))

        # Queue for communicating status.
        event_queue = Queue()

        try:
            # Spin up status output thread.
            status_thread = ConsoleStatusController(
                'test',
                ['package', 'packages'],
                jobs,
                n_jobs,
                [pkg.name for _, pkg in context.packages],
                [p for p in context.whitelist],
                [p for p in context.blacklist],
                event_queue,
                show_notifications=not no_notify,
                show_active_status=not no_status,
                show_buffered_stdout=not quiet and not interleave_output,
                show_buffered_stderr=not interleave_output,
                show_live_stdout=interleave_output,
                show_live_stderr=interleave_output,
                show_stage_events=not quiet,
                show_full_summary=(summarize_build is True),
                pre_start_time=pre_start_time,
                active_status_rate=limit_status_rate)
            status_thread.start()

            # Block while running N jobs asynchronously
            try:
                all_succeeded = run_until_complete(execute_jobs(
                    'test',
                    jobs,
                    None,
                    event_queue,
                    context.log_space_abs,
                    max_toplevel_jobs=n_jobs))
            except Exception:
                status_thread.keep_running = False
                all_succeeded = False
                status_thread.join(1.0)
                wide_log(str(traceback.format_exc()))

            status_thread.join(1.0)

            return 0 if all_succeeded else 1

        except KeyboardInterrupt:
            wide_log("[test] Interrupted by user!")
            event_queue.put(None)
            return 130  # 128 + SIGINT
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This module implements many of the colorization functions used by catkin clean"""

from catkin_tools.terminal_color import ansi
from catkin_tools.terminal_color import fmt
from catkin_tools.terminal_color import sanitize
from catkin_tools.terminal_color import ColorMapper

# This map translates more human-readable format strings into colorized versions
_color_translation_map = {
    # 'output': 'colorized_output'
    '': fmt('@!' + sanitize('') + '@|'),

    "[{package}] ==> '{cmd.cmd_str}' in '{location}'":
    fmt("[@{cf}{package}@|] @!@{bf}==>@| '@!{cmd.cmd_str}@|' @{kf}@!in@| '@!{location}@|'"),

    "Starting ==> {package}":
    fmt("Starting @!@{gf}==>@| @!@{cf}{package}@|"),

    "[{package}] {msg}":
    fmt("[@{cf}{package}@|] {msg}"),

    "[{package}] <== '{cmd.cmd_str}' failed with return code '{retcode}'":
    fmt("[@{cf}{package}@|] @!@{rf}<==@| '@!{cmd.cmd_str}@|' @{rf}failed with return code@| '@!{retcode}@|'"),

    "[{package}] <== '{cmd.cmd_str}' finished with return code '{retcode}'":
    fmt("[@{cf}{package}@|] @{gf}<==@| '@!{cmd.cmd_str}@|' finished with return code '@!{retcode}@|'"),

    # This key is deliberately truncated mid-format-spec: the caller appends a
    # computed field width and the rest of the template at runtime, so the
    # trailing ANSI reset is stripped to let the color continue.
    "Finished <== {package:<":
    fmt("@!@{kf}Finished@| @{gf}<==@| @{cf}{package:<").rstrip(ansi('reset')),
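# --- Sketch of the consuming side (assumed; the caller is not shown in this
# fragment) ---
# Because the stored template ends mid-format-spec, a caller can splice in a
# field width computed at runtime before formatting:
#
#     width = 24  # e.g. length of the longest package name
#     template = clr("Finished <== {package:<") + str(width) + '}'
#     print(template.format(package='foo_pkg'))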
def main(sysargs=None):
    # Initialize config
    try:
        initialize_config()
    except RuntimeError as exc:
        sys.exit("Failed to initialize config: {0}".format(exc))

    # Create a top-level parser
    parser = argparse.ArgumentParser(description="catkin command")
    add = parser.add_argument
    add('-a', '--list-aliases', action="store_true", default=False,
        help="lists the current verb aliases and then quits, all other arguments are ignored")
    add('--test-colors', action='store_true', default=False,
        help="prints a color test pattern to the screen and then quits, all other arguments are ignored")

    # Generate a list of verbs available
    verbs = list_verbs()

    # Create the subparsers for each verb and collect the argument preprocessors
    argument_preprocessors = create_subparsers(parser, verbs)

    # Get verb aliases
    verb_aliases = get_verb_aliases()

    # Setup sysargs
    sysargs = sys.argv[1:] if sysargs is None else sysargs
    cmd = os.path.basename(sys.argv[0])

    # Check for --test-colors
    for arg in sysargs:
        if arg == '--test-colors':
            test_colors()
            sys.exit(0)
        if not arg.startswith('-'):
            break

    # Check for --list-aliases
    for arg in sysargs:
        if arg == '--list-aliases' or arg == '-a':
            for alias in sorted(list(verb_aliases.keys())):
                print("{0}: {1}".format(alias, verb_aliases[alias]))
            sys.exit(0)
        if not arg.startswith('-'):
            break

    # Do alias expansion
    expanding_verb_aliases = True
    used_aliases = []
    while expanding_verb_aliases:
        expanding_verb_aliases = False
        for index, arg in enumerate(sysargs):
            if not arg.startswith('-'):
                if arg in used_aliases:
                    print(fmt(
                        "@!@{gf}==>@| Expanding alias '@!@{yf}" + arg +
                        "@|' was previously expanded, ignoring this time to prevent infinite recursion."
                    ))
                if arg in verb_aliases:
                    before = [] if index == 0 else sysargs[:index]
                    after = [] if index == len(sysargs) else sysargs[index + 1:]
                    sysargs = before + verb_aliases[arg].split() + after
                    print(fmt(
                        "@!@{gf}==>@| Expanding alias "
                        "'@!@{yf}{alias}@|' "
                        "from '@{yf}{before} @!{alias}@{boldoff}{after}@|' "
                        "to '@{yf}{before} @!{expansion}@{boldoff}{after}@|'"
                    ).format(
                        alias=arg,
                        expansion=verb_aliases[arg],
                        before=' '.join([cmd] + before),
                        after=(' '.join([''] + after) if after else '')
                    ))
                    expanding_verb_aliases = True
                    # Prevent the alias from being used again, to prevent infinite recursion
                    used_aliases.append(arg)
                    del verb_aliases[arg]
                break

    # Determine the verb, splitting arguments into pre- and post-verb
    verb = None
    pre_verb_args = []
    post_verb_args = []
    for index, arg in enumerate(sysargs):
        # If the arg does not start with a `-` then it is a positional argument;
        # the first positional argument must be the verb
        if not arg.startswith('-'):
            verb = arg
            post_verb_args = sysargs[index + 1:]
            break
        # If the `-h` or `--help` option comes before the verb, parse_args
        if arg in ['-h', '--help']:
            parser.parse_args(sysargs)
        # Otherwise it is a pre-verb option
        pre_verb_args.append(arg)

    # Error on no verb provided
    if verb is None:
        print(parser.format_usage())
        sys.exit("Error: No verb provided.")
    # Error on unknown verb provided
    if verb not in verbs:
        print(parser.format_usage())
        sys.exit("Error: Unknown verb '{0}' provided.".format(verb))

    # First allow the verb's argument preprocessor to strip any args
    # and return any "extra" information it wants as a dict
    processed_post_verb_args, extras = argument_preprocessors[verb](post_verb_args)
    # Then allow argparse to process the left-over post-verb arguments along
    # with the pre-verb arguments and the verb itself
    args = parser.parse_args(pre_verb_args + [verb] + processed_post_verb_args)
    # Extend the argparse result with the extras from the preprocessor
    for key, value in extras.items():
        setattr(args, key, value)

    # Finally call the subparser's main function with the processed args
    # and the extras which the preprocessor may have returned
    sys.exit(args.main(args) or 0)