def push_changes(base_path, vcs_type, tag_name, dry_run=False):
    """Push the release commit and the new tag to the remote repository.

    :param base_path: root of the local working copy
    :param vcs_type: version control system identifier (e.g. 'git')
    :param tag_name: name of the tag to push (git only)
    :param dry_run: if True, only collect the commands without running them
    :returns: the list of commands that were (or would be) executed
    :raises RuntimeError: if pushing the changes or the tag fails
    """
    executable = _find_executable(vcs_type)

    def _run(command, failure_text):
        # Execute the command unless this is a dry run; surface any
        # non-zero exit as a RuntimeError with a colored message.
        if dry_run:
            return
        try:
            subprocess.check_call(command, cwd=base_path)
        except subprocess.CalledProcessError as e:
            raise RuntimeError(fmt('@{rf}' + failure_text % str(e)))

    commands = []

    # push the committed changes
    push_cmd = [executable, 'push']
    if vcs_type == 'git':
        push_cmd.extend([get_git_remote(base_path), get_git_branch(base_path)])
    commands.append(push_cmd)
    _run(push_cmd, 'Failed to push changes to the repository: %s\n\nYou need to manually push the changes/tag to the repository.')

    # push the release tag (git only)
    if vcs_type in ['git']:
        tag_cmd = [executable, 'push', get_git_remote(base_path), tag_name]
        commands.append(tag_cmd)
        _run(tag_cmd, 'Failed to push tag to the repository: %s\n\nYou need to manually push the new tag to the repository.')

    return commands
def prompt_continue(msg, default):
    """Prompt the user for continuation."""
    suffix = ' @{yf}[Y/n]@{reset}?' if default else ' @{yf}[y/N]@{reset}?'
    msg += fmt(suffix)
    fallback = 'y' if default else 'n'
    while True:
        _flush_stdin()
        try:
            answer = raw_input(msg)
        except EOFError:
            # treat closed stdin like an empty answer
            answer = ''
        answer = answer.lower() if answer else fallback
        if answer in ('y', 'n'):
            return answer == 'y'
        options = ', '.join('@{boldon}%s@{boldoff}' % c for c in ['y', 'Y', 'n', 'N'])
        print(
            fmt("@{yf}Response '@{boldon}%s@{boldoff}' was not recognized, please use one of the following options: %s" % (answer, options)),
            file=sys.stderr)
def prompt_continue(msg, default):
    """Ask a yes/no question until an understandable answer is given."""
    if default:
        msg += fmt(' @{yf}[Y/n]@{reset}?')
    else:
        msg += fmt(' @{yf}[y/N]@{reset}?')
    valid = {'y': True, 'n': False}
    while True:
        _flush_stdin()
        try:
            reply = raw_input(msg)
        except EOFError:
            reply = ''
        if not reply:
            # empty input selects the default answer
            reply = 'y' if default else 'n'
        else:
            reply = reply.lower()
        if reply in valid:
            return valid[reply]
        choices = ', '.join(['@{boldon}%s@{boldoff}' % x for x in ['y', 'Y', 'n', 'N']])
        print(
            fmt("@{yf}Response '@{boldon}%s@{boldoff}' was not recognized, please use one of the following options: %s" % (reply, choices)),
            file=sys.stderr)
def push_changes(base_path, vcs_type, dry_run=False):
    """Push the committed changes and (for git) all tags to the remote.

    :param base_path: root of the local working copy
    :param vcs_type: version control system identifier (e.g. 'git')
    :param dry_run: if True, only collect the commands without running them
    :returns: the list of commands that were (or would be) executed
    :raises RuntimeError: if pushing the changes or the tags fails
    """
    commands = []

    def _execute(command, failure_template):
        # Run the given command; dry runs only record it.
        if not dry_run:
            try:
                subprocess.check_call(command, cwd=base_path)
            except subprocess.CalledProcessError as e:
                raise RuntimeError(fmt('@{rf}' + failure_template % str(e)))

    # push changes to the repository
    cmd = [_find_executable(vcs_type), 'push']
    if vcs_type == 'git':
        cmd.extend(get_git_remote_and_branch(base_path))
    commands.append(cmd)
    _execute(cmd, 'Failed to push changes to the repository: %s\n\nYou need to manually push the changes/tag to the repository.')

    # push tags to the repository
    if vcs_type in ['git']:
        cmd = [_find_executable(vcs_type), 'push', '--tags']
        commands.append(cmd)
        _execute(cmd, 'Failed to push tag to the repository: %s\n\nYou need to manually push the new tag to the repository.')

    return commands
def tag_repository(base_path, vcs_type, tag_name, has_tag_prefix, dry_run=False):
    """Create the release tag in the local repository.

    :param base_path: root of the local working copy
    :param vcs_type: one of 'bzr', 'git', 'hg', 'svn'
    :param tag_name: name of the tag to create
    :param has_tag_prefix: True if a tag prefix was supplied; required when
        tagging an SVN subfolder so the tag name stays unique
    :param dry_run: if True, compute the command without executing it
    :returns: the command that was (or would be) executed, ``list``
    :raises RuntimeError: if the SVN layout cannot be interpreted or tagging fails
    """
    if vcs_type in ['bzr', 'git', 'hg']:
        cmd = [_find_executable(vcs_type), 'tag', tag_name]
    elif vcs_type == 'svn':
        # strip the leading 'URL: ' prefix from the remotes query output
        svn_url = vcs_remotes(base_path, 'svn')[5:]
        if os.path.basename(svn_url) == 'trunk':
            # tag "trunk"
            base_url = os.path.dirname(svn_url)
        elif os.path.basename(os.path.dirname(svn_url)) == 'branches':
            # tag a direct subfolder of "branches"
            base_url = os.path.dirname(os.path.dirname(svn_url))
        elif svn_url.rfind('/trunk/') != -1:
            # tag any subfolder of trunk but require a tag prefix
            if not has_tag_prefix:
                raise RuntimeError(fmt('@{rf}When tagging a subfolder you must use --tag-prefix to make your tag name unique'))
            base_url = svn_url[:svn_url.rfind('/trunk/')]
        elif svn_url.rfind('/branches/') != -1:
            # tag any subfolder of a branch but require a tag prefix
            if not has_tag_prefix:
                raise RuntimeError(fmt('@{rf}When tagging a subfolder you must use --tag-prefix to make your tag name unique'))
            base_url = svn_url[:svn_url.rfind('/branches/')]
        else:
            raise RuntimeError(fmt("@{rf}Could not determine base URL of SVN repository '%s'" % svn_url))
        tag_url = '%s/tags/%s' % (base_url, tag_name)
        # subprocess runs without a shell, so embedding literal double quotes
        # in the -m argument would make them part of the commit log message
        cmd = ['svn', 'cp', '-m', 'tagging %s' % tag_name, svn_url, tag_url]
    else:
        assert False, 'Unknown vcs type: %s' % vcs_type
    if not dry_run:
        try:
            subprocess.check_call(cmd, cwd=base_path)
        except subprocess.CalledProcessError as e:
            raise RuntimeError(fmt('@{rf}Failed to tag repository: %s' % str(e)))
    return cmd
def tag_repository(base_path, vcs_type, tag_name, has_tag_prefix, dry_run=False):
    """Create the tag ``tag_name`` in the repository at ``base_path``."""
    if vcs_type == 'svn':
        # drop the leading 'URL: ' from the remotes output
        svn_url = vcs_remotes(base_path, 'svn')[5:]
        parent = os.path.dirname(svn_url)
        if os.path.basename(svn_url) == 'trunk':
            # tagging trunk itself
            base_url = parent
        elif os.path.basename(parent) == 'branches':
            # tagging a direct subfolder of "branches"
            base_url = os.path.dirname(parent)
        else:
            # tagging a nested subfolder of trunk or of a branch
            marker = None
            for candidate in ('/trunk/', '/branches/'):
                if svn_url.rfind(candidate) != -1:
                    marker = candidate
                    break
            if marker is None:
                raise RuntimeError(fmt("@{rf}Could not determine base URL of SVN repository '%s'" % svn_url))
            # nested subfolders need a unique tag prefix
            if not has_tag_prefix:
                raise RuntimeError(fmt('@{rf}When tagging a subfolder you must use --tag-prefix to make your tag name unique'))
            base_url = svn_url[:svn_url.rfind(marker)]
        tag_url = '%s/tags/%s' % (base_url, tag_name)
        cmd = ['svn', 'cp', '-m', '"tagging %s"' % tag_name, svn_url, tag_url]
    elif vcs_type in ['bzr', 'git', 'hg']:
        cmd = [_find_executable(vcs_type), 'tag', tag_name]
    else:
        assert False, 'Unknown vcs type: %s' % vcs_type
    if not dry_run:
        try:
            subprocess.check_call(cmd, cwd=base_path)
        except subprocess.CalledProcessError as e:
            raise RuntimeError(fmt('@{rf}Failed to tag repository: %s' % str(e)))
    return cmd
def test_terminal_colors(self):
    """Check ANSI formatting, sanitizing and unicode pass-through."""
    # since other tests might disable ansi colors
    # we need to ensure they are enabled
    enable_ANSI_colors()
    assert ansi('reset') != ''

    # @_ ... @| renders as underline followed by two resets
    assert fmt('@_This is underlined@|') == '\033[4mThis is underlined\033[0m\033[0m'

    # sanitize() escapes the special markers so fmt() keeps them literal
    sanitized = sanitize('This has bad stuff @! @/ @_ @| OK!')
    assert fmt(sanitized) == 'This has bad stuff @! @/ @_ @| OK!\033[0m'

    # sanitizing a plain character leaves it untouched
    assert sanitize(char(2018)) == char(2018)
def has_changes(base_path, path, vcs_type):
    """Return True if ``path`` has uncommitted modifications.

    :raises RuntimeError: if the diff command fails
    """
    diff_cmd = [_find_executable(vcs_type), 'diff', path]
    try:
        raw = subprocess.check_output(diff_cmd, cwd=base_path)
    except subprocess.CalledProcessError as e:
        raise RuntimeError(fmt("@{rf}Failed to check if '@{boldon}%s@{boldoff}' has modifications: %s" % (path, str(e))))
    # a non-empty diff means there are local changes
    return bool(raw.decode('utf-8').rstrip())
def test_terminal_colors(self):
    """Verify fmt()/sanitize() behavior with ANSI colors force-enabled."""
    enable_ANSI_colors()  # other tests might have disabled colors
    assert ansi('reset') != ''

    underlined = fmt('@_This is underlined@|')
    assert underlined == '\033[4mThis is underlined\033[0m\033[0m'

    noisy = 'This has bad stuff @! @/ @_ @| OK!'
    expected = noisy + '\033[0m'
    assert fmt(sanitize(noisy)) == expected

    glyph = char(2018)
    assert sanitize(glyph) == glyph
def get_git_branch(base_path):
    """Return the name of the currently checked out git branch.

    :raises RuntimeError: if the branch cannot be determined
    """
    cmd = [_find_executable('git'), 'rev-parse', '--abbrev-ref', 'HEAD']
    try:
        output = subprocess.check_output(cmd, cwd=base_path)
    except subprocess.CalledProcessError as e:
        raise RuntimeError(fmt('@{rf}Could not determine git branch: %s' % str(e)))
    # strip the trailing newline from the git output
    return output.decode('utf-8').rstrip()
def get_git_remote_and_branch(base_path):
    """Return ``[remote, branch]`` for the currently checked out git branch.

    :raises RuntimeError: if the branch or its tracked remote cannot be determined
    """
    git = _find_executable('git')

    def _capture(command, error_builder):
        # Run a git query and return its stripped stdout.
        try:
            out = subprocess.check_output(command, cwd=base_path)
        except subprocess.CalledProcessError as e:
            raise RuntimeError(fmt('@{rf}%s' % error_builder(e)))
        return out.decode('utf-8').rstrip()

    branch = _capture(
        [git, 'rev-parse', '--abbrev-ref', 'HEAD'],
        lambda e: 'Could not determine git branch: %s' % str(e))
    remote = _capture(
        [git, 'config', '--get', 'branch.%s.remote' % branch],
        lambda e: "Could not determine git remote: %s\n\nMay be the branch '%s' is not tracking a remote branch?" % (str(e), branch))
    return [remote, branch]
def try_repo_push(base_path, vcs_type):
    """Perform a dry-run push to verify that a later real push will succeed."""
    if vcs_type not in ['git']:
        return
    print('Trying to push to remote repository (dry run)...')
    cmd = [_find_executable(vcs_type), 'push']
    if vcs_type == 'git':
        # -n makes git only simulate the push
        cmd += ['-n', get_git_remote(base_path), get_git_branch(base_path)]
    try:
        subprocess.check_call(cmd, cwd=base_path)
    except (subprocess.CalledProcessError, RuntimeError) as e:
        raise RuntimeError(fmt('@{rf}Failed to dry push to repository: %s' % str(e)))
def commit_files(base_path, vcs_type, packages, packages_with_changelogs, message, dry_run=False):
    """Commit the modified package manifests and changelog files.

    :returns: the commit command that was (or would be) executed, ``list``
    :raises RuntimeError: if the commit fails
    """
    manifest_files = [os.path.join(p, PACKAGE_MANIFEST_FILENAME) for p in packages.keys()]
    changelog_files = [path for path, _, _ in packages_with_changelogs.values()]
    cmd = [_find_executable(vcs_type), 'commit', '-m', message] + manifest_files + changelog_files
    if not dry_run:
        try:
            subprocess.check_call(cmd, cwd=base_path)
        except subprocess.CalledProcessError as e:
            raise RuntimeError(fmt('@{rf}Failed to commit package.xml files: %s' % str(e)))
    return cmd
def get_git_remote(base_path):
    """Return the remote tracked by the currently checked out git branch.

    :raises RuntimeError: if the branch is not tracking a remote
    """
    branch = get_git_branch(base_path)
    cmd = [_find_executable('git'), 'config', '--get', 'branch.%s.remote' % branch]
    try:
        output = subprocess.check_output(cmd, cwd=base_path)
    except subprocess.CalledProcessError as e:
        details = 'Could not determine git remote: %s' % str(e)
        details += "\n\nMay be the branch '%s' is not tracking a remote branch?" % branch
        raise RuntimeError(fmt('@{rf}%s' % details))
    return output.decode('utf-8').rstrip()
def check_clean_working_copy(base_path, vcs_type):
    """Return True if the working copy has no uncommitted changes.

    Prints the pending changes and returns False otherwise.

    :param base_path: root of the local working copy
    :param vcs_type: one of 'bzr', 'git', 'hg', 'svn'
    :raises RuntimeError: if the status command fails
    """
    if vcs_type in ['bzr', 'hg', 'svn']:
        cmd = [_find_executable(vcs_type), 'status']
    elif vcs_type in ['git']:
        cmd = [_find_executable(vcs_type), 'status', '-s', '-u']
    else:
        assert False, 'Unknown vcs type: %s' % vcs_type
    try:
        output = subprocess.check_output(cmd, cwd=base_path)
    except subprocess.CalledProcessError as e:
        raise RuntimeError(fmt('@{rf}Failed to check working copy state: %s' % str(e)))
    # check_output returns bytes in Python 3; the previous comparison of
    # bytes against the str '' was always True, so even a clean working
    # copy was reported as dirty - decode before comparing
    output = output.decode('utf-8').rstrip()
    if output != '':
        print(output)
        return False
    return True
def check_clean_working_copy(base_path, vcs_type):
    """Return True when ``base_path`` has no uncommitted modifications."""
    assert vcs_type in ['bzr', 'hg', 'svn', 'git'], 'Unknown vcs type: %s' % vcs_type
    if vcs_type == 'git':
        # short format, including untracked files
        cmd = [_find_executable(vcs_type), 'status', '-s', '-u']
    else:
        cmd = [_find_executable(vcs_type), 'status']
    try:
        status = subprocess.check_output(cmd, cwd=base_path).decode('utf-8').rstrip()
    except subprocess.CalledProcessError as e:
        raise RuntimeError(fmt('@{rf}Failed to check working copy state: %s' % str(e)))
    if status:
        # show the pending changes to the user
        print(status)
        return False
    return True
def colorize_line(line):
    """Return ``line`` with color markup applied, rendered through fmt()."""
    # (line prefix, ordered replacements applied when the prefix matches);
    # the replacement order within each group is significant
    prefixed_replacements = [
        ('-- ~~', [
            # -- ~~  -
            ('~~ ', '@{pf}~~ @|'),
            (' - ', ' - @!@{bf}'),
            ('(', '@|('),
            ('(plain cmake)', '@|(@{rf}plain cmake@|)'),
            ('(unknown)', '@|(@{yf}unknown@|)'),
        ]),
        ('-- +++', [
            # -- +++ add_subdirectory(package)
            ('+++', '@!@{gf}+++@|'),
            ('kin package: \'', 'kin package: \'@!@{bf}'),
            (')', '@|)'),
            ('\'\n', '@|\'\n'),
            ('cmake package: \'', 'cmake package: \'@!@{bf}'),
            ('\'\n', '@|\'\n'),
        ]),
        ('-- ==>', [('-- ==>', '-- @!@{bf}==>@|')]),
        # CMake Warning...
        ('CMake Warning', [('CMake Warning', '@{yf}@!CMake Warning@|')]),
        # ERROR:
        ('ERROR:', [('ERROR:', '@!@{rf}ERROR:@|')]),
        # CMake Error...
        ('CMake Error', [('CMake Error', '@{rf}@!CMake Error@|')]),
        # CMake call stack
        ('Call Stack (most recent call first):', [
            ('Call Stack (most recent call first):', '@{cf}@_Call Stack (most recent call first):@|'),
        ]),
    ]
    cline = sanitize(line)
    cline = cline.replace(
        '-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~',
        '-- @{pf}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~@|')
    for prefix, replacements in prefixed_replacements:
        if line.startswith(prefix):
            for old, new in replacements:
                cline = cline.replace(old, new)
    # plain WARNING lines (any capitalization) are colored yellow wholesale;
    # this cannot co-occur with any of the prefixes above
    if line.lower().startswith('warning'):
        cline = ansi('yf') + cline
    return fmt(cline)
def build_workspace_isolated(
    workspace='.',
    sourcespace=None,
    buildspace=None,
    develspace=None,
    installspace=None,
    merge=False,
    install=False,
    force_cmake=False,
    colorize=True,
    build_packages=None,
    ignore_packages=None,
    quiet=False,
    cmake_args=None,
    make_args=None,
    catkin_make_args=None,
    continue_from_pkg=False,
    only_pkg_with_deps=None,
    destdir=None,
    use_ninja=False,
    use_nmake=False,
    use_gmake=False,
    override_build_tool_check=False
):
    '''
    Runs ``cmake``, ``make`` and optionally ``make install`` for all
    catkin packages in sourcespace_dir.  It creates several folders
    in the current working directory. For non-catkin packages it runs
    ``cmake``, ``make`` and ``make install`` for each, installing it to
    the devel space or install space if the ``install`` option is specified.

    :param workspace: path to the current workspace, ``str``
    :param sourcespace: workspace folder containing catkin packages, ``str``
    :param buildspace: path to build space location, ``str``
    :param develspace: path to devel space location, ``str``
    :param installspace: path to install space (CMAKE_INSTALL_PREFIX), ``str``
    :param merge: if True, build each catkin package into the same
        devel space (not affecting plain cmake packages), ``bool``
    :param install: if True, install all packages to the install space,
        ``bool``
    :param force_cmake: (optional), if True calls cmake explicitly for each
        package, ``bool``
    :param colorize: if True, colorize cmake output and other messages,
        ``bool``
    :param build_packages: specific packages to build (all parent packages
        in the topological order must have been built before), ``str``
    :param ignore_packages: packages to exclude from the topological
        ordering (blacklist)
    :param quiet: if True, hides some build output, ``bool``
    :param cmake_args: additional arguments for cmake, ``[str]``
    :param make_args: additional arguments for make, ``[str]``
    :param catkin_make_args: additional arguments for make but only for catkin
        packages, ``[str]``
    :param continue_from_pkg: indicates whether or not cmi should continue
        when a package is reached, ``bool``
    :param only_pkg_with_deps: only consider the specific packages and their
        recursive dependencies and ignore all other packages in the
        workspace, ``[str]``
    :param destdir: define DESTDIR for cmake/invocation, ``string``
    :param use_ninja: if True, use ninja instead of make, ``bool``
    :param use_nmake: if True, use nmake instead of make, ``bool``
    :param use_gmake: if True, use gmake instead of make, ``bool``
    :param override_build_tool_check: if True, build even if a space was
        built by another tool previously.
    '''
    if not colorize:
        disable_ANSI_colors()

    # Check workspace existance
    if not os.path.exists(workspace):
        sys.exit("Workspace path '{0}' does not exist.".format(workspace))
    workspace = os.path.abspath(workspace)

    # Check source space existance
    if sourcespace is None:
        sourcespace = os.path.join(workspace, 'src')
    if not os.path.exists(sourcespace):
        sys.exit('Could not find source space: {0}'.format(sourcespace))
    print('Base path: ' + str(workspace))
    print('Source space: ' + str(sourcespace))

    # Check build space
    if buildspace is None:
        buildspace = os.path.join(workspace, 'build_isolated')
    if not os.path.exists(buildspace):
        os.mkdir(buildspace)
    print('Build space: ' + str(buildspace))

    # ensure the build space was previously built by catkin_make_isolated
    previous_tool = get_previous_tool_used_on_the_space(buildspace)
    if previous_tool is not None and previous_tool != 'catkin_make_isolated':
        if override_build_tool_check:
            print(fmt(
                "@{yf}Warning: build space at '%s' was previously built by '%s', "
                "but --override-build-tool-check was passed so continuing anyways."
                % (buildspace, previous_tool)))
        else:
            sys.exit(fmt(
                "@{rf}The build space at '%s' was previously built by '%s'. "
                "Please remove the build space or pick a different build space."
                % (buildspace, previous_tool)))
    mark_space_as_built_by(buildspace, 'catkin_make_isolated')

    # Check devel space
    if develspace is None:
        develspace = os.path.join(workspace, 'devel_isolated')
    print('Devel space: ' + str(develspace))

    # ensure the devel space was previously built by catkin_make_isolated
    previous_tool = get_previous_tool_used_on_the_space(develspace)
    if previous_tool is not None and previous_tool != 'catkin_make_isolated':
        if override_build_tool_check:
            print(fmt(
                "@{yf}Warning: devel space at '%s' was previously built by '%s', "
                "but --override-build-tool-check was passed so continuing anyways."
                % (develspace, previous_tool)))
        else:
            sys.exit(fmt(
                "@{rf}The devel space at '%s' was previously built by '%s'. "
                "Please remove the devel space or pick a different devel space."
                % (develspace, previous_tool)))
    mark_space_as_built_by(develspace, 'catkin_make_isolated')

    # Check install space
    if installspace is None:
        installspace = os.path.join(workspace, 'install_isolated')
    print('Install space: ' + str(installspace))

    if cmake_args:
        print("Additional CMake Arguments: " + " ".join(cmake_args))
    else:
        cmake_args = []

    # pick a cmake generator unless the caller already passed -G explicitly
    if not [arg for arg in cmake_args if arg.startswith('-G')]:
        if use_ninja:
            cmake_args += ['-G', 'Ninja']
        elif use_nmake:
            cmake_args += ['-G', 'NMake Makefiles']
        else:
            # no need to check for use_gmake, as it uses the same generator as make
            cmake_args += ['-G', 'Unix Makefiles']
    elif use_ninja or use_nmake:
        print(colorize_line("Error: either specify a generator using '-G...' or '--use-[ninja|nmake]' but not both"))
        sys.exit(1)

    if make_args:
        print("Additional make Arguments: " + " ".join(make_args))
    else:
        make_args = []

    if catkin_make_args:
        print("Additional make Arguments for catkin packages: " + " ".join(catkin_make_args))
    else:
        catkin_make_args = []

    # Find packages
    packages = find_packages(sourcespace, exclude_subspaces=True)
    if not packages:
        print(fmt("@{yf}No packages found in source space: %s@|" % sourcespace))

    # whitelist packages and their dependencies in workspace
    if only_pkg_with_deps:
        package_names = [p.name for p in packages.values()]
        unknown_packages = [name for name in only_pkg_with_deps if name not in package_names]
        if unknown_packages:
            sys.exit('Packages not found in the workspace: %s' % ', '.join(unknown_packages))
        whitelist_pkg_names = get_package_names_with_recursive_dependencies(packages, only_pkg_with_deps)
        print('Whitelisted packages: %s' % ', '.join(sorted(whitelist_pkg_names)))
        packages = {path: p for path, p in packages.items() if p.name in whitelist_pkg_names}

    # verify that specified package exists in workspace
    if build_packages:
        packages_by_name = {p.name: path for path, p in packages.items()}
        unknown_packages = [p for p in build_packages if p not in packages_by_name]
        if unknown_packages:
            sys.exit('Packages not found in the workspace: %s' % ', '.join(unknown_packages))

    # evaluate conditions
    for package in packages.values():
        package.evaluate_conditions(os.environ)

    # Report topological ordering
    ordered_packages = topological_order_packages(packages, blacklisted=ignore_packages)
    unknown_build_types = []
    msg = []
    msg.append('@{pf}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + ('~' * len(str(len(ordered_packages)))))
    msg.append('@{pf}~~@| traversing %d packages in topological order:' % len(ordered_packages))
    for path, package in ordered_packages:
        # a None path marks a circular dependency detected by the ordering
        if path is None:
            print(fmt('@{rf}Error: Circular dependency in subset of packages: @!%s@|' % package))
            sys.exit('Can not build workspace with circular dependency')

        export_tags = [e.tagname for e in package.exports]
        if 'build_type' in export_tags:
            build_type_tag = [e.content for e in package.exports if e.tagname == 'build_type'][0]
        else:
            build_type_tag = 'catkin'
        if build_type_tag == 'catkin':
            msg.append('@{pf}~~@| - @!@{bf}' + package.name + '@|')
        elif build_type_tag == 'cmake':
            msg.append(
                '@{pf}~~@| - @!@{bf}' + package.name + '@|' +
                ' (@!@{cf}plain cmake@|)'
            )
        else:
            msg.append(
                '@{pf}~~@| - @!@{bf}' + package.name + '@|' +
                ' (@{rf}unknown@|)'
            )
            unknown_build_types.append(package)
    msg.append('@{pf}~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~' + ('~' * len(str(len(ordered_packages)))))
    for index in range(len(msg)):
        msg[index] = fmt(msg[index])
    print('\n'.join(msg))

    # Error if there are packages with unknown build_types
    if unknown_build_types:
        print(colorize_line('Error: Packages with unknown build types exist'))
        sys.exit('Can not build workspace with packages of unknown build_type')

    # Check to see if the workspace has changed
    cmake_args_with_spaces = list(cmake_args)
    if develspace:
        cmake_args_with_spaces.append('-DCATKIN_DEVEL_PREFIX=' + develspace)
    if installspace:
        cmake_args_with_spaces.append('-DCMAKE_INSTALL_PREFIX=' + installspace)
    if (
        not force_cmake and
        cmake_input_changed(packages, buildspace, cmake_args=cmake_args_with_spaces, filename='catkin_make_isolated')
    ):
        print('The packages or cmake arguments have changed, forcing cmake invocation')
        force_cmake = True

    ensure_workspace_marker(workspace)

    # Build packages
    pkg_develspace = None
    last_env = None
    for index, path_package in enumerate(ordered_packages):
        path, package = path_package
        if merge:
            pkg_develspace = develspace
        else:
            pkg_develspace = os.path.join(develspace, package.name)
        if not build_packages or package.name in build_packages:
            # once the "continue from" package is reached, build everything after it
            if continue_from_pkg and build_packages and package.name in build_packages:
                build_packages = None
            try:
                print()
                last_env = build_package(
                    path, package,
                    workspace, buildspace, pkg_develspace, installspace,
                    install, force_cmake,
                    quiet, last_env, cmake_args, make_args, catkin_make_args,
                    destdir=destdir, use_ninja=use_ninja, use_nmake=use_nmake, use_gmake=use_gmake,
                    number=index + 1, of=len(ordered_packages)
                )
            except subprocess.CalledProcessError as e:
                _print_build_error(package, e)
                # Let users know how to reproduce
                # First add the cd to the build folder of the package
                cmd = 'cd ' + quote(os.path.join(buildspace, package.name)) + ' && '
                # Then reproduce the command called
                if isinstance(e.cmd, list):
                    # quote arguments to allow copy-n-paste of command
                    cmd += ' '.join([quote(arg) for arg in e.cmd])
                else:
                    cmd += e.cmd
                print(fmt("\n@{rf}Reproduce this error by running:"))
                print(fmt("@{gf}@!==> @|") + cmd + "\n")
                sys.exit('Command failed, exiting.')
            except Exception as e:
                print("Unhandled exception of type '{0}':".format(type(e).__name__))
                import traceback
                traceback.print_exc()
                _print_build_error(package, e)
                sys.exit('Command failed, exiting.')
        else:
            cprint("Skipping package: '@!@{bf}" + package.name + "@|'")
            last_env = get_new_env(package, pkg_develspace, installspace, install, last_env, destdir)

    # Provide a top level devel space environment setup script
    if not os.path.exists(develspace):
        os.makedirs(develspace)
    if not build_packages:
        env_script = 'env'
        if sys.platform == 'win32':
            env_script += '.bat'
            env_script_content = """\
@echo off
REM generated from catkin.builder Python module
call {0} %*
"""
            setup_script_content = """\
@echo off
REM generated from catkin.builder Python module
call "{0}/setup.{1}"
"""
        else:
            env_script += '.sh'
            env_script_content = """\
#!/usr/bin/env sh
# generated from catkin.builder Python module
{0} "$@"
"""
            setup_script_content = """\
#!/usr/bin/env {1}
# generated from catkin.builder Python module
. "{0}/setup.{1}"
"""
            setup_sh_content = """\
#!/usr/bin/env sh
# generated from catkin.builder Python module
if [ ! -z "$_CATKIN_SETUP_DIR" ] && [ "$_CATKIN_SETUP_DIR" != "{1}" ]; then
  echo "Relocation of this workspace is not supported"
  return 1
fi
_CATKIN_SETUP_DIR= . "{0}/setup.sh"
"""
        generated_env_sh = os.path.join(develspace, env_script)
        generated_setup_util_py = os.path.join(develspace, '_setup_util.py')
        if not merge and pkg_develspace:
            # generate env script and setup.sh|bash|zsh or setup.bat which relay to last devel space
            with open(generated_env_sh, 'w') as f:
                f.write(env_script_content.format(os.path.join(pkg_develspace, env_script)))
            os.chmod(generated_env_sh, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)
            shells_to_write = ['bat'] if sys.platform == 'win32' else ['bash', 'zsh']
            for shell in shells_to_write:
                with open(os.path.join(develspace, 'setup.%s' % shell), 'w') as f:
                    f.write(setup_script_content.format(pkg_develspace, shell))
            if sys.platform != 'win32':
                with open(os.path.join(develspace, 'setup.sh'), 'w') as f:
                    f.write(setup_sh_content.format(pkg_develspace, develspace))
            # remove _setup_util.py file which might have been generated for an empty devel space before
            if os.path.exists(generated_setup_util_py):
                os.remove(generated_setup_util_py)
        elif not pkg_develspace:
            # generate env.* and setup.* scripts for an empty devel space
            if 'CMAKE_PREFIX_PATH' in os.environ.keys():
                variables = {
                    'CATKIN_GLOBAL_BIN_DESTINATION': 'bin',
                    'CATKIN_LIB_ENVIRONMENT_PATHS': "'lib'",
                    'CATKIN_PKGCONFIG_ENVIRONMENT_PATHS': "os.path.join('lib', 'pkgconfig')",
                    'CMAKE_PREFIX_PATH_AS_IS': ';'.join(os.environ['CMAKE_PREFIX_PATH'].split(os.pathsep)),
                    'PYTHON_EXECUTABLE': sys.executable,
                    'PYTHON_INSTALL_DIR': get_python_install_dir(),
                }
                with open(generated_setup_util_py, 'w') as f:
                    f.write(configure_file(
                        os.path.join(get_cmake_path(), 'templates', '_setup_util.py.in'),
                        variables))
                os.chmod(generated_setup_util_py, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)
            else:
                sys.exit("Unable to process CMAKE_PREFIX_PATH from environment. Cannot generate environment files.")

            variables = {
                'SETUP_DIR': develspace,
                'SETUP_FILENAME': 'setup',
            }
            with open(generated_env_sh, 'w') as f:
                f.write(configure_file(os.path.join(get_cmake_path(), 'templates', env_script + '.in'), variables))
            os.chmod(generated_env_sh, stat.S_IXUSR | stat.S_IWUSR | stat.S_IRUSR)

            variables = {
                'PYTHON_EXECUTABLE': sys.executable,
                'SETUP_DIR': develspace,
            }
            shells_to_write = ['bat'] if sys.platform == 'win32' else ['sh', 'bash', 'zsh']
            for shell in shells_to_write:
                with open(os.path.join(develspace, 'setup.%s' % shell), 'w') as f:
                    f.write(configure_file(
                        os.path.join(get_cmake_path(), 'templates', 'setup.%s.in' % shell),
                        variables))
def build_catkin_package(
    path, package,
    workspace, buildspace, develspace, installspace,
    install, force_cmake, quiet, last_env, cmake_args, make_args,
    destdir=None, use_ninja=False, use_nmake=False, use_gmake=False
):
    """Configure and build a single catkin package in its isolated build dir.

    Runs cmake (when needed), then the selected make tool, and finally the
    install target when requested.

    :param path: path of the package relative to the workspace
    :param package: the parsed package object (provides name, exports, filename)
    :param last_env: path to the env script of the previously built package,
        used to chain environments; None for the first package
    :param destdir: value for DESTDIR passed through to the build environment
    :param use_ninja/use_nmake/use_gmake: select the build tool instead of make
    """
    cprint(
        "Processing @{cf}catkin@| package: '@!@{bf}" +
        package.name + "@|'"
    )

    # Make the build dir
    build_dir = _check_build_dir(package.name, workspace, buildspace)

    # Check last_env
    if last_env is not None:
        cprint(
            blue_arrow + " Building with env: " +
            "'{0}'".format(sanitize(last_env))
        )

    # Check for Makefile and maybe call cmake
    if not use_ninja:
        makefile_name = 'Makefile'
    else:
        makefile_name = 'build.ninja'
    makefile = os.path.join(build_dir, makefile_name)
    if not os.path.exists(makefile) or force_cmake:
        package_dir = os.path.dirname(package.filename)
        if not os.path.exists(os.path.join(package_dir, 'CMakeLists.txt')):
            export_tags = [e.tagname for e in package.exports]
            if 'metapackage' not in export_tags:
                print(colorize_line('Error: Package "%s" does not have a CMakeLists.txt file' % package.name))
                sys.exit('Can not build catkin package without CMakeLists.txt file')
            # generate CMakeLists.txt for metapackages without one
            print(colorize_line('Warning: metapackage "%s" should have a CMakeLists.txt file' % package.name))
            cmake_code = configure_file(
                get_metapackage_cmake_template_path(),
                {'name': package.name, 'metapackage_arguments': 'DIRECTORY "%s"' % package_dir})
            cmakelists_txt = os.path.join(build_dir, 'CMakeLists.txt')
            with open(cmakelists_txt, 'w') as f:
                f.write(cmake_code)
            # configure the generated CMakeLists.txt from the build dir instead
            package_dir = build_dir

        # Run cmake
        cmake_cmd = [
            'cmake',
            package_dir,
            '-DCATKIN_DEVEL_PREFIX=' + develspace,
            '-DCMAKE_INSTALL_PREFIX=' + installspace
        ]
        cmake_cmd.extend(cmake_args)
        add_env = get_additional_environment(install, destdir, installspace)
        isolation_print_command(' '.join(cmake_cmd), build_dir, add_env=add_env)
        if last_env is not None:
            cmake_cmd = [last_env] + cmake_cmd
        try:
            run_command_colorized(cmake_cmd, build_dir, quiet, add_env=add_env)
        except subprocess.CalledProcessError as e:
            if os.path.exists(makefile):
                # remove Makefile to force CMake invocation next time
                os.remove(makefile)
            raise
    else:
        print('%s exists, skipping explicit cmake invocation...' % makefile_name)
        # Check to see if cmake needs to be run via make
        if use_ninja:
            make_check_cmake_cmd = ['ninja', 'build.ninja']
        elif use_nmake:
            make_check_cmake_cmd = ['nmake', 'cmake_check_build_system']
        elif use_gmake:
            make_check_cmake_cmd = ['gmake', 'cmake_check_build_system']
        else:
            make_check_cmake_cmd = ['make', 'cmake_check_build_system']
        add_env = get_additional_environment(install, destdir, installspace)
        isolation_print_command(' '.join(make_check_cmake_cmd), build_dir, add_env=add_env)
        if last_env is not None:
            make_check_cmake_cmd = [last_env] + make_check_cmake_cmd
        run_command_colorized(
            make_check_cmake_cmd, build_dir, quiet, add_env=add_env
        )

    # Run make
    if use_ninja:
        make_executable = 'ninja'
    elif use_nmake:
        make_executable = 'nmake'
    elif use_gmake:
        make_executable = 'gmake'
    else:
        make_executable = 'make'
    make_cmd = [make_executable]
    make_cmd.extend(handle_make_arguments(make_args, append_default_jobs_flags=not use_nmake))
    isolation_print_command(' '.join(make_cmd), build_dir)
    if last_env is not None:
        make_cmd = [last_env] + make_cmd
    run_command(make_cmd, build_dir, quiet)

    # Make install
    # NMake doesn't have an option to list target so try it anyway
    if install or use_nmake:
        if has_make_target(build_dir, 'install', use_ninja=use_ninja, use_nmake=use_nmake, use_gmake=use_gmake):
            make_install_cmd = [make_executable, 'install']
            isolation_print_command(' '.join(make_install_cmd), build_dir)
            if last_env is not None:
                make_install_cmd = [last_env] + make_install_cmd
            run_command(make_install_cmd, build_dir, quiet)
        else:
            print(fmt(
                '@{yf}Package has no "@{boldon}install@{boldoff}" target, skipping "%s install" invocation...'
                % make_executable))
def cprint(msg, end=None):
    """Print ``msg`` after expanding its color markup via fmt()."""
    colored = fmt(msg)
    print(colored, end=end)
def _main():
    """Prepare a release of all packages found in the current directory.

    Parses the command line, validates the repository and its packages,
    bumps the version in every package.xml, updates forthcoming changelog
    sections, commits, tags and (unless ``--no-push`` is given) pushes the
    result to the remote repository.

    :raises RuntimeError: whenever the release has to be aborted
    """
    parser = argparse.ArgumentParser(
        description='Runs the commands to bump the version number, commit the modified %s files and create a tag in the repository.' % PACKAGE_MANIFEST_FILENAME)
    parser.add_argument(
        '--bump', choices=('major', 'minor', 'patch'), default='patch',
        help='Which part of the version number to bump? (default: %(default)s)')
    parser.add_argument('--version', help='Specify a specific version to use')
    parser.add_argument('--no-color', action='store_true', default=False, help='Disables colored output')
    parser.add_argument('--no-push', action='store_true', default=False, help='Disables pushing to remote repository')
    parser.add_argument('-t', '--tag-prefix', default='', help='Add this prefix to the created release tag')
    parser.add_argument(
        '-y', '--non-interactive', action='store_true', default=False,
        help="Run without user interaction, confirming all questions with 'yes'")
    args = parser.parse_args()

    # raw string avoids the invalid-escape warning for '\.' on newer Pythons
    if args.version and not re.match(r'^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$', args.version):
        parser.error('The passed version must follow the conventions (positive integers x.y.z with no leading zeros)')

    if args.tag_prefix and ' ' in args.tag_prefix:
        parser.error('The tag prefix must not contain spaces')

    # force --no-color if stdout is non-interactive
    if not sys.stdout.isatty():
        args.no_color = True
    # disable colors if asked
    if args.no_color:
        disable_ANSI_colors()

    base_path = '.'

    print(fmt('@{gf}Prepare the source repository for a release.'))

    # determine repository type
    vcs_type = get_repository_type(base_path)
    if vcs_type is None:
        raise RuntimeError(fmt("@{rf}Could not determine repository type of @{boldon}'%s'@{boldoff}" % base_path))
    print(fmt('Repository type: @{boldon}%s@{boldoff}' % vcs_type))

    # find packages
    try:
        packages = find_packages(base_path)
    except InvalidPackage as e:
        raise RuntimeError(fmt("@{rf}Invalid package at path @{boldon}'%s'@{boldoff}:\n  %s" % (os.path.abspath(base_path), str(e))))
    if not packages:
        raise RuntimeError(fmt('@{rf}No packages found'))
    print('Found packages: %s' % ', '.join([fmt('@{bf}@{boldon}%s@{boldoff}@{reset}' % p.name) for p in packages.values()]))

    # complain about packages with non-catkin build_type as they might require additional steps before being released
    # complain about packages with upper case character since they won't be releasable with bloom
    non_catkin_pkg_names = []
    invalid_pkg_names = []
    for package in packages.values():
        build_types = [export.content for export in package.exports if export.tagname == 'build_type']
        build_type = build_types[0] if build_types else 'catkin'
        if build_type != 'catkin':
            non_catkin_pkg_names.append(package.name)
        if package.name != package.name.lower():
            invalid_pkg_names.append(package.name)
    if non_catkin_pkg_names:
        print(
            fmt(
                "@{yf}Warning: the following package are not of build_type catkin and may require manual steps to release': %s" %
                ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(non_catkin_pkg_names)])),
            file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, verify that non-catkin packages are ready to be released or release manually.'))
    if invalid_pkg_names:
        print(
            fmt(
                "@{yf}Warning: the following package names contain upper case characters which violate both ROS and Debian naming conventions': %s" %
                ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(invalid_pkg_names)])),
            file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, fix the names of the packages.'))

    local_modifications = []
    for pkg_path, package in packages.items():
        # verify that the package.xml files don't have modifications pending
        package_xml_path = os.path.join(pkg_path, PACKAGE_MANIFEST_FILENAME)
        if has_changes(base_path, package_xml_path, vcs_type):
            local_modifications.append(package_xml_path)
        # verify that metapackages are valid
        if package.is_metapackage():
            try:
                metapackage.validate_metapackage(pkg_path, package)
            except metapackage.InvalidMetapackage as e:
                raise RuntimeError(fmt(
                    "@{rf}Invalid metapackage at path '@{boldon}%s@{boldoff}':\n  %s\n\nSee requirements for metapackages: %s" %
                    (os.path.abspath(pkg_path), str(e), metapackage.DEFINITION_URL)))

    # fetch current version and verify that all packages have same version number
    old_version = verify_equal_package_versions(packages.values())
    if args.version:
        new_version = args.version
    else:
        new_version = bump_version(old_version, args.bump)
    tag_name = args.tag_prefix + new_version

    if (
        not args.non_interactive and
        not prompt_continue(
            fmt("Prepare release of version '@{bf}@{boldon}%s@{boldoff}@{reset}'%s" % (new_version, " (tagged as '@{bf}@{boldon}%s@{boldoff}@{reset}')" % tag_name if args.tag_prefix else '')),
            default=True)
    ):
        raise RuntimeError(fmt("@{rf}Aborted release, use option '--bump' to release a different version and/or '--tag-prefix' to add a prefix to the tag name."))

    # check for changelog entries
    missing_changelogs = []
    missing_changelogs_but_forthcoming = {}
    for pkg_path, package in packages.items():
        changelog_path = os.path.join(pkg_path, CHANGELOG_FILENAME)
        if not os.path.exists(changelog_path):
            missing_changelogs.append(package.name)
            continue
        # verify that the changelog files don't have modifications pending
        if has_changes(base_path, changelog_path, vcs_type):
            local_modifications.append(changelog_path)
        changelog = get_changelog_from_path(changelog_path, package.name)
        try:
            changelog.get_content_of_version(new_version)
        except KeyError:
            # check that forthcoming section exists
            forthcoming_label = get_forthcoming_label(changelog.rst)
            if forthcoming_label:
                missing_changelogs_but_forthcoming[package.name] = (changelog_path, changelog, forthcoming_label)
            else:
                missing_changelogs.append(package.name)

    if local_modifications:
        raise RuntimeError(fmt('@{rf}The following files have modifications, please commit/revert them before:' + ''.join([('\n- @{boldon}%s@{boldoff}' % path) for path in local_modifications])))

    if missing_changelogs:
        print(
            fmt(
                "@{yf}Warning: the following packages do not have a changelog file or entry for version '@{boldon}%s@{boldoff}': %s" %
                (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs)]))),
            file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue without changelogs', default=False):
            raise RuntimeError(fmt("@{rf}Aborted release, populate the changelog with '@{boldon}catkin_generate_changelog@{boldoff}' and review / clean up the content."))

    # verify that repository is pushable (if the vcs supports dry run of push)
    if not args.no_push:
        try_repo_push(base_path, vcs_type)

    # check for staged changes and modified and untracked files
    print(fmt('@{gf}Checking if working copy is clean (no staged changes, no modified files, no untracked files)...'))
    is_clean = check_clean_working_copy(base_path, vcs_type)
    if not is_clean:
        print(fmt('@{yf}Warning: the working copy contains other changes. Consider reverting/committing/stashing them before preparing a release.'), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, clean the working copy before trying again.'))

    # for svn verify that we know how to tag that repository
    if vcs_type in ['svn']:
        tag_svn_cmd = tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '', dry_run=True)

    # tag forthcoming changelog sections
    update_changelog_sections(missing_changelogs_but_forthcoming, new_version)
    print(fmt(
        "@{gf}Rename the forthcoming section@{reset} of the following packages to version '@{bf}@{boldon}%s@{boldoff}@{reset}': %s" %
        (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs_but_forthcoming.keys())]))))

    # bump version number
    update_versions(packages.keys(), new_version)
    print(fmt("@{gf}Bump version@{reset} of all packages from '@{bf}%s@{reset}' to '@{bf}@{boldon}%s@{boldoff}@{reset}'" % (old_version, new_version)))

    pushed = None
    if vcs_type in ['svn']:
        # for svn everything affects the remote repository immediately
        commands = []
        commands.append(commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name, dry_run=True))
        commands.append(tag_svn_cmd)
        if not args.no_push:
            print(fmt('@{gf}The following commands will be executed to commit the changes and tag the new version:'))
        else:
            print(fmt('@{gf}You can use the following commands to manually commit the changes and tag the new version:'))
        for cmd in commands:
            print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

        if not args.no_push:
            if not args.non_interactive:
                # confirm before modifying repository
                if not prompt_continue('Execute commands which will modify the repository', default=True):
                    pushed = False
            if pushed is None:
                commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name)
                tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')
                pushed = True

    else:
        # for other vcs types the changes are first done locally
        print(fmt('@{gf}Committing the package.xml files...'))
        commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name)

        print(fmt("@{gf}Creating tag '@{boldon}%s@{boldoff}'..." % (tag_name)))
        tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')

        try:
            # FIX: push_changes() requires the tag name as third positional
            # argument; omitting it raised a TypeError before
            commands = push_changes(base_path, vcs_type, tag_name, dry_run=True)
        except RuntimeError:
            print(fmt('@{yf}Warning: could not determine commands to push the changes and tag to the remote repository. Do you have a remote configured for the current branch?'))
        else:
            if not args.no_push:
                print(fmt('@{gf}The following commands will be executed to push the changes and tag to the remote repository:'))
            else:
                print(fmt('@{gf}You can use the following commands to manually push the changes to the remote repository:'))
            for cmd in commands:
                print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

            if not args.no_push:
                if not args.non_interactive:
                    # confirm commands to push to remote repository
                    if not prompt_continue('Execute commands to push the local commits and tags to the remote repository', default=True):
                        pushed = False
                if pushed is None:
                    # FIX: pass tag_name here as well so the tag gets pushed
                    push_changes(base_path, vcs_type, tag_name)
                    pushed = True

    if pushed:
        print(fmt("@{gf}The source repository has been released successfully. The next step will be '@{boldon}bloom-release@{boldoff}'."))
    else:
        msg = 'The release of the source repository has been prepared successfully but the changes have not been pushed yet. ' \
            "After pushing the changes manually the next step will be '@{boldon}bloom-release@{boldoff}'."
        if args.no_push or pushed is False:
            print(fmt('@{yf}%s' % msg))
        else:
            raise RuntimeError(fmt('@{rf}%s' % msg))
def _find_executable(vcs_type): for path in os.getenv('PATH').split(os.path.pathsep): file_path = os.path.join(path, vcs_type) if os.path.isfile(file_path): return file_path raise RuntimeError(fmt('@{rf}Could not find vcs binary: %s' % vcs_type))
def _find_executable(vcs_type):
    """Resolve the vcs client binary via which(), raising when it is absent.

    :param vcs_type: name of the vcs client executable (e.g. 'git', 'svn')
    :returns: path of the executable
    :raises RuntimeError: if the executable cannot be located
    """
    location = which(vcs_type)
    if location is not None:
        return location
    raise RuntimeError(fmt('@{rf}Could not find vcs binary: %s' % vcs_type))
def _find_executable(vcs_type): for path in os.getenv('PATH').split(os.path.pathsep): file_path = os.path.join(path, vcs_type) if os.path.isfile(file_path): return file_path raise RuntimeError(fmt('@{rf}Could not find vcs binary: %s' % vcs_type))
def _main():
    """Prepare a release of all packages found in the current directory.

    Parses the command line, validates the repository and its packages,
    bumps the version in every package.xml, updates forthcoming changelog
    sections, commits, tags and (unless ``--no-push`` is given) pushes the
    result to the remote repository.

    :raises RuntimeError: whenever the release has to be aborted
    """
    parser = argparse.ArgumentParser(
        description='Runs the commands to bump the version number, commit the modified %s files and create a tag in the repository.' % PACKAGE_MANIFEST_FILENAME)
    parser.add_argument('--bump', choices=('major', 'minor', 'patch'), default='patch', help='Which part of the version number to bump? (default: %(default)s)')
    parser.add_argument('--version', help='Specify a specific version to use')
    parser.add_argument('--no-color', action='store_true', default=False, help='Disables colored output')
    parser.add_argument('--no-push', action='store_true', default=False, help='Disables pushing to remote repository')
    parser.add_argument('-t', '--tag-prefix', default='', help='Add this prefix to the created release tag')
    parser.add_argument('-y', '--non-interactive', action='store_true', default=False, help="Run without user interaction, confirming all questions with 'yes'")
    args = parser.parse_args()

    # a user-supplied version must be strict x.y.z without leading zeros
    if args.version and not re.match('^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)$', args.version):
        parser.error('The passed version must follow the conventions (positive integers x.y.z with no leading zeros)')

    if args.tag_prefix and ' ' in args.tag_prefix:
        parser.error('The tag prefix must not contain spaces')

    # force --no-color if stdout is non-interactive
    if not sys.stdout.isatty():
        args.no_color = True
    # disable colors if asked
    if args.no_color:
        disable_ANSI_colors()

    # the release is always prepared for the current working directory
    base_path = '.'

    print(fmt('@{gf}Prepare the source repository for a release.'))

    # determine repository type
    vcs_type = get_repository_type(base_path)
    if vcs_type is None:
        raise RuntimeError(fmt("@{rf}Could not determine repository type of @{boldon}'%s'@{boldoff}" % base_path))
    print(fmt('Repository type: @{boldon}%s@{boldoff}' % vcs_type))

    # find packages
    try:
        packages = find_packages(base_path)
    except InvalidPackage as e:
        raise RuntimeError(fmt("@{rf}Invalid package at path @{boldon}'%s'@{boldoff}:\n  %s" % (os.path.abspath(base_path), str(e))))
    if not packages:
        raise RuntimeError(fmt('@{rf}No packages found'))
    print('Found packages: %s' % ', '.join([fmt('@{bf}@{boldon}%s@{boldoff}@{reset}' % p.name) for p in packages.values()]))

    # complain about packages with non-catkin build_type as they might require additional steps before being released
    # complain about packages with upper case character since they won't be releasable with bloom
    non_catkin_pkg_names = []
    invalid_pkg_names = []
    for package in packages.values():
        build_types = [export.content for export in package.exports if export.tagname == 'build_type']
        build_type = build_types[0] if build_types else 'catkin'
        if build_type != 'catkin':
            non_catkin_pkg_names.append(package.name)
        if package.name != package.name.lower():
            invalid_pkg_names.append(package.name)
    if non_catkin_pkg_names:
        print(
            fmt(
                "@{yf}Warning: the following package are not of build_type catkin and may require manual steps to release': %s" %
                ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(non_catkin_pkg_names)])
            ), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, verify that non-catkin packages are ready to be released or release manually.'))
    if invalid_pkg_names:
        print(
            fmt(
                "@{yf}Warning: the following package names contain upper case characters which violate both ROS and Debian naming conventions': %s" %
                ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(invalid_pkg_names)])
            ), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, fix the names of the packages.'))

    local_modifications = []
    for pkg_path, package in packages.items():
        # verify that the package.xml files don't have modifications pending
        package_xml_path = os.path.join(pkg_path, PACKAGE_MANIFEST_FILENAME)
        if has_changes(base_path, package_xml_path, vcs_type):
            local_modifications.append(package_xml_path)
        # verify that metapackages are valid
        if package.is_metapackage():
            try:
                metapackage.validate_metapackage(pkg_path, package)
            except metapackage.InvalidMetapackage as e:
                raise RuntimeError(fmt(
                    "@{rf}Invalid metapackage at path '@{boldon}%s@{boldoff}':\n  %s\n\nSee requirements for metapackages: %s" %
                    (os.path.abspath(pkg_path), str(e), metapackage.DEFINITION_URL)))

    # fetch current version and verify that all packages have same version number
    old_version = verify_equal_package_versions(packages.values())
    if args.version:
        new_version = args.version
    else:
        new_version = bump_version(old_version, args.bump)
    tag_name = args.tag_prefix + new_version

    if (
        not args.non_interactive and
        not prompt_continue(
            fmt(
                "Prepare release of version '@{bf}@{boldon}%s@{boldoff}@{reset}'%s" %
                (new_version, " (tagged as '@{bf}@{boldon}%s@{boldoff}@{reset}')" % tag_name if args.tag_prefix else '')
            ), default=True)
    ):
        raise RuntimeError(fmt("@{rf}Aborted release, use option '--bump' to release a different version and/or '--tag-prefix' to add a prefix to the tag name."))

    # check for changelog entries
    missing_changelogs = []
    missing_changelogs_but_forthcoming = {}
    for pkg_path, package in packages.items():
        changelog_path = os.path.join(pkg_path, CHANGELOG_FILENAME)
        if not os.path.exists(changelog_path):
            missing_changelogs.append(package.name)
            continue
        # verify that the changelog files don't have modifications pending
        if has_changes(base_path, changelog_path, vcs_type):
            local_modifications.append(changelog_path)
        changelog = get_changelog_from_path(changelog_path, package.name)
        try:
            changelog.get_content_of_version(new_version)
        except KeyError:
            # check that forthcoming section exists
            forthcoming_label = get_forthcoming_label(changelog.rst)
            if forthcoming_label:
                missing_changelogs_but_forthcoming[package.name] = (changelog_path, changelog, forthcoming_label)
            else:
                missing_changelogs.append(package.name)

    if local_modifications:
        raise RuntimeError(fmt('@{rf}The following files have modifications, please commit/revert them before:' + ''.join([('\n- @{boldon}%s@{boldoff}' % path) for path in local_modifications])))

    if missing_changelogs:
        print(
            fmt(
                "@{yf}Warning: the following packages do not have a changelog file or entry for version '@{boldon}%s@{boldoff}': %s" %
                (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs)]))
            ), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue without changelogs', default=False):
            raise RuntimeError(fmt("@{rf}Aborted release, populate the changelog with '@{boldon}catkin_generate_changelog@{boldoff}' and review / clean up the content."))

    # verify that repository is pushable (if the vcs supports dry run of push)
    if not args.no_push:
        try_repo_push(base_path, vcs_type)

    # check for staged changes and modified and untracked files
    print(fmt('@{gf}Checking if working copy is clean (no staged changes, no modified files, no untracked files)...'))
    is_clean = check_clean_working_copy(base_path, vcs_type)
    if not is_clean:
        print(fmt('@{yf}Warning: the working copy contains other changes. Consider reverting/committing/stashing them before preparing a release.'), file=sys.stderr)
        if not args.non_interactive and not prompt_continue('Continue anyway', default=False):
            raise RuntimeError(fmt('@{rf}Aborted release, clean the working copy before trying again.'))

    # for svn verify that we know how to tag that repository
    if vcs_type in ['svn']:
        tag_svn_cmd = tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '', dry_run=True)

    # tag forthcoming changelog sections
    update_changelog_sections(missing_changelogs_but_forthcoming, new_version)
    print(fmt(
        "@{gf}Rename the forthcoming section@{reset} of the following packages to version '@{bf}@{boldon}%s@{boldoff}@{reset}': %s" %
        (new_version, ', '.join([('@{boldon}%s@{boldoff}' % p) for p in sorted(missing_changelogs_but_forthcoming.keys())]))))

    # bump version number
    update_versions(packages.keys(), new_version)
    print(fmt("@{gf}Bump version@{reset} of all packages from '@{bf}%s@{reset}' to '@{bf}@{boldon}%s@{boldoff}@{reset}'" % (old_version, new_version)))

    # pushed stays None until a decision is taken: True when pushed/committed,
    # False when the user declined
    pushed = None
    if vcs_type in ['svn']:
        # for svn everything affects the remote repository immediately
        commands = []
        commands.append(commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name, dry_run=True))
        commands.append(tag_svn_cmd)
        if not args.no_push:
            print(fmt('@{gf}The following commands will be executed to commit the changes and tag the new version:'))
        else:
            print(fmt('@{gf}You can use the following commands to manually commit the changes and tag the new version:'))
        for cmd in commands:
            print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

        if not args.no_push:
            if not args.non_interactive:
                # confirm before modifying repository
                if not prompt_continue('Execute commands which will modify the repository', default=True):
                    pushed = False
            if pushed is None:
                commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name)
                tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')
                pushed = True

    else:
        # for other vcs types the changes are first done locally
        print(fmt('@{gf}Committing the package.xml files...'))
        commit_files(base_path, vcs_type, packages, missing_changelogs_but_forthcoming, tag_name)

        print(fmt("@{gf}Creating tag '@{boldon}%s@{boldoff}'..." % (tag_name)))
        tag_repository(base_path, vcs_type, tag_name, args.tag_prefix != '')

        try:
            commands = push_changes(base_path, vcs_type, tag_name, dry_run=True)
        except RuntimeError:
            print(fmt('@{yf}Warning: could not determine commands to push the changes and tag to the remote repository. Do you have a remote configured for the current branch?'))
        else:
            if not args.no_push:
                print(fmt('@{gf}The following commands will be executed to push the changes and tag to the remote repository:'))
            else:
                print(fmt('@{gf}You can use the following commands to manually push the changes to the remote repository:'))
            for cmd in commands:
                print(fmt('  @{bf}@{boldon}%s@{boldoff}' % ' '.join(cmd)))

            if not args.no_push:
                if not args.non_interactive:
                    # confirm commands to push to remote repository
                    if not prompt_continue('Execute commands to push the local commits and tags to the remote repository', default=True):
                        pushed = False
                if pushed is None:
                    push_changes(base_path, vcs_type, tag_name)
                    pushed = True

    if pushed:
        print(fmt("@{gf}The source repository has been released successfully. The next step will be '@{boldon}bloom-release@{boldoff}'."))
    else:
        msg = 'The release of the source repository has been prepared successfully but the changes have not been pushed yet. ' \
            "After pushing the changes manually the next step will be '@{boldon}bloom-release@{boldoff}'."
        if args.no_push or pushed is False:
            print(fmt('@{yf}%s' % msg))
        else:
            raise RuntimeError(fmt('@{rf}%s' % msg))
def _find_executable(vcs_type):
    """Locate the vcs command line tool, raising RuntimeError when missing.

    :param vcs_type: name of the vcs client executable (e.g. 'git', 'svn')
    :returns: path of the executable as reported by which()
    :raises RuntimeError: if which() cannot locate the executable
    """
    executable = which(vcs_type)
    if executable is None:
        raise RuntimeError(fmt('@{rf}Could not find vcs binary: %s' % vcs_type))
    return executable