def main(sysargs=None):
    """Entry point for git-bloom-release: run a configured release track.

    :param sysargs: command line arguments (defaults to ``sys.argv[1:]``)
    """
    # Check that the current directory is a serviceable git/bloom repo
    ensure_clean_working_env()
    ensure_git_root()
    # Get tracks
    tracks_dict = get_tracks_dict_raw()
    if not tracks_dict['tracks']:
        error("No tracks configured, first create a track with "
              "'git-bloom-config new <track_name>'", exit=True)
    # Do argparse stuff
    parser = get_argument_parser([str(t) for t in tracks_dict['tracks']])
    parser = add_global_arguments(parser)
    args = parser.parse_args(sysargs)
    handle_global_arguments(args)
    verify_track(args.track, tracks_dict['tracks'][args.track])
    execute_track(args.track, tracks_dict['tracks'][args.track],
                  args.release_increment, args.pretend, args.debug,
                  args.unsafe)
    # Notify the user of success and next action suggestions
    print('\n\n')
    warning("Tip: Check to ensure that the debian tags created have the same "
            "version as the upstream version you are releasing.")
    info(fmt("@{gf}@!Everything went as expected, "
             "you should check that the new tags match your expectations, and "
             "then push to the release repo with:@|"))
    # Bug fix: the tip previously said "over-writing existing flags";
    # it is tags that may be over-written (matches the newer main()).
    info(fmt(" git push --all && git push --tags "
             "@{kf}@!# You might have to add --force to the second command if you "
             "are over-writing existing tags"))
def generate_ros_distro_diff(track, repository, distro, distro_file_url, distro_file, distro_file_raw):
    """Update the in-memory rosdistro file for this release and write a patch.

    Returns ``(udiff_file, distro_dump)`` where ``udiff_file`` is the path of
    the written ``.patch`` file, or ``(None, None)`` when the release changed
    nothing in the distro file.
    """
    # Package discovery must happen on the 'upstream' branch, where the
    # released sources (and their package.xml files) live.
    with inbranch('upstream'):
        # Check for package.xml(s)
        try:
            from catkin_pkg.packages import find_packages
        except ImportError:
            debug(traceback.format_exc())
            error("catkin_pkg was not detected, please install it.",
                  file=sys.stderr, exit=True)
        packages = find_packages(os.getcwd())
    if len(packages) == 0:
        warning("No packages found, will not generate 'package: path' entries for rosdistro.")
    track_dict = get_tracks_dict_raw()['tracks'][track]
    last_version = track_dict['last_version']
    release_inc = track_dict['release_inc']
    if repository not in distro_file['repositories']:
        # First release of this repository: seed the entry with the url the
        # user typed in get_repo_uri (may be empty if none was provided).
        global _user_provided_release_url
        distro_file['repositories'][repository] = {'url': _user_provided_release_url or ''}
    # Version in the distro file is '<upstream version>-<release increment>'.
    distro_file['repositories'][repository]['version'] = '{0}-{1}'.format(last_version, release_inc)
    # A lone package at '.' means the repository *is* the package; only emit
    # a 'packages' map when there are several packages or a nested one.
    # NOTE(review): keys()[0] / iteritems() are Python-2-only idioms.
    if packages and (len(packages) > 1 or packages.keys()[0] != '.'):
        distro_file['repositories'][repository]['packages'] = {}
        for path, package in packages.iteritems():
            if os.path.basename(path) == package.name:
                # Path matches the package name: rosdistro can infer it (None).
                distro_file['repositories'][repository]['packages'][package.name] = None
            else:
                distro_file['repositories'][repository]['packages'][package.name] = path
    distro_file_name = os.path.join('release', distro_file_url.split('/')[-1])
    distro_dump = yaml.dump(distro_file, indent=2, default_flow_style=False)
    if distro_file_raw != distro_dump:
        udiff = difflib.unified_diff(distro_file_raw.splitlines(),
                                     distro_dump.splitlines(),
                                     fromfile=distro_file_name,
                                     tofile=distro_file_name)
        temp_dir = tempfile.mkdtemp()
        version = distro_file['repositories'][repository]['version']
        udiff_file = os.path.join(temp_dir, repository + '-' + version + '.patch')
        udiff_raw = ''
        info("Unified diff for the ROS distro file located at '{0}':".format(udiff_file))
        # Accumulate the plain diff in udiff_raw while echoing a colorized
        # copy. Header lines ('@@', '+++', '---') already carry a trailing
        # newline from difflib's default lineterm; content lines (from
        # splitlines()) do not, hence the selective '\n' appends. After a
        # line is wrapped by fmt() it no longer matches later prefixes.
        for line in udiff:
            if line.startswith('@@'):
                udiff_raw += line
                line = fmt('@{cf}' + line)
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{gf}' + line)
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{rf}' + line)
            if line.startswith(' '):
                line += '\n'
                udiff_raw += line
            info(line, use_prefix=False, end='')
        with open(udiff_file, 'w+') as f:
            f.write(udiff_raw)
        return udiff_file, distro_dump
    else:
        warning("This release resulted in no changes to the ROS distro file...")
    return None, None
def _my_run(cmd, msg=None):
    """Echo a banner (``msg`` if given, else the command) and run ``cmd``.

    The command is executed through the shell; a non-zero exit status
    raises ``CalledProcessError``.
    """
    banner = msg if msg else str(cmd)
    info(fmt("@{bf}@!==> @|@!" + sanitize(banner)))
    from subprocess import check_call
    check_call(cmd, shell=True)
def _check_all_keys_are_valid(self, peer_packages, rosdistro):
    """Check that every dependency key of every package resolves for all
    target distro versions.

    :param peer_packages: names of packages released together (treated as
        resolvable without rosdep)
    :param rosdistro: ROS distro name used for condition evaluation/resolution
    :returns: True if every key resolved with the default installer
    """
    keys_to_resolve = set()
    # Map: dependency key -> list of package names that depend on it,
    # used to give context in error messages.
    key_to_packages_which_depends_on = collections.defaultdict(list)
    keys_to_ignore = set()
    for package in self.packages.values():
        # Evaluate <depend condition="..."> attributes before filtering.
        evaluate_package_conditions(package, rosdistro)
        depends = [
            dep for dep in (package.run_depends + package.buildtool_export_depends)
            if dep.evaluated_condition is not False]
        build_depends = [
            dep for dep in (package.build_depends + package.buildtool_depends + package.test_depends)
            if dep.evaluated_condition is not False]
        unresolved_keys = [
            dep for dep in (depends + build_depends + package.replaces + package.conflicts)
            if dep.evaluated_condition is not False]
        # replaces/conflicts entries are resolved but also ignorable as peers.
        keys_to_ignore = {
            dep for dep in keys_to_ignore.union(package.replaces + package.conflicts)
            if dep.evaluated_condition is not False}
        keys = [d.name for d in unresolved_keys]
        keys_to_resolve.update(keys)
        for key in keys:
            key_to_packages_which_depends_on[key].append(package.name)
    # Drop keys the user asked to skip; warn either way (found or not).
    for skip_key in self.skip_keys:
        try:
            keys_to_resolve.remove(skip_key)
        except KeyError:
            warning("Key '{0}' specified by --skip-keys was not found".format(skip_key))
        else:
            warning("Skipping dependency key '{0}' per --skip-keys".format(skip_key))
    os_name = self.os_name
    rosdistro = self.rosdistro
    all_keys_valid = True
    # Try each key against each targeted OS version; keep going on failure
    # so all problems are reported in one pass.
    for key in sorted(keys_to_resolve):
        for os_version in self.distros:
            try:
                extended_peer_packages = peer_packages + [d.name for d in keys_to_ignore]
                rule, installer_key, default_installer_key = \
                    resolve_rosdep_key(key, os_name, os_version, rosdistro,
                                       extended_peer_packages, retry=False)
                # rule of None means the key was resolved by other means
                # (e.g. it is a peer package); nothing to validate.
                if rule is None:
                    continue
                if installer_key != default_installer_key:
                    error("Key '{0}' resolved to '{1}' with installer '{2}', "
                          "which does not match the default installer '{3}'."
                          .format(key, rule, installer_key, default_installer_key))
                    BloomGenerator.exit(
                        "The RPM generator does not support dependencies "
                        "which are installed with the '{0}' installer."
                        .format(installer_key),
                        returncode=code.GENERATOR_INVALID_INSTALLER_KEY)
            except (GeneratorError, RuntimeError) as e:
                print(fmt("Failed to resolve @{cf}@!{key}@| on @{bf}{os_name}@|:@{cf}@!{os_version}@| with: {e}")
                      .format(**locals()))
                print(fmt("@{cf}@!{0}@| is depended on by these packages: ").format(key) +
                      str(list(set(key_to_packages_which_depends_on[key]))))
                print(fmt("@{kf}@!<== @{rf}@!Failed@|"))
                all_keys_valid = False
    return all_keys_valid
def main(args=None, get_subs_fn=None):
    """Generate debs for the single catkin package in the target path.

    :param args: parsed argparse namespace, or None to use defaults
    :param get_subs_fn: override for the substitutions function (tests)
    """
    get_subs_fn = get_subs_fn or get_subs
    _place_template_files = True
    _process_template_files = True
    package_path = os.getcwd()
    if args is not None:
        package_path = args.package_path or os.getcwd()
        _place_template_files = args.place_template_files
        _process_template_files = args.process_template_files
    pkgs_dict = find_packages(package_path)
    if len(pkgs_dict) == 0:
        sys.exit("No packages found in path: '{0}'".format(package_path))
    if len(pkgs_dict) > 1:
        sys.exit("Multiple packages found, "
                 "this tool only supports one package at a time.")
    os_data = create_default_installer_context().get_os_name_and_version()
    os_name, os_version = os_data
    ros_distro = os.environ.get("ROS_DISTRO", "indigo")
    # Allow args overrides.
    # Bug fix: the overrides were applied unconditionally, but args may be
    # None (it is explicitly guarded above) — that raised AttributeError.
    if args is not None:
        os_name = args.os_name or os_name
        os_version = args.os_version or os_version
        ros_distro = args.ros_distro or ros_distro
    # Summarize
    info(
        fmt("@!@{gf}==> @|") +
        fmt("Generating debs for @{cf}%s:%s@| for package(s) %s" %
            (os_name, os_version, [p.name for p in pkgs_dict.values()]))
    )
    for path, pkg in pkgs_dict.items():
        template_files = None
        try:
            subs = get_subs_fn(pkg, os_name, os_version, ros_distro)
            if _place_template_files:
                # Place template files
                place_template_files(path)
            if _process_template_files:
                # Just process existing template files
                template_files = process_template_files(path, subs)
            if not _place_template_files and not _process_template_files:
                # If neither, do both
                place_template_files(path)
                template_files = process_template_files(path, subs)
            if template_files is not None:
                # Processed templates replace the .em sources; remove them.
                for template_file in template_files:
                    os.remove(os.path.normpath(template_file))
        except Exception as exc:
            debug(traceback.format_exc())
            error(type(exc).__name__ + ": " + str(exc), exit=True)
        except (KeyboardInterrupt, EOFError):
            sys.exit(1)
def build_debian_pkg(args=None, get_subs_fn=None):
    """Generate a single merged debian package for all packages in the path.

    :param args: parsed argparse namespace, or None to use defaults
    :param get_subs_fn: override for the substitutions function (tests)
    """
    get_subs_fn = get_subs_fn or get_subs
    _place_template_files = True
    _process_template_files = True
    package_path = os.getcwd()
    if args is not None:
        package_path = args.package_path or os.getcwd()
        _place_template_files = args.place_template_files
        _process_template_files = args.process_template_files
    pkgs_dict = find_packages(package_path)
    if len(pkgs_dict) == 0:
        sys.exit("No packages found in path: '{0}'".format(package_path))
    # Unlike main(), multiple packages are allowed here — they are merged
    # into one debian package below.
    os_data = create_default_installer_context().get_os_name_and_version()
    os_name, os_version = os_data
    ros_distro = os.environ.get('ROS_DISTRO', 'indigo')
    # Allow args overrides.
    # Bug fix: the overrides were applied unconditionally, but args may be
    # None (it is explicitly guarded above) — that raised AttributeError.
    install_prefix = "/opt"
    if args is not None:
        os_name = args.os_name or os_name
        os_version = args.os_version or os_version
        ros_distro = args.ros_distro or ros_distro
        install_prefix = args.install_prefix or "/opt"
    # Summarize
    info(fmt("@!@{gf}==> @|") +
         fmt("Generating debs for @{cf}%s:%s@| for package(s) %s" %
             (os_name, os_version, [p.name for p in pkgs_dict.values()])))
    # Merge all packages' substitutions into a single set
    all_subs = merge_packages(pkgs_dict, get_subs_fn, os_name, os_version,
                              ros_distro, install_prefix,
                              args.native if args is not None else False)
    path = ''
    build_type = 'cmake'
    # Bug fix: template_files must be initialized, otherwise the
    # 'if template_files is not None' check below raised NameError when
    # only placing (not processing) templates.
    template_files = None
    try:
        if _place_template_files:
            # Place template files
            place_template_files(path, build_type)
        if _process_template_files:
            # Just process existing template files
            template_files = process_template_files(path, all_subs)
        if not _place_template_files and not _process_template_files:
            # If neither, do both
            place_template_files(path, build_type)
            template_files = process_template_files(path, all_subs)
        if template_files is not None:
            # Processed templates replace the .em sources; remove them.
            for template_file in template_files:
                os.remove(os.path.normpath(template_file))
    except Exception as exc:
        debug(traceback.format_exc())
        error(type(exc).__name__ + ": " + str(exc), exit=True)
    except (KeyboardInterrupt, EOFError):
        sys.exit(1)
def main(sysargs=None):
    """Entry point for git-bloom-release (transactional variant).

    Runs the chosen release track inside a safety clone (GitClone) so that
    a failed run does not corrupt the working repository.

    :param sysargs: command line arguments (defaults to ``sys.argv[1:]``)
    """
    # Migrate legacy 'bloom' branch configuration before anything else.
    from bloom.config import upconvert_bloom_to_config_branch
    upconvert_bloom_to_config_branch()
    # Check that the current directory is a serviceable git/bloom repo
    ensure_clean_working_env()
    ensure_git_root()
    # Get tracks
    tracks_dict = get_tracks_dict_raw()
    if not tracks_dict['tracks']:
        error(
            "No tracks configured, first create a track with "
            "'git-bloom-config new <track_name>'", exit=True)
    # Do argparse stuff
    parser = get_argument_parser([str(t) for t in tracks_dict['tracks']])
    parser = add_global_arguments(parser)
    args = parser.parse_args(sysargs)
    handle_global_arguments(args)
    # Expose the selected track to sub-commands via the environment.
    os.environ['BLOOM_TRACK'] = args.track
    verify_track(args.track, tracks_dict['tracks'][args.track])
    # Run the track inside the safety clone. While it runs, nested git
    # cloning is disabled (and its warning silenced) so sub-commands reuse
    # this clone; the toggles are restored before leaving the context.
    git_clone = GitClone()
    with git_clone:
        quiet_git_clone_warning(True)
        disable_git_clone(True)
        execute_track(args.track, tracks_dict['tracks'][args.track],
                      args.release_increment, args.pretend, args.debug,
                      args.unsafe, interactive=args.interactive)
        disable_git_clone(False)
        quiet_git_clone_warning(False)
    # Push the results from the clone back into the working repository.
    git_clone.commit()
    # Notify the user of success and next action suggestions
    info('\n\n', use_prefix=False)
    warning("Tip: Check to ensure that the debian tags created have the same "
            "version as the upstream version you are releasing.")
    info(
        fmt("@{gf}@!Everything went as expected, "
            "you should check that the new tags match your expectations, and "
            "then push to the release repo with:@|"))
    info(
        fmt(" git push --all && git push --tags "
            "@{kf}@!# You might have to add --force to the second command if you "
            "are over-writing existing tags"))
def _check_all_keys_are_valid(self, peer_packages, rosdistro):
    """Check that every dependency key of every package resolves for all
    target distro versions.

    :param peer_packages: names of packages released together (treated as
        resolvable without rosdep)
    :param rosdistro: ROS distro name used for condition evaluation/resolution
    :returns: True if every key resolved with the default installer
    """
    keys_to_resolve = []
    # Map: dependency key -> list of package names that depend on it,
    # used to give context in error messages.
    key_to_packages_which_depends_on = collections.defaultdict(list)
    keys_to_ignore = set()
    for package in self.packages.values():
        # Evaluate <depend condition="..."> attributes before filtering.
        package.evaluate_conditions(package_conditional_context(rosdistro))
        depends = [
            dep for dep in (package.run_depends + package.buildtool_export_depends)
            if dep.evaluated_condition]
        build_depends = [
            dep for dep in (package.build_depends + package.buildtool_depends + package.test_depends)
            if dep.evaluated_condition]
        unresolved_keys = [
            dep for dep in (depends + build_depends + package.replaces + package.conflicts)
            if dep.evaluated_condition]
        # replaces/conflicts entries are resolved but also ignorable as peers.
        keys_to_ignore = {
            dep for dep in keys_to_ignore.union(package.replaces + package.conflicts)
            if dep.evaluated_condition}
        keys = [d.name for d in unresolved_keys]
        keys_to_resolve.extend(keys)
        for key in keys:
            key_to_packages_which_depends_on[key].append(package.name)
    os_name = self.os_name
    rosdistro = self.rosdistro
    all_keys_valid = True
    # Try each key against each targeted OS version; keep going on failure
    # so all problems are reported in one pass.
    for key in sorted(set(keys_to_resolve)):
        for os_version in self.distros:
            try:
                extended_peer_packages = peer_packages + [d.name for d in keys_to_ignore]
                rule, installer_key, default_installer_key = \
                    resolve_rosdep_key(key, os_name, os_version, rosdistro,
                                       extended_peer_packages, retry=False)
                # rule of None means the key was resolved by other means
                # (e.g. it is a peer package); nothing to validate.
                if rule is None:
                    continue
                if installer_key != default_installer_key:
                    error("Key '{0}' resolved to '{1}' with installer '{2}', "
                          "which does not match the default installer '{3}'."
                          .format(key, rule, installer_key, default_installer_key))
                    BloomGenerator.exit(
                        "The RPM generator does not support dependencies "
                        "which are installed with the '{0}' installer."
                        .format(installer_key),
                        returncode=code.GENERATOR_INVALID_INSTALLER_KEY)
            except (GeneratorError, RuntimeError) as e:
                print(fmt("Failed to resolve @{cf}@!{key}@| on @{bf}{os_name}@|:@{cf}@!{os_version}@| with: {e}")
                      .format(**locals()))
                print(fmt("@{cf}@!{0}@| is depended on by these packages: ").format(key) +
                      str(list(set(key_to_packages_which_depends_on[key]))))
                print(fmt("@{kf}@!<== @{rf}@!Failed@|"))
                all_keys_valid = False
    return all_keys_valid
def execute_track(track, track_dict, release_inc, pretend=True, debug=False, fast=False):
    """Run every action of a release track, then record the new release inc.

    :param track: name of the track being executed
    :param track_dict: the track's settings dictionary
    :param release_inc: release increment to use for this run
    :param pretend: when True, only print the commands that would run
    :param debug: when True, export DEBUG=1 to child processes
    :param fast: when True, export BLOOM_UNSAFE=1 to child processes
    """
    info("Processing release track settings for '{0}'".format(track))
    settings = process_track_settings(track_dict, release_inc)
    # setup extra settings
    archive_dir_path = tempfile.mkdtemp()
    settings['archive_dir_path'] = archive_dir_path
    if settings['release_tag'] != ':{none}':
        archive_file = '{name}-{release_tag}.tar.gz'.format(**settings)
    else:
        archive_file = '{name}.tar.gz'.format(**settings)
    settings['archive_path'] = os.path.join(archive_dir_path, archive_file)
    # execute actions
    info("", use_prefix=False)
    info("Executing release track '{0}'".format(track))
    for action in track_dict['actions']:
        # A tarball upstream is already an archive: skip the export step and
        # point archive_path directly at the tarball uri.
        if 'bloom-export-upstream' in action and settings['vcs_type'] == 'tar':
            warning("Explicitly skipping bloom-export-upstream for tar.")
            settings['archive_path'] = settings['vcs_uri']
            continue
        # Expand {placeholders} in the action with the track settings.
        templated_action = template_str(action, settings)
        info(fmt("@{bf}@!==> @|@!" + sanitize(str(templated_action))))
        if pretend:
            continue
        stdout = None
        stderr = None
        # In quiet mode capture output so it can be replayed only on demand.
        if bloom.util._quiet:
            stdout = subprocess.PIPE
            stderr = subprocess.STDOUT
        # Propagate debug/unsafe flags to the child via the environment,
        # without clobbering values the user already set.
        if debug and 'DEBUG' not in os.environ:
            os.environ['DEBUG'] = '1'
        if fast and 'BLOOM_UNSAFE' not in os.environ:
            os.environ['BLOOM_UNSAFE'] = '1'
        templated_action = templated_action.split()
        # Resolve the executable to an absolute path (shell=False below).
        templated_action[0] = find_full_path(templated_action[0])
        p = subprocess.Popen(templated_action, stdout=stdout, stderr=stderr,
                             shell=False, env=os.environ.copy())
        out, err = p.communicate()
        if bloom.util._quiet:
            info(out, use_prefix=False)
        ret = p.returncode
        if ret > 0:
            error(fmt(_error + "Error running command '@!{0}'@|")
                  .format(templated_action), exit=True)
        info('', use_prefix=False)
    if not pretend:
        # Update the release_inc
        tracks_dict = get_tracks_dict_raw()
        tracks_dict['tracks'][track]['release_inc'] = settings['release_inc']
        tracks_dict['tracks'][track]['last_version'] = settings['version']
        write_tracks_dict_raw(tracks_dict,
                              'Updating release inc to: ' + str(settings['release_inc']))
def __str__(self):
    """Render the prompt: name header, spec entries (or the plain prompt),
    and the highlighted default value."""
    msg = fmt('@_' + sanitize(self.name) + ':@|')
    if self.spec is not None:
        # Fix: use items() instead of the Python-2-only iteritems(); this is
        # also consistent with the other __str__ in this file and behaves
        # identically on Python 2.
        for key, val in self.spec.items():
            msg += '\n ' + key
            for line in val.splitlines():
                msg += '\n ' + line
    else:
        msg += '\n ' + self.prompt
    msg += '\n '
    if self.default is None:
        msg += fmt(" @![@{yf}None@|@!]@|: ")
    else:
        msg += fmt(" @!['@{yf}" + sanitize(self.default) + "@|@!']@|: ")
    return msg
def __str__(self):
    """Render the prompt: name header, spec entries (or the plain prompt),
    and the highlighted default value."""
    pieces = [fmt('@_' + sanitize(self.name) + ':@|')]
    if self.spec is None:
        pieces.append('\n ' + self.prompt)
    else:
        for key, val in self.spec.items():
            pieces.append('\n ' + key)
            pieces.extend('\n ' + line for line in val.splitlines())
    pieces.append('\n ')
    if self.default is None:
        pieces.append(fmt(" @![@{yf}None@|@!]@|: "))
    else:
        pieces.append(fmt(" @!['@{yf}" + sanitize(self.default) + "@|@!']@|: "))
    return ''.join(pieces)
def main(args=None, get_subs_fn=None):
    """Generate a Homebrew formula for the single catkin package in the path.

    :param args: parsed argparse namespace, or None to use defaults
    :param get_subs_fn: override for the substitutions function (tests)
    """
    get_subs_fn = get_subs_fn or get_subs
    _place_template_files = True
    _process_template_files = True
    package_path = os.getcwd()
    if args is not None:
        package_path = args.package_path or os.getcwd()
        _place_template_files = args.place_template_files
        _process_template_files = args.process_template_files
    pkgs_dict = find_packages(package_path)
    if len(pkgs_dict) == 0:
        sys.exit("No packages found in path: '{0}'".format(package_path))
    if len(pkgs_dict) > 1:
        sys.exit("Multiple packages found, this tool only supports one package at a time.")
    ros_distro = os.environ.get('ROS_DISTRO', 'groovy')
    # Allow args overrides.
    # Bug fix: the override was applied unconditionally, but args may be
    # None (it is explicitly guarded above) — that raised AttributeError.
    if args is not None:
        ros_distro = args.ros_distro or ros_distro
    # Summarize
    info(fmt("@!@{gf}==> @|") +
         fmt("Generating Homebrew formula for package(s) %s" %
             ([p.name for p in pkgs_dict.values()])))
    for path, pkg in pkgs_dict.items():
        template_files = None
        try:
            subs = get_subs_fn(pkg, ros_distro)
            if _place_template_files:
                # Place template files
                place_template_files(path)
            if _process_template_files:
                # Just process existing template files
                template_files = process_template_files(path, subs)
            if not _place_template_files and not _process_template_files:
                # If neither, do both
                place_template_files(path)
                template_files = process_template_files(path, subs)
            if template_files is not None:
                # Processed templates replace the .em sources; remove them.
                for template_file in template_files:
                    os.remove(os.path.normpath(template_file))
        except Exception as exc:
            debug(traceback.format_exc())
            error(type(exc).__name__ + ": " + str(exc), exit=True)
        except (KeyboardInterrupt, EOFError):
            sys.exit(1)
def process_template_files(path, subs):
    """Process the template files under ``<path>/rpm`` in place using the
    given substitutions; exit if the folder is missing."""
    info(fmt("@!@{bf}==>@| In place processing templates in 'rpm' folder."))
    rpm_dir = os.path.join(path, 'rpm')
    if os.path.exists(rpm_dir):
        return __process_template_folder(rpm_dir, subs)
    sys.exit("No rpm directory found at '{0}', cannot process templates."
             .format(rpm_dir))
def commit(self):
    """Replay changes made in the safety clone back onto the working copy.

    Pre-existing branches are rebased onto the clone's state, then all
    branches and tags are pushed back; tags fall back to a force push.
    """
    if self.disabled:
        return
    info(fmt("@{bf}<==@| Command successful, committing changes to working copy"))
    current_branch = get_current_branch()
    if current_branch is None:
        error("Could not determine current branch.", exit=True)
    with inbranch(get_commit_hash(get_current_branch())):
        with change_directory(self.clone_dir):
            # Anything not present before the command ran is a new branch.
            preexisting = set(self.current_branches)
            new_branches = [b for b in get_branches() if b not in preexisting]
            # Rebase every pre-existing local branch onto origin's state.
            for branch in get_branches(local_only=True):
                if branch in new_branches:
                    continue
                with inbranch(branch):
                    execute_command('git pull --rebase origin ' + branch)
            execute_command('git push --all', silent=False)
            try:
                execute_command('git push --tags', silent=False)
            except subprocess.CalledProcessError:
                warning("Force pushing tags from clone to working repository, "
                        "you will have to force push back to origin...")
                execute_command('git push --force --tags', silent=False)
    self.clean_up()
def commit(self):
    """Replay changes made in the safety clone back onto the working copy.

    Pre-existing branches are rebased onto the clone's state, then all
    branches and tags are pushed back; tags fall back to a force push.
    """
    if self.disabled:
        return
    info(
        fmt("@{bf}<==@| Command successful, committing changes to working copy"
            ))
    if get_current_branch() is None:
        error("Could not determine current branch.", exit=True)
    with inbranch(get_commit_hash(get_current_branch())):
        with change_directory(self.clone_dir):
            # Remove the branches that already existed before the command,
            # leaving only the branches the command created.
            created = get_branches()
            for known in self.current_branches:
                if known in created:
                    created.remove(known)
            # Rebase every pre-existing local branch onto origin's state.
            for local_branch in get_branches(local_only=True):
                if local_branch in created:
                    continue
                with inbranch(local_branch):
                    execute_command('git pull --rebase origin ' + local_branch)
            execute_command('git push --all', silent=False)
            try:
                execute_command('git push --tags', silent=False)
            except subprocess.CalledProcessError:
                warning(
                    "Force pushing tags from clone to working repository, "
                    "you will have to force push back to origin...")
                execute_command('git push --force --tags', silent=False)
    self.clean_up()
def maybe_continue(default='y', msg='Continue'):
    """Prompts the user for continuation.

    :param default: answer assumed on empty input ('y' or 'n')
    :param msg: question to display
    :returns: True to continue, False on 'n' or 'q'
    """
    default = default.lower()
    msg = "@!{msg} ".format(msg=sanitize(msg))
    if default == 'y':
        msg += "@{yf}[Y/n]? @|"
    else:
        msg += "@{yf}[y/N]? @|"
    msg = fmt(msg)
    while True:
        response = safe_input(msg)
        if not response:
            response = default
        response = response.lower()
        if response not in ['y', 'n', 'q']:
            # Fix: corrected 'Reponse' typo and list 'q', which is accepted.
            error_msg = 'Response `' + response + '` was not recognized, ' \
                        'please use one of y, Y, n, N, q.'
            error(error_msg)
        else:
            break
    # 'q' (quit) is treated like a negative answer.
    if response in ['n', 'q']:
        return False
    return True
def main(sysargs):
    """Clone a repository, add a Travis config and README, and open a PR.

    :param sysargs: command line argument list (None means sys.argv[1:])
    :returns: 1 on checkout failure, otherwise None
    """
    parser = get_argument_parser()
    # Bug fix: honor the sysargs parameter instead of always reading
    # sys.argv[1:] — argparse itself falls back to sys.argv[1:] when
    # given None, so the default behavior is unchanged.
    args = parser.parse_args(sysargs)
    repository = args.repository
    verbose = args.verbose
    # checkout target repository
    info("Manually clone the repository")
    info("  git clone {0}".format(repository))
    git = get_vcs_client('git', tempfile.mktemp())
    info(fmt("@{gf}@!==> @|") + "Fetching repository from '{0}'".format(repository))
    ret = git.checkout(repository, verbose=verbose)
    if not ret:
        error("Could not checkout {}".format(repository))
        return 1
    # get the github repository info
    base_org, base_repo = get_gh_info(git.get_url())
    # get correct repo info (case sensitive)
    gh = get_github_interface()
    base_org, base_repo = get_gh_info(gh.get_repo(base_org, base_repo)['html_url'])
    base_branch = git.get_branches()[0]  # is this ok?
    with change_directory(git.get_path()):
        # write travis yaml
        write_travis_yaml()
        # write readme
        # NOTE: write_readme_md receives every local variable by name via
        # **locals() — do not rename locals in this function.
        write_readme_md(**locals())
        # create pull request
        open_pull_request(base_org=base_org, base_repo=base_repo,
                          base_branch=base_branch, new_branch="add_travis")
def get_repo_uri(repository, distro):
    """Return the release repository url for ``repository`` in ``distro``.

    Falls back to prompting the user when the distribution file has no
    entry; aborts the process when no url can be determined.
    """
    url = None
    # Fetch the distro file
    distribution_file = get_distribution_file(distro)
    repos = distribution_file.repositories
    listed = repository in repos and repos[repository].release_repository is not None
    if listed:
        url = repos[repository].release_repository.url
    else:
        error("Specified repository '{0}' is not in the distribution file located at '{1}'"
              .format(repository, get_disitrbution_file_url(distro)))
        matches = difflib.get_close_matches(repository, repos)
        if matches:
            info(fmt("@{yf}Did you mean one of these: '" + "', '".join([m for m in matches]) + "'?"))
    if not url:
        # No entry in the distro file: ask the user directly.
        info("Could not determine release repository url for repository '{0}' of distro '{1}'"
             .format(repository, distro))
        info("You can continue the release process by manually specifying the location of the RELEASE repository.")
        info("To be clear this is the url of the RELEASE repository not the upstream repository.")
        try:
            url = safe_input('Release repository url [press enter to abort]: ')
        except (KeyboardInterrupt, EOFError):
            url = None
            info('', use_prefix=False)
        if not url:
            error("No release repository url given, aborting.", exit=True)
        # Remember the user-provided url for later distro-file updates.
        global _user_provided_release_url
        _user_provided_release_url = url
    return url
def _my_run(cmd):
    """Echo ``cmd`` and execute it through the shell, ignoring exit status."""
    # Sanitize the command before handing it to fmt() so literal '@'
    # sequences cannot be misread as color markup (consistent with the
    # other _my_run in this file).
    info(fmt("@{bf}@!==> @|@!" + sanitize(str(cmd))))
    from subprocess import call
    # NOTE(review): the return code is deliberately ignored (best-effort
    # execution); switch to check_call if failures should abort.
    # Removed: dead 'out = None' variable, the unreachable 'if out:' branch,
    # and the commented-out check_output call.
    call(cmd, shell=True)
def generate_ros_distro_diff(track, repository, distro, distro_file_url=ROS_DISTRO_FILE):
    """Show a unified diff of the rosdistro file changes for this release.

    :param track: name of the release track
    :param repository: repository entry to update in the distro file
    :param distro: ROS distro name, substituted into distro_file_url
    :param distro_file_url: url template of the rosdistro file
    """
    distro_file_url = distro_file_url.format(distro)
    distro_file_raw = fetch_distro_file(distro_file_url)
    # NOTE(review): yaml.load on fetched content — consider yaml.safe_load
    # if the source is not fully trusted.
    distro_file = yaml.load(distro_file_raw)
    # Package discovery must happen on the 'upstream' branch, where the
    # released sources (and their package.xml files) live.
    with inbranch('upstream'):
        # Check for package.xml(s)
        try:
            from catkin_pkg.packages import find_packages
        except ImportError:
            error("catkin_pkg was not detected, please install it.",
                  file=sys.stderr, exit=True)
        packages = find_packages(os.getcwd())
    if len(packages) == 0:
        warning("No packages found, will not generate 'package: path' entries for rosdistro.")
    track_dict = get_tracks_dict_raw()['tracks'][track]
    last_version = track_dict['last_version']
    release_inc = track_dict['release_inc']
    # Version in the distro file is '<upstream version>-<release increment>'.
    distro_file['repositories'][repository]['version'] = '{0}-{1}'.format(last_version, release_inc)
    # A lone package at '.' means the repository *is* the package; only emit
    # a 'packages' map when there are several packages or a nested one.
    if packages and (len(packages) > 1 or packages.keys()[0] != '.'):
        distro_file['repositories'][repository]['packages'] = {}
        for path, package in packages.iteritems():
            distro_file['repositories'][repository]['packages'][package.name] = path
    distro_file_name = distro_file_url.split('/')[-1]
    distro_dump = yaml.dump(distro_file, indent=2, default_flow_style=False)
    # Bug fix: unified_diff returns a generator, which is always truthy, so
    # the "no changes" branch below was unreachable; materialize it first.
    udiff = list(difflib.unified_diff(distro_file_raw.splitlines(),
                                      distro_dump.splitlines(),
                                      fromfile=distro_file_name,
                                      tofile=distro_file_name))
    if udiff:
        info("Unified diff for the ROS distro file located at '{0}':".format(distro_file_url))
        # Colorize the diff. Header lines ('@@', '+++', '---') already carry
        # a trailing newline from difflib's default lineterm; content lines
        # (from splitlines()) do not, hence the selective '\n' appends.
        for line in udiff:
            if line.startswith('@@'):
                line = fmt('@{cf}' + line)
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                line = fmt('@{gf}' + line)
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                line = fmt('@{rf}' + line)
            if line.startswith(' '):
                line += '\n'
            info(line, use_prefix=False, end='')
    else:
        warning("This release resulted in no changes to the ROS distro file...")
def place_template_files(path, gbp=False):
    """Stage the RPM generator's template files under ``<path>/rpm``."""
    info(fmt("@!@{bf}==>@| Placing templates files in the 'rpm' folder."))
    rpm_path = os.path.join(path, 'rpm')
    # Make sure the destination folder exists before copying templates in.
    if not os.path.exists(rpm_path):
        os.makedirs(rpm_path)
    __place_template_folder('bloom.generators.rpm', 'templates', rpm_path, gbp)
def get_release_repo(repository, distro):
    """Return a cached vcs client with the release repository checked out.

    :param repository: repository name in the distro file
    :param distro: ROS distro name
    """
    global _repositories
    url = get_repo_uri(repository, distro)
    # Bug fix: the cache lookup tested _repositories.values() (the vcs
    # client objects) against a repository *name*, which was always true,
    # so every call re-cloned the repository. Test the keys instead.
    if repository not in _repositories:
        temp_dir = tempfile.mkdtemp()
        _repositories[repository] = get_vcs_client("git", temp_dir)
        info(fmt("@{gf}@!==> @|") +
             "Fetching '{0}' repository from '{1}'".format(repository, url))
        _repositories[repository].checkout(url, "master")
    return _repositories[repository]
def place_template_files(path, gbp=False):
    """Stage the debian generator's template files under ``<path>/debian``."""
    info(fmt("@!@{bf}==>@| Placing templates files in the 'debian' folder."))
    debian_path = os.path.join(path, 'debian')
    # Make sure the destination folder exists before copying templates in.
    if not os.path.exists(debian_path):
        os.makedirs(debian_path)
    __place_template_folder('bloom.generators.debian', 'templates', debian_path, gbp)
def metapackage_check(self, path, pkg):
    """Abort the release when ``pkg`` is a metapackage that fails validation."""
    if not pkg.is_metapackage():
        return
    try:
        metapackage.validate_metapackage(path, pkg)
    except metapackage.InvalidMetapackage as e:
        warning("Invalid metapackage:")
        warning(" %s\n" % str(e))
        error(fmt("Refusing to release invalid metapackage '@|%s@{rf}@!', metapackage requirements:\n @|%s" % (pkg.name, metapackage.DEFINITION_URL)), exit=True)
def place_template_files(path):
    """Stage the arch generator's template files under ``<path>/arch``."""
    info(fmt("@!@{bf}==>@| Placing templates files in the 'arch' folder."))
    arch_path = os.path.join(path, 'arch')
    # Make sure the destination folder exists before copying templates in.
    if not os.path.exists(arch_path):
        os.makedirs(arch_path)
    __place_template_folder('bloom.generators.arch', 'templates', arch_path)
def create_fork(org, repo, user, password):
    """Fork ``org/repo`` into ``user``'s account via the GitHub REST API.

    :param org: owner of the repository to fork
    :param repo: repository name
    :param user: account that will own the fork
    :param password: password for basic authentication
    """
    msg = "Creating fork: {0}:{1} => {2}:{1}".format(org, repo, user)
    info(fmt("@{bf}@!==> @|@!" + str(msg)))
    headers = {}
    # NOTE(review): b64encode on a str only works on Python 2; Python 3
    # requires bytes (encode before, decode after) — confirm the supported
    # interpreter version.
    headers["Authorization"] = "Basic {0}".format(base64.b64encode('{0}:{1}'.format(user, password)))
    conn = HTTPSConnection('api.github.com')
    # Empty JSON body: fork into the authenticated user's own account.
    conn.request('POST', '/repos/{0}/{1}/forks'.format(org, repo), json.dumps({}), headers)
    resp = conn.getresponse()
    # GitHub answers 202 Accepted when the fork has been queued.
    if str(resp.status) != '202':
        error("Failed to create fork: {0} {1}".format(resp.status, resp.reason), exit=True)
def place_template_files(path, build_type, gbp=False):
    """Stage the RPM templates for ``build_type`` under ``<path>/rpm``."""
    info(fmt("@!@{bf}==>@| Placing templates files in the 'rpm' folder."))
    rpm_path = os.path.join(path, 'rpm')
    # Make sure the destination folder exists before copying templates in.
    if not os.path.exists(rpm_path):
        os.makedirs(rpm_path)
    # Templates are grouped per build type (e.g. templates/cmake).
    __place_template_folder('bloom.generators.rpm',
                            os.path.join('templates', build_type),
                            rpm_path, gbp)
def place_template_files(path, build_type, gbp=False):
    """Copy the debian template tree for ``build_type`` into ``<path>/debian``.

    Any existing debian folder is removed first so the copy is fresh.
    """
    info(fmt("@!@{bf}==>@| Placing templates files in the 'debian' folder."))
    debian_path = os.path.join(path, 'debian')
    # Remove the debian folder if it exists
    if os.path.exists(debian_path):
        shutil.rmtree(debian_path)
    # Place template files.
    # Fix: the previously computed `templates` variable (with a redundant
    # os.curdir join) was never used — copytree re-joined the path itself.
    templates = os.path.join('templates', build_type)
    shutil.copytree(templates, debian_path)
    if not gbp:
        # gbp.conf is only needed for git-buildpackage workflows.
        os.remove(os.path.join(debian_path, 'gbp.conf.em'))
def get_release_repo(repository, distro):
    """Return a cached vcs client with the release repository checked out.

    :param repository: repository name in the distro file
    :param distro: ROS distro name
    """
    global _repositories
    url = get_repo_uri(repository, distro)
    # Bug fix: the cache lookup tested _repositories.values() (the vcs
    # client objects) against a repository *name*, which was always true,
    # so every call re-cloned the repository. Test the keys instead.
    if repository not in _repositories:
        temp_dir = tempfile.mkdtemp()
        _repositories[repository] = get_vcs_client('git', temp_dir)
        info(
            fmt("@{gf}@!==> @|") +
            "Fetching '{0}' repository from '{1}'".format(repository, url))
        _repositories[repository].checkout(url, 'master')
    return _repositories[repository]
def version_check(version):
    """Warn when ``version`` is not newer than the latest upstream tag."""
    last_tag = get_last_tag_by_version()
    if not last_tag:
        # Nothing released yet; nothing to compare against.
        return
    last_tag_version = last_tag.split('/')[-1]
    info(fmt("The latest upstream tag in the release repository is '@!{0}@|'."
             .format(last_tag)))
    # Ensure the new version is greater than the last tag
    if parse_version(version) >= parse_version(last_tag_version):
        return
    warning("""\
Version discrepancy:
The upstream version '{0}' isn't newer than upstream version '{1}'.
""".format(version, last_tag_version))
def create_fork(org, repo, user, password):
    """Fork ``org/repo`` into ``user``'s account via the GitHub REST API.

    :param org: owner of the repository to fork
    :param repo: repository name
    :param user: account that will own the fork
    :param password: password for basic authentication
    """
    msg = "Creating fork: {0}:{1} => {2}:{1}".format(org, repo, user)
    info(fmt("@{bf}@!==> @|@!" + str(msg)))
    headers = {}
    # NOTE(review): b64encode on a str and the httplib module are Python 2
    # only — confirm the supported interpreter version.
    headers["Authorization"] = "Basic {0}".format(
        base64.b64encode('{0}:{1}'.format(user, password)))
    conn = httplib.HTTPSConnection('api.github.com')
    # Empty JSON body: fork into the authenticated user's own account.
    conn.request('POST', '/repos/{0}/{1}/forks'.format(org, repo),
                 json.dumps({}), headers)
    resp = conn.getresponse()
    # GitHub answers 202 Accepted when the fork has been queued.
    if str(resp.status) != '202':
        error("Failed to create fork: {0} {1}".format(resp.status, resp.reason), exit=True)
def get_repo_uri(repository, distro):
    """Return the release repository url for ``repository`` in ``distro``.

    When the distribution file has no entry, prompt the user for a url
    (validating github.com addresses) and remember it in the module-level
    ``_user_provided_release_url``. Aborts when no url can be determined.
    """
    url = None
    # Fetch the distro file
    distribution_file = get_distribution_file(distro)
    if repository in distribution_file.repositories and \
       distribution_file.repositories[repository].release_repository is not None:
        url = distribution_file.repositories[repository].release_repository.url
    else:
        error("Specified repository '{0}' is not in the distribution file located at '{1}'"
              .format(repository, get_disitrbution_file_url(distro)))
        # Help the user spot a typo in the repository name.
        matches = difflib.get_close_matches(repository, distribution_file.repositories)
        if matches:
            info(fmt("@{yf}Did you mean one of these: '" + "', '".join([m for m in matches]) + "'?"))
    if url is None:
        # No entry in the distro file: ask the user directly.
        info("Could not determine release repository url for repository '{0}' of distro '{1}'"
             .format(repository, distro))
        info("You can continue the release process by manually specifying the location of the RELEASE repository.")
        info("To be clear this is the url of the RELEASE repository not the upstream repository.")
        info("For release repositories on github, you should provide the `https://` url which should end in `.git`.")
        while True:
            try:
                url = safe_input('Release repository url [press enter to abort]: ')
            except (KeyboardInterrupt, EOFError):
                url = None
                info('', use_prefix=False)
            if not url:
                url = None
                error("No release repository url given, aborting.", exit=True)
                break
            # If github.com address, validate it
            if url is not None and 'github.com' in url:
                valid_url = True
                if not url.endswith('.git') and not url.endswith('.git/'):
                    valid_url = False
                    warning("The release repository url you provided does not end in `.git`.")
                if not url.startswith('https://'):
                    valid_url = False
                    warning("The release repository url you provided is not a `https://` address.")
                if not valid_url:
                    # Let the user retry, or accept the address as-is.
                    warning("Would you like to enter the address again?")
                    if maybe_continue():
                        url = None
                        continue
                    else:
                        info("Very well, the address '{url}' will be used as is.".format(**locals()))
                        break
            break
    # Remember the determined url for later distro-file updates.
    global _user_provided_release_url
    _user_provided_release_url = url
    return url
def __init__(self, directory=None, track_all=True):
    """Set up a transactional safety clone of a git working repository.

    :param directory: repository to clone (defaults to the cwd)
    :param track_all: when True, create local branches for all remotes
        before snapshotting the branch list
    :raises RuntimeError: if ``directory`` is not a git repository
    """
    # The safety mechanism can be globally disabled (e.g. by a nested run).
    self.disabled = get_git_clone_state()
    if self.disabled:
        warning('Skipping transactional safety mechanism, be careful...')
        return
    self.tmp_dir = None
    self.directory = directory if directory is not None else os.getcwd()
    if get_root(directory) is None:
        raise RuntimeError("Provided directory, '" + str(directory) +
                           "', is not a git repository")
    self.track_all = track_all
    if self.track_all:
        # Must happen before get_branches() so the snapshot below includes
        # the newly tracked branches.
        track_branches(directory=directory)
    # Snapshot of branches that existed before the command runs; commit()
    # uses it to tell pre-existing branches from newly created ones.
    self.current_branches = get_branches()
    self.tmp_dir = tempfile.mkdtemp()
    self.clone_dir = os.path.join(self.tmp_dir, 'clone')
    self.repo_url = 'file://' + os.path.abspath(self.directory)
    info(fmt("@!@{gf}+++@| Cloning working copy for safety"))
    execute_command('git clone ' + self.repo_url + ' ' + self.clone_dir)
def __init__(self, directory=None, track_all=True):
    """Snapshot a git repository so it can be restored if something fails.

    Same as the non-quiet variant, but a separate "quiet" flag suppresses the
    warning that is normally printed when the safety mechanism is disabled.

    :param directory: path to the git repository to protect; None means cwd
    :param track_all: when True, track all branches before cloning
    :raises RuntimeError: if the directory is not a git repository
    """
    # Global kill switch, plus a companion flag that silences the warning.
    self.disabled = get_git_clone_state()
    self.disabled_quiet = get_git_clone_state_quiet()
    if self.disabled:
        if not self.disabled_quiet:
            warning('Skipping transactional safety mechanism, be careful...')
        return
    self.tmp_dir = None
    self.directory = directory if directory is not None else os.getcwd()
    # NOTE(review): get_root receives the raw ``directory`` argument (possibly
    # None), not the resolved self.directory — presumably it defaults to cwd.
    if get_root(directory) is None:
        raise RuntimeError("Provided directory, '" + str(directory) + "', is not a git repository")
    self.track_all = track_all
    if self.track_all:
        track_branches(directory=directory)
    # Record the branch list so later code can compare against it.
    self.current_branches = get_branches()
    # Clone into a fresh temp dir; file:// keeps the clone entirely local.
    self.tmp_dir = tempfile.mkdtemp()
    self.clone_dir = os.path.join(self.tmp_dir, 'clone')
    self.repo_url = 'file://' + os.path.abspath(self.directory)
    info(fmt("@!@{gf}+++@| Cloning working copy for safety"))
    execute_command('git clone ' + self.repo_url + ' ' + self.clone_dir)
def get_repo_uri(repository, distro): url = None # Fetch the distro file distribution_file = get_distribution_file(distro) if repository in distribution_file.repositories and \ distribution_file.repositories[repository].release_repository is not None: url = distribution_file.repositories[repository].release_repository.url else: error("Specified repository '{0}' is not in the distribution file located at '{1}'" .format(repository, get_disitrbution_file_url(distro))) matches = difflib.get_close_matches(repository, distribution_file.repositories) if matches: info(fmt("@{yf}Did you mean one of these: '" + "', '".join([m for m in matches]) + "'?")) if url is None: info("Could not determine release repository url for repository '{0}' of distro '{1}'" .format(repository, distro)) info("You can continue the release process by manually specifying the location of the RELEASE repository.") info("To be clear this is the url of the RELEASE repository not the upstream repository.") info("For release repositories on GitHub, you should provide the `https://` url which should end in `.git`.") info("Here is the url for a typical release repository on GitHub: https://github.com/ros-gbp/rviz-release.git") while True: try: url = safe_input('Release repository url [press enter to abort]: ') except (KeyboardInterrupt, EOFError): url = None info('', use_prefix=False) if not url: url = None error("No release repository url given, aborting.", exit=True) break if url is None: break # If github.com address, validate it if not validate_github_url(url, 'release'): continue break global _user_provided_release_url _user_provided_release_url = url return url
def main(sysargs):
    """Clone the target repository, add Travis CI config, and open a pull request.

    :param sysargs: list of command-line arguments to parse (e.g. sys.argv[1:]);
        None makes argparse fall back to sys.argv[1:] itself.
    :returns: 1 on checkout failure, otherwise None.
    """
    parser = get_argument_parser()
    # BUGFIX: previously this parsed sys.argv[1:] unconditionally, silently
    # ignoring the ``sysargs`` parameter.  argparse treats None as sys.argv[1:].
    args = parser.parse_args(sysargs)
    repository = args.repository
    verbose = args.verbose
    # checkout target repository
    info("Manually clone the repository")
    info(" git clone {0}".format(repository))
    # NOTE(review): tempfile.mktemp is race-prone, but the vcs client appears
    # to expect a not-yet-existing path — confirm before switching to mkdtemp.
    git = get_vcs_client('git', tempfile.mktemp())
    info(fmt("@{gf}@!==> @|") + "Fetching repository from '{0}'".format(repository))
    ret = git.checkout(repository, verbose=verbose)
    if not ret:
        error("Could not checkout {}".format(repository))
        return 1
    # get the github repository info
    base_org, base_repo = get_gh_info(git.get_url())
    # get correct repo info (case sensitive)
    gh = get_github_interface()
    base_org, base_repo = get_gh_info(gh.get_repo(base_org, base_repo)['html_url'])
    base_branch = git.get_branches()[0]  # is this ok?
    with change_directory(git.get_path()):
        # write travis yaml
        write_travis_yaml()
        # write readme
        write_readme_md(**locals())
        # create pull request
        open_pull_request(base_org=base_org, base_repo=base_repo,
                          base_branch=base_branch, new_branch="add_travis")
def get_repo_uri(repository, distro): url = None # Fetch the distro file release_file = get_release_file(distro) if repository in release_file.repositories: url = release_file.repositories[repository].url else: error( "Specified repository '{0}' is not in the release file located at '{1}'" .format(repository, get_release_file_url(distro))) matches = difflib.get_close_matches(repository, release_file.repositories) if matches: info( fmt("@{yf}Did you mean one of these: '" + "', '".join([m for m in matches]) + "'?")) if not url: info( "Could not determine release repository url for repository '{0}' of distro '{1}'" .format(repository, distro)) info( "You can continue the release process by manually specifying the location of the RELEASE repository." ) info( "To be clear this is the url of the RELEASE repository not the upstream repository." ) try: url = raw_input('Release repository url [press enter to abort]: ') except (KeyboardInterrupt, EOFError): url = None info('', use_prefix=False) if not url: error("No release repository url given, aborting.", exit=True) global _user_provided_release_url _user_provided_release_url = url return url
def generate_ros_distro_diff(track, repository, distro):
    """Update the in-memory release file for ``repository`` and show a unified diff.

    Rebuilds the repository entry (url, release tag template, version,
    package subfolders) from the local track state, diffs the regenerated
    YAML against the upstream release file, prints a colorized diff, and
    writes the raw patch to a temp file.

    :param track: name of the release track to read version info from
    :param repository: name of the repository entry to update
    :param distro: name of the ROS distribution
    :returns: the updated ReleaseFile object, or None if nothing changed
    """
    release_dict = get_release_file(distro).get_data()
    # Get packages
    packages = get_packages()
    if len(packages) == 0:
        warning("No packages found, will not generate 'package: path' entries for rosdistro.")
    # Get version
    track_dict = get_tracks_dict_raw()["tracks"][track]
    last_version = track_dict["last_version"]
    release_inc = track_dict["release_inc"]
    version = "{0}-{1}".format(last_version, release_inc)
    # Create a repository if there isn't already one
    if repository not in release_dict["repositories"]:
        global _user_provided_release_url
        release_dict["repositories"][repository] = {"url": _user_provided_release_url}
    # Update the repository
    repo = release_dict["repositories"][repository]
    if "tags" not in repo:
        repo["tags"] = {}
    # Release tag template; {package}/{version} are filled in by rosdistro later.
    repo["tags"]["release"] = "release/%s/{package}/{version}" % distro
    repo["version"] = version
    if "packages" not in repo:
        repo["packages"] = {}
    for path, pkg in packages.items():
        if pkg.name not in repo["packages"]:
            repo["packages"][pkg.name] = {}
        repo["packages"][pkg.name]["subfolder"] = path  # This will be shortened
    # Remove any missing packages
    for pkg_name in dict(repo["packages"]):
        if pkg_name not in [p.name for p in packages.values()]:
            if pkg_name in repo["packages"]:
                del repo["packages"][pkg_name]
    # Do the diff
    distro_file_name = get_relative_release_file_path(distro)
    updated_release_file = rosdistro.ReleaseFile("distro", release_dict)
    distro_dump = yaml_from_release_file(updated_release_file)
    distro_file_raw = load_url_to_file_handle(get_release_file_url(distro)).read()
    if distro_file_raw != distro_dump:
        udiff = difflib.unified_diff(
            distro_file_raw.splitlines(), distro_dump.splitlines(),
            fromfile=distro_file_name, tofile=distro_file_name)
        temp_dir = tempfile.mkdtemp()
        udiff_file = os.path.join(temp_dir, repository + "-" + version + ".patch")
        udiff_raw = ""
        info("Unified diff for the ROS distro file located at '{0}':".format(udiff_file))
        # Colorize the diff for the console while accumulating the raw patch.
        # NOTE(review): once a line matches "@@" it is reassigned to the
        # colorized form, so the later "+"/"-"/" " checks will not also match
        # that line — presumably intentional; confirm before restructuring.
        for line in udiff:
            if line.startswith("@@"):
                udiff_raw += line
                line = fmt("@{cf}" + sanitize(line))
            if line.startswith("+"):
                if not line.startswith("+++"):
                    line += "\n"
                udiff_raw += line
                line = fmt("@{gf}" + sanitize(line))
            if line.startswith("-"):
                if not line.startswith("---"):
                    line += "\n"
                udiff_raw += line
                line = fmt("@{rf}" + sanitize(line))
            if line.startswith(" "):
                line += "\n"
                udiff_raw += line
            info(line, use_prefix=False, end="")
        with open(udiff_file, "w+") as f:
            f.write(udiff_raw)
        return updated_release_file
    else:
        warning("This release resulted in no changes to the ROS distro file...")
        return None
def perform_release(repository, track, distro, new_track, interactive, pretend, pull_request_only):
    """Run a full bloom release for ``repository`` and open a rosdistro pull request.

    Works inside the cloned release repository: upgrades legacy bloom.conf,
    resolves/creates the release track, optionally runs the actual release,
    then proposes a pull request against the distribution file.

    :param repository: name of the repository being released
    :param track: release track name (may be empty to auto-select)
    :param distro: name of the ROS distribution
    :param new_track: when True, create (or edit) the track first
    :param interactive: when True, prompt before pushing
    :param pretend: when True, dry-run the release and pushes
    :param pull_request_only: when True, skip the release and only open the PR
    """
    release_repo = get_release_repo(repository, distro)
    with change_directory(release_repo.get_path()):
        # Check to see if the old bloom.conf exists
        if check_for_bloom_conf(repository):
            # Convert to a track
            info("Old bloom.conf file detected.")
            info(fmt("@{gf}@!==> @|Converting to bloom.conf to track"))
            convert_old_bloom_conf(None if new_track else distro)
        upconvert_bloom_to_config_branch()
        # Check that the track is valid
        tracks_dict = get_tracks_dict_raw()
        # If new_track, create the new track first
        if new_track:
            if not track:
                error("You must specify a track when creating a new one.", exit=True)
            if track in tracks_dict['tracks']:
                warning("Track '{0}' exists, editing...".format(track))
                edit_track_cmd(track)
                tracks_dict = get_tracks_dict_raw()
            else:
                # Create a new track called <track>,
                # copying an existing track if possible,
                # and overriding the ros_distro
                warning("Creating track '{0}'...".format(track))
                overrides = {'ros_distro': distro}
                new_track_cmd(track, copy_track='', overrides=overrides)
                tracks_dict = get_tracks_dict_raw()
        if track and track not in tracks_dict['tracks']:
            error("Given track '{0}' does not exist in release repository."
                  .format(track))
            error("Available tracks: " + str(tracks_dict['tracks'].keys()),
                  exit=True)
        elif not track:
            # No track given: only proceed when exactly one track exists.
            tracks = tracks_dict['tracks'].keys()
            # Error out if there are no tracks
            if len(tracks) == 0:
                error("Release repository has no tracks.")
                info("Manually clone the repository:")
                info(" git clone {0}".format(release_repo.get_url()))
                info("And then create a new track:")
                info(" git-bloom-config new <track name>")
                error("Run again after creating a track.", exit=True)
            # Error out if there is more than one track
            if len(tracks) != 1:
                error("No track specified and there is not just one track.")
                error("Please specify one of the available tracks: " +
                      str(tracks), exit=True)
            # Get the only track
            track = tracks[0]
        start_summary(track)
        if not pull_request_only:
            _perform_release(repository, track, distro, new_track,
                             interactive, pretend, tracks_dict)
        # Propose github pull request
        # NOTE: get_disitrbution_file_url is the project's (typo'd) helper name.
        info(fmt("@{gf}@!==> @|") +
             "Generating pull request to distro file located at '{0}'"
             .format(get_disitrbution_file_url(distro)))
        try:
            pull_request_url = open_pull_request(track, repository, distro)
            if pull_request_url:
                info(fmt(_success) + "Pull request opened at: {0}".format(pull_request_url))
                # Auto-open the browser only on macOS and when not opted out.
                if 'BLOOM_NO_WEBBROWSER' not in os.environ and platform.system() in ['Darwin']:
                    webbrowser.open(pull_request_url)
            else:
                info("The release of your packages was successful, but the pull request failed.")
                info("Please manually open a pull request by editing the file here: '{0}'"
                     .format(get_disitrbution_file_url(distro)))
                info(fmt(_error) + "No pull request opened.")
        except Exception as e:
            debug(traceback.format_exc())
            error("Failed to open pull request: {0} - {1}".format(type(e).__name__, e), exit=True)
def _perform_release(repository, track, distro, new_track, interactive, pretend, tracks_dict):
    """Run the release itself: update the track, release, commit summary, and push.

    :param repository: name of the repository being released
    :param track: release track name (must exist in ``tracks_dict``)
    :param distro: name of the ROS distribution
    :param new_track: unused here; kept for signature parity with the caller
    :param interactive: when True, ask before pushing
    :param pretend: when True, pass --pretend / --dry-run to subcommands
    :param tracks_dict: parsed tracks configuration (mutated and written back)
    """
    # Ensure the track is complete
    track_dict = tracks_dict['tracks'][track]
    track_dict = update_track(track_dict)
    tracks_dict['tracks'][track] = track_dict
    # Set the release repositories' remote if given
    release_repo_url = track_dict.get('release_repo_url', None)
    if release_repo_url is not None:
        info(fmt("@{gf}@!==> @|") +
             "Setting release repository remote url to '{0}'"
             .format(release_repo_url))
        cmd = 'git remote set-url origin ' + release_repo_url
        info(fmt("@{bf}@!==> @|@!") + str(cmd))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Setting the remote url failed, exiting.", exit=True)
    # Check for push permissions
    try:
        info(fmt(
            "@{gf}@!==> @|Testing for push permission on release repository"
        ))
        cmd = 'git remote -v'
        info(fmt("@{bf}@!==> @|@!") + str(cmd))
        subprocess.check_call(cmd, shell=True)
        # Dry run will authenticate, but not push
        cmd = 'git push --dry-run'
        info(fmt("@{bf}@!==> @|@!") + str(cmd))
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        error("Cannot push to remote release repository.", exit=True)
    # Write the track config before releasing
    write_tracks_dict_raw(tracks_dict)
    # Run the release
    info(fmt("@{gf}@!==> @|") +
         "Releasing '{0}' using release track '{1}'"
         .format(repository, track))
    cmd = 'git-bloom-release ' + str(track)
    if pretend:
        cmd += ' --pretend'
    info(fmt("@{bf}@!==> @|@!" + str(cmd)))
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        error("Release failed, exiting.", exit=True)
    info(fmt(_success) +
         "Released '{0}' using release track '{1}' successfully"
         .format(repository, track))
    # Commit the summary
    update_summary(track, repository, distro)
    commit_summary()
    # Check for pushing
    if interactive:
        info("Releasing complete, push?")
        if not maybe_continue():
            error("User answered no to continue prompt, aborting.", exit=True)
    # Push changes to the repository
    info(fmt("@{gf}@!==> @|") +
         "Pushing changes to release repository for '{0}'"
         .format(repository))
    cmd = 'git push --all'
    if pretend:
        cmd += ' --dry-run'
    info(fmt("@{bf}@!==> @|@!" + str(cmd)))
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        # Offer one retry with --force before giving up.
        error("Pushing changes failed, would you like to add '--force' to 'git push --all'?")
        if not maybe_continue():
            error("Pushing changes failed, exiting.", exit=True)
        cmd += ' --force'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, exiting.", exit=True)
    info(fmt(_success) + "Pushed changes successfully")
    # Push tags to the repository
    info(fmt("@{gf}@!==> @|") +
         "Pushing tags to release repository for '{0}'"
         .format(repository))
    cmd = 'git push --tags'
    if pretend:
        cmd += ' --dry-run'
    info(fmt("@{bf}@!==> @|@!" + str(cmd)))
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        # Offer one retry with --force before giving up.
        error("Pushing changes failed, would you like to add '--force' to 'git push --tags'?")
        if not maybe_continue():
            error("Pushing tags failed, exiting.", exit=True)
        cmd += ' --force'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing tags failed, exiting.", exit=True)
    info(fmt(_success) + "Pushed tags successfully")
def open_pull_request(track, repository, distro):
    """Open a github pull request updating the rosdistro distribution file.

    Generates the updated distribution file, finds or creates a fork of the
    rosdistro repository on the user's github account, commits the change on
    a fresh ``bloom-<repository>-<n>`` branch, pushes it to the fork, and
    opens the pull request via the github API (oauth token).

    :param track: name of the release track (used for the diff/changelog)
    :param repository: name of the repository being released
    :param distro: name of the ROS distribution
    :returns: pull request data from the github API, None when there was no
        change to propose, or nothing when the PR is skipped/not possible
    """
    # Get the diff
    distribution_file = get_distribution_file(distro)
    if repository in distribution_file.repositories and \
       distribution_file.repositories[repository].release_repository is not None:
        orig_version = distribution_file.repositories[repository].release_repository.version
    else:
        orig_version = None
    updated_distribution_file = generate_ros_distro_diff(track, repository, distro)
    if updated_distribution_file is None:
        # There were no changes, no pull request required
        return None
    version = updated_distribution_file.repositories[repository].release_repository.version
    updated_distro_file_yaml = yaml_from_distribution_file(updated_distribution_file)
    # Determine if the distro file is hosted on github...
    # NOTE: get_disitrbution_file_url is the project's (typo'd) helper name.
    base_org, base_repo, base_branch, base_path = get_gh_info(get_disitrbution_file_url(distro))
    if None in [base_org, base_repo, base_branch, base_path]:
        warning("Automated pull request only available via github.com")
        return
    # Get the github interface
    gh = get_github_interface()
    # Determine the head org/repo for the pull request
    head_org = gh.username  # The head org will always be gh user
    head_repo = None
    # Check if the github user and the base org are the same
    if gh.username == base_org:
        # If it is, then a fork is not necessary
        head_repo = base_repo
    else:
        info(fmt("@{bf}@!==> @|@!Checking on github for a fork to make the pull request from..."))
        # It is not, so a fork will be required
        # Check if a fork already exists on the user's account with the same name
        base_full_name = '{base_org}/{base_repo}'.format(**locals())
        try:
            repo_data = gh.get_repo(gh.username, base_repo)
            if repo_data.get('fork', False):  # Check if it is a fork
                # If it is, check that it is a fork of the destination
                parent = repo_data.get('parent', {}).get('full_name', None)
                if parent == base_full_name:
                    # This is a valid fork
                    head_repo = base_repo
        except GithubException as exc:
            debug("Received GithubException while checking for fork: {exc}".format(**locals()))
            pass  # 404 or unauthorized, but unauthorized should have been caught above
        # If not head_repo, then either the fork has a different name, or there isn't one
        if head_repo is None:
            info(fmt("@{bf}@!==> @|@!" +
                     "{head_org}/{base_repo} is not a fork, searching...".format(**locals())))
            # First we should look at every repository for the user and see if they are a fork
            user_repos = gh.list_repos(gh.username)
            for repo in user_repos:
                # If it is a fork and the parent is base_org/base_repo
                if repo.get('fork', False) and repo.get('parent', {}).get('full_name', '') == base_full_name:
                    # Then this is a valid fork
                    head_repo = repo['name']
        # If not head_repo still, a fork does not exist and must be created
        if head_repo is None:
            warning("Could not find a fork of {base_full_name} on the {gh.username} Github account."
                    .format(**locals()))
            warning("Would you like to create one now?")
            if not maybe_continue():
                warning("Skipping the pull request...")
                return
            # Create a fork
            try:
                gh.create_fork(base_org, base_repo)  # Will raise if not successful
                head_repo = base_repo
            except GithubException as exc:
                error("Aborting pull request: {0}".format(exc))
                return
    info(fmt("@{bf}@!==> @|@!" +
             "Using this fork to make a pull request from: {head_org}/{head_repo}".format(**locals())))
    # Clone the fork
    info(fmt("@{bf}@!==> @|@!" + "Cloning {0}/{1}...".format(head_org, head_repo)))
    new_branch = None
    title = "{0}: {1} in '{2}' [bloom]".format(repository, version, base_path)
    body = """\
Increasing version of package(s) in repository `{0}` to `{2}`:
- distro file: `{3}`
- bloom version: `{4}`
- previous version for package: `{1}`
""".format(repository, orig_version or 'null', version, base_path, bloom.__version__)
    body += get_changelog_summary(generate_release_tag(distro))
    with temporary_directory() as temp_dir:
        # Helper: echo a (sanitized) command or message, then run the command.
        def _my_run(cmd, msg=None):
            if msg:
                info(fmt("@{bf}@!==> @|@!" + sanitize(msg)))
            else:
                info(fmt("@{bf}@!==> @|@!" + sanitize(str(cmd))))
            from subprocess import check_call
            check_call(cmd, shell=True)
        # Use the oauth token to clone
        rosdistro_url = 'https://{gh.token}:[email protected]/{base_org}/{base_repo}.git'.format(**locals())
        rosdistro_fork_url = 'https://{gh.token}:[email protected]/{head_org}/{head_repo}.git'.format(**locals())
        _my_run('mkdir -p {base_repo}'.format(**locals()))
        with change_directory(base_repo):
            _my_run('git init')
            # Pick the first free bloom-<repository>-<count> branch name.
            branches = [x['name'] for x in gh.list_branches(head_org, head_repo)]
            new_branch = 'bloom-{repository}-{count}'
            count = 0
            while new_branch.format(repository=repository, count=count) in branches:
                count += 1
            new_branch = new_branch.format(repository=repository, count=count)
            # Final check
            info(fmt("@{cf}Pull Request Title: @{yf}" + title))
            info(fmt("@{cf}Pull Request Body : \n@{yf}" + body))
            # NOTE(review): "{head_repo}/{head_repo}" below looks like it should
            # be "{head_org}/{head_repo}" — display-only string; confirm.
            msg = fmt("@!Open a @|@{cf}pull request@| @!@{kf}from@| @!'@|@!@{bf}" +
                      "{head_repo}/{head_repo}:{new_branch}".format(**locals()) +
                      "@|@!' @!@{kf}into@| @!'@|@!@{bf}" +
                      "{base_org}/{base_repo}:{base_branch}".format(**locals()) +
                      "@|@!'?")
            info(msg)
            if not maybe_continue():
                warning("Skipping the pull request...")
                return
            _my_run('git checkout -b {new_branch}'.format(**locals()))
            _my_run('git pull {rosdistro_url} {base_branch}'.format(**locals()),
                    "Pulling latest rosdistro branch")
            with open('{0}'.format(base_path), 'w') as f:
                info(fmt("@{bf}@!==> @|@!Writing new distribution file: ") + str(base_path))
                f.write(updated_distro_file_yaml)
            _my_run('git add {0}'.format(base_path))
            _my_run('git commit -m "{0}"'.format(title))
            _my_run('git push {rosdistro_fork_url} {new_branch}'.format(**locals()),
                    "Pushing changes to fork")
    # Open the pull request
    return gh.create_pull_request(base_org, base_repo, base_branch, head_org, new_branch, title, body)
def open_pull_request(track, repository, distro):
    """Open a github pull request updating the rosdistro release file.

    Older, Python 2 era variant: authenticates with username/password,
    expects the user to have (or create) a fork literally named ``rosdistro``,
    clones it, commits the regenerated release file on a fresh
    ``bloom-<repository>-<n>`` branch, and opens the pull request.

    :param track: name of the release track (used for the diff)
    :param repository: name of the repository being released
    :param distro: name of the ROS distribution
    :returns: pull request data, None when there was no change to propose,
        or nothing when the PR is skipped/not possible
    """
    # Get the diff
    release_file = get_release_file(distro)
    if repository in release_file.repositories:
        orig_version = release_file.repositories[repository].version
    else:
        orig_version = None
    updated_release_file = generate_ros_distro_diff(track, repository, distro)
    if updated_release_file is None:
        # There were no changes, no pull request required
        return None
    version = updated_release_file.repositories[repository].version
    updated_distro_file = yaml_from_release_file(updated_release_file)
    # Determine if the distro file is hosted on github...
    gh_org, gh_repo, gh_branch, gh_path = get_gh_info(
        get_release_file_url(distro))
    if None in [gh_org, gh_repo, gh_branch, gh_path]:
        warning("Automated pull request only available via github.com")
        return
    # Get the github user name: cached in ~/.bloom_user, else the OS user,
    # and always confirmable interactively.
    gh_username = None
    bloom_user_path = os.path.join(os.path.expanduser('~'), '.bloom_user')
    if os.path.exists(bloom_user_path):
        with open(bloom_user_path, 'r') as f:
            gh_username = f.read().strip()
    gh_username = gh_username or getpass.getuser()
    response = raw_input("github user name [{0}]: ".format(gh_username))
    if response:
        gh_username = response
        info("Would you like bloom to store your github user name (~/.bloom_user)?")
        if maybe_continue():
            with open(bloom_user_path, 'w') as f:
                f.write(gh_username)
        else:
            # Write a sentinel so the user is not asked again.
            with open(bloom_user_path, 'w') as f:
                f.write(' ')
            warning("If you want to have bloom store it in the future remove the ~/.bloom_user file.")
    # Get the github password
    gh_password = getpass.getpass("github password (This is not stored):")
    if not gh_password or not gh_username:
        error("Either the github username or github password is not set.")
        warning("Skipping the pull request...")
        return
    # Check for fork
    info(fmt("@{bf}@!==> @|@!Checking for rosdistro fork on github..."))
    gh_user_repos = fetch_github_api(
        'https://api.github.com/users/{0}/repos'.format(gh_username),
        use_pagination=True)
    if gh_user_repos is None:
        # BUGFIX: the literal had no replacement field, so .format(gh_username)
        # was a no-op and the username was never shown in the error message.
        error("Failed to get a list of repositories for user: '{0}'".format(
            gh_username))
        warning("Skipping the pull request...")
        return
    if 'rosdistro' not in [x['name'] for x in gh_user_repos if 'name' in x]:
        warning("Github user '{0}' does not have a fork ".format(gh_username) +
                "of the {0}:{1} repository, create one?".format(gh_org, gh_repo))
        if not maybe_continue():
            warning("Skipping the pull request...")
            return
        # Create a fork
        create_fork(gh_org, gh_repo, gh_username, gh_password)
    # Clone the fork
    info(fmt("@{bf}@!==> @|@!" +
             "Cloning {0}/{1}...".format(gh_username, gh_repo)))
    temp_dir = tempfile.mkdtemp()
    new_branch = None
    title = "{0}: {1} in '{2}' [bloom]".format(repository, version, gh_path)
    body = """\
Increasing version of package(s) in repository `{0}`:
- previous version: `{1}`
- new version: `{2}`
- distro file: `{3}`
- bloom version: `{4}`
""".format(repository, orig_version or 'null', version, gh_path, bloom.__version__)
    with change_directory(temp_dir):
        # Helper: echo the command, then run it (best-effort, exit code ignored).
        def _my_run(cmd):
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            # out = check_output(cmd, stderr=subprocess.STDOUT, shell=True)
            out = None
            from subprocess import call
            call(cmd, shell=True)
            if out:
                info(out, use_prefix=False)
        _my_run('git clone https://github.com/{0}/{1}.git'.format(
            gh_username, gh_repo))
        with change_directory(gh_repo):
            _my_run('git remote add bloom https://github.com/{0}/{1}.git'.format(
                gh_org, gh_repo))
            _my_run('git remote update')
            _my_run('git fetch')
            track_branches()
            # Pick the first free bloom-<repository>-<count> branch name.
            branches = get_branches()
            new_branch = 'bloom-{repository}-{count}'
            count = 0
            while new_branch.format(repository=repository, count=count) in branches:
                count += 1
            new_branch = new_branch.format(repository=repository, count=count)
            # Final check
            info(fmt("@{cf}Pull Request Title: @{yf}" + title))
            info(fmt("@{cf}Pull Request Body : \n@{yf}" + body))
            msg = fmt(
                "@!Open a @|@{cf}pull request@| @!@{kf}from@| @!'@|@!@{bf}" +
                "{gh_username}/{gh_repo}:{new_branch}".format(**locals()) +
                "@|@!' @!@{kf}into@| @!'@|@!@{bf}" +
                "{gh_org}/{gh_repo}:{gh_branch}".format(**locals()) +
                "@|@!'?")
            info(msg)
            if not maybe_continue():
                warning("Skipping the pull request...")
                return
            _my_run('git checkout -b {0} bloom/{1}'.format(new_branch, gh_branch))
            with open('{0}'.format(gh_path), 'w') as f:
                info(fmt("@{bf}@!==> @|@!Writing new distribution file: ") +
                     str(gh_path))
                f.write(updated_distro_file)
            _my_run('git add {0}'.format(gh_path))
            _my_run('git commit -m "{0}"'.format(title))
            _my_run('git push origin {0}'.format(new_branch))
    # Open the pull request
    return create_pull_request(gh_org, gh_repo, gh_username, gh_password,
                               gh_branch, new_branch, title, body)
def open_pull_request(base_org, base_repo, base_branch, new_branch="add_travis", title="update travis.yml"):
    """Push the locally staged Travis CI changes to the user's fork and open a PR.

    Expects the cwd to be a checkout of ``base_org/base_repo`` with
    ``.travis.yml`` and ``README.md`` already written.  Requires an existing
    fork of the base repository on the authenticated user's account.

    :param base_org: owner of the repository the PR targets
    :param base_repo: name of the repository the PR targets
    :param base_branch: branch the PR targets
    :param new_branch: branch name to create on the fork
    :param title: pull request title (also used as the commit message)
    :returns: 0 on success, 1 when no fork/branch is usable
    """
    # Helper: echo a (sanitized) command or message, then run the command.
    def _my_run(cmd, msg=None):
        if msg:
            info(fmt("@{bf}@!==> @|@!" + sanitize(msg)))
        else:
            info(fmt("@{bf}@!==> @|@!" + sanitize(str(cmd))))
        from subprocess import check_call
        check_call(cmd, shell=True)
    # get the github interface
    gh = get_github_interface()
    head_org = gh.username  # The head org will always be gh user
    head_repo = None
    # Check if a fork already exists on the user's account
    repo_forks = gh.list_forks(base_org, base_repo)
    user_forks = [
        r for r in repo_forks
        if r.get('owner', {}).get('login', '') == gh.username
    ]
    # github allows only 1 fork per org as far as I know. We just take the first one.
    head_repo = user_forks[0] if user_forks else None
    if head_repo is None:
        error(
            "Could not find a fork of {base_org}/{base_repo} on the {gh.username} GitHub account."
            .format(**locals()))
        error("Please create fork repository manually", exit=True)
        return 1
    head_repo = head_repo.get('name', '')
    if new_branch in [
            x['name'] for x in gh.list_branches(head_org, head_repo)
    ]:
        error("Please remove {new_branch} in {head_org}/{head_repo} manually".
              format(**locals()))
        return 1
    # Push via an oauth-token https url so no credential prompt is needed.
    target_fork_url = 'https://{gh.token}:[email protected]/{head_org}/{head_repo}.git'.format(
        **locals())
    _my_run('git --no-pager diff'.format(**locals()))
    _my_run('git checkout -b {new_branch}'.format(**locals()))
    _my_run('git add .travis.yml README.md'.format(**locals()))
    _my_run('git commit -m "{title}"'.format(**locals()))
    _my_run('git push {target_fork_url} {new_branch}'.format(**locals()),
            "Pushing changes to fork")
    # Open the pull request
    body = '''
Created travis.yml using
- https://github.com/ros-infrastructure/ros_buildfarm/blob/master/doc/jobs/devel_jobs.rst
- https://github.com/ros-infrastructure/ros_buildfarm/blob/master/doc/jobs/prerelease_jobs.rst
Please activate GitHub - TravisCI integration to enable this test https://github.com/apps/travis-ci
'''
    info(
        fmt("@{bf}@!==> @|@!" +
            "Using this fork to make a pull request from: {head_org}/{head_repo}"
            .format(**locals())))
    try:
        pull_request_url = gh.create_pull_request(base_org, base_repo,
                                                  base_branch, head_org,
                                                  new_branch, title, body)
        if pull_request_url:
            info(
                fmt(_success) +
                "Pull request opened at: {0}".format(pull_request_url))
        else:
            info("The release of your packages was successful, but the pull request failed.")
            # BUGFIX: this branch previously formatted the message with
            # get_distribution_file_url(distro), but ``distro`` is not defined
            # in this function, so every failed PR raised a NameError instead
            # of printing guidance.  Use names that are in scope.
            info("Please manually open a pull request from '{head_org}/{head_repo}:{new_branch}' into '{base_org}/{base_repo}:{base_branch}'."
                 .format(**locals()))
            info(fmt(_error) + "No pull request opened.")
    except Exception as e:
        debug(traceback.format_exc())
        error("Failed to open pull request: {0} - {1}".format(
            type(e).__name__, e), exit=True)
    return 0
def main(args=None, get_subs_fn=None):
    """Generate debian packaging files for the single catkin package in a path.

    Places and/or processes the debian template files for the package found
    at ``args.package_path`` (default: cwd), substituting values produced by
    ``get_subs_fn``.

    :param args: parsed argparse namespace or None (all defaults)
    :param get_subs_fn: substitution provider; defaults to ``get_subs``
    """
    get_subs_fn = get_subs_fn or get_subs
    _place_template_files = True
    _process_template_files = True
    package_path = os.getcwd()
    skip_package_names = None
    if args is not None:
        package_path = args.package_path or os.getcwd()
        _place_template_files = args.place_template_files
        _process_template_files = args.process_template_files
        # create dependency-skip package name lists
        skip_name_path = args.skip_package_names or ''
        if os.path.isfile(os.path.abspath(skip_name_path)):
            with open(os.path.abspath(skip_name_path), 'r') as f:
                skip_package_names = []
                for line in f:
                    # comment string
                    if line[:1] == '#':
                        continue
                    # add lists
                    skip_package_names.append(line.strip())
    pkgs_dict = find_packages(package_path)
    if len(pkgs_dict) == 0:
        sys.exit("No packages found in path: '{0}'".format(package_path))
    if len(pkgs_dict) > 1:
        sys.exit("Multiple packages found, "
                 "this tool only supports one package at a time.")
    os_data = create_default_installer_context().get_os_name_and_version()
    os_name, os_version = os_data
    ros_distro = os.environ.get('ROS_DISTRO', 'indigo')
    # Allow args overrides
    # BUGFIX: these attributes were previously read unconditionally, so calling
    # main() with the documented default args=None raised AttributeError.
    if args is not None:
        os_name = args.os_name or os_name
        os_version = args.os_version or os_version
        ros_distro = args.ros_distro or ros_distro
    # ``native`` flag: assumed False when no args were given — TODO confirm
    # against the argument parser's default.
    native = args.native if args is not None else False
    # Summarize
    info(fmt("@!@{gf}==> @|") +
         fmt("Generating debs for @{cf}%s:%s@| for package(s) %s" %
             (os_name, os_version, [p.name for p in pkgs_dict.values()])))
    for path, pkg in pkgs_dict.items():
        template_files = None
        try:
            subs = get_subs_fn(pkg, os_name, os_version, ros_distro,
                               native, skip_package_names)
            if _place_template_files:
                # Place template files
                place_template_files(path, pkg.get_build_type())
            if _process_template_files:
                # Just process existing template files
                template_files = process_template_files(path, subs)
            if not _place_template_files and not _process_template_files:
                # If neither, do both
                place_template_files(path, pkg.get_build_type())
                template_files = process_template_files(path, subs)
            if template_files is not None:
                # Clean up the consumed template files.
                for template_file in template_files:
                    os.remove(os.path.normpath(template_file))
        # BUGFIX: this handler must come before ``except Exception`` —
        # EOFError subclasses Exception, so it could never reach the
        # (KeyboardInterrupt, EOFError) clause when listed second.
        except (KeyboardInterrupt, EOFError):
            sys.exit(1)
        except Exception as exc:
            debug(traceback.format_exc())
            error(type(exc).__name__ + ": " + str(exc), exit=True)
def perform_release(repository, track, distro, new_track, interactive, pretend, ssh_pull_request):
    """Run a full bloom release of ``repository`` for ``distro``.

    Converts legacy bloom.conf setups, resolves/creates the release track,
    runs git-bloom-release, pushes branches and tags, and finally opens a
    rosdistro pull request.

    :param new_track: create (or edit) the named track before releasing
    :param interactive: prompt before pushing
    :param pretend: pass --pretend / --dry-run to the underlying commands
    :param ssh_pull_request: forwarded to open_pull_request
    """
    release_repo = get_release_repo(repository, distro)
    with change_directory(release_repo.get_path()):
        # Check to see if the old bloom.conf exists
        if check_for_bloom_conf(repository):
            # Convert to a track
            info("Old bloom.conf file detected.")
            info(fmt("@{gf}@!==> @|Converting to bloom.conf to track"))
            convert_old_bloom_conf(None if new_track else distro)
        upconvert_bloom_to_config_branch()
        # Check that the track is valid
        tracks_dict = get_tracks_dict_raw()
        # If new_track, create the new track first
        if new_track:
            if not track:
                error("You must specify a track when creating a new one.", exit=True)
            if track in tracks_dict['tracks']:
                warning("Track '{0}' exists, editing...".format(track))
                edit_track_cmd(track)
                tracks_dict = get_tracks_dict_raw()
            else:
                # Create a new track called <track>,
                # copying an existing track if possible,
                # and overriding the ros_distro
                warning("Creating track '{0}'...".format(track))
                overrides = {'ros_distro': distro}
                new_track_cmd(track, copy_track='', overrides=overrides)
                tracks_dict = get_tracks_dict_raw()
        if track and track not in tracks_dict['tracks']:
            error("Given track '{0}' does not exist in release repository."
                  .format(track))
            error("Available tracks: " + str(tracks_dict['tracks'].keys()),
                  exit=True)
        elif not track:
            # list() keeps this working on both Python 2 and 3 (indexing below)
            tracks = list(tracks_dict['tracks'].keys())
            # Error out if there are no tracks
            if len(tracks) == 0:
                error("Release repository has no tracks.")
                info("Manually clone the repository:")
                info(" git clone {0}".format(release_repo.get_url()))
                info("And then create a new track:")
                info(" git-bloom-config new <track name>")
                error("Run again after creating a track.", exit=True)
            # Error out if there is more than one track
            if len(tracks) != 1:
                error("No track specified and there is not just one track.")
                error("Please specify one of the available tracks: " +
                      str(tracks), exit=True)
            # Get the only track
            track = tracks[0]
        start_summary(track)
        # Ensure the track is complete
        track_dict = tracks_dict['tracks'][track]
        track_dict = update_track(track_dict)
        tracks_dict['tracks'][track] = track_dict
        # Set the release repositories' remote if given
        release_repo_url = track_dict.get('release_repo_url', None)
        if release_repo_url is not None:
            info(fmt("@{gf}@!==> @|") +
                 "Setting release repository remote url to '{0}'"
                 .format(release_repo_url))
            cmd = 'git remote set-url origin ' + release_repo_url
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Setting the remote url failed, exiting.", exit=True)
        # Check for push permissions
        try:
            info(fmt(
                "@{gf}@!==> @|Testing for push permission on release repository"
            ))
            cmd = 'git remote -v'
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            subprocess.check_call(cmd, shell=True)
            # Dry run will authenticate, but not push
            cmd = 'git push --dry-run'
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Cannot push to remote release repository.", exit=True)
        # Write the track config before releasing
        write_tracks_dict_raw(tracks_dict)
        # Run the release
        info(fmt("@{gf}@!==> @|") +
             "Releasing '{0}' using release track '{1}'"
             .format(repository, track))
        cmd = 'git-bloom-release ' + str(track)
        if pretend:
            cmd += ' --pretend'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Release failed, exiting.", exit=True)
        info(fmt(_success) +
             "Released '{0}' using release track '{1}' successfully"
             .format(repository, track))
        # Commit the summary
        update_summary(track, repository, distro)
        commit_summary()
        # Check for pushing
        if interactive:
            info("Releasing complete, push?")
            if not maybe_continue():
                error("User answered no to continue prompt, aborting.",
                      exit=True)
        # Push changes to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing changes to release repository for '{0}'"
             .format(repository))
        cmd = 'git push --all'
        if pretend:
            cmd += ' --dry-run'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, would you like to add '--force' to 'git push --all'?")
            if not maybe_continue():
                error("Pushing changes failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing changes failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed changes successfully")
        # Push tags to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing tags to release repository for '{0}'"
             .format(repository))
        cmd = 'git push --tags'
        if pretend:
            cmd += ' --dry-run'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, would you like to add '--force' to 'git push --tags'?")
            if not maybe_continue():
                error("Pushing tags failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing tags failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed tags successfully")
        # Propose github pull request
        info(fmt("@{gf}@!==> @|") +
             "Generating pull request to distro file located at '{0}'"
             .format(get_disitrbution_file_url(distro)))
        try:
            pull_request_url = open_pull_request(track, repository, distro, ssh_pull_request)
            if pull_request_url:
                info(fmt(_success) + "Pull request opened at: {0}".format(pull_request_url))
                # BUGFIX: the env-var check was inverted — the browser was
                # opened only when the user HAD set BLOOM_NO_WEBBROWSER.
                # Auto-open only when the opt-out is absent (macOS only).
                if 'BLOOM_NO_WEBBROWSER' not in os.environ and platform.system() in ['Darwin']:
                    webbrowser.open(pull_request_url)
            else:
                info("The release of your packages was successful, but the pull request failed.")
                info("Please manually open a pull request by editing the file here: '{0}'"
                     .format(get_disitrbution_file_url(distro)))
                info(fmt(_error) + "No pull request opened.")
        except Exception as e:
            debug(traceback.format_exc())
            error("Failed to open pull request: {0} - {1}".format(type(e).__name__, e),
                  exit=True)
def generate_ros_distro_diff(track, repository, distro):
    """Update the rosdistro release file entry for ``repository`` and show a diff.

    Builds the new repository entry (version, release tags, package
    subfolders) from the current track state, writes a unified diff of the
    release file to a temp ``.patch`` file, and prints it colorized.

    :returns: the updated ``rosdistro.ReleaseFile``, or None when the
        release produced no change to the distro file
    """
    release_dict = get_release_file(distro).get_data()
    # Get packages
    packages = get_packages()
    if len(packages) == 0:
        warning("No packages found, will not generate 'package: path' entries for rosdistro.")
    # Get version
    track_dict = get_tracks_dict_raw()['tracks'][track]
    last_version = track_dict['last_version']
    release_inc = track_dict['release_inc']
    version = '{0}-{1}'.format(last_version, release_inc)
    # Create a repository if there isn't already one
    if repository not in release_dict['repositories']:
        global _user_provided_release_url
        release_dict['repositories'][repository] = {
            'url': _user_provided_release_url
        }
    # Update the repository
    repo = release_dict['repositories'][repository]
    if 'tags' not in repo:
        repo['tags'] = {}
    repo['tags']['release'] = 'release/%s/{package}/{version}' % distro
    repo['version'] = version
    if 'packages' not in repo:
        repo['packages'] = {}
    for path, pkg in packages.items():
        if pkg.name not in repo['packages']:
            repo['packages'][pkg.name] = {}
        repo['packages'][pkg.name]['subfolder'] = path  # This will be shortened
    # Remove any missing packages.
    # Hoisted: the name list was previously rebuilt on every loop iteration
    # (matches the sibling DistributionFile variant of this function).
    packages_being_released = [p.name for p in packages.values()]
    for pkg_name in list(repo['packages']):
        if pkg_name not in packages_being_released:
            del repo['packages'][pkg_name]
    # Do the diff
    distro_file_name = get_relative_release_file_path(distro)
    updated_release_file = rosdistro.ReleaseFile('distro', release_dict)
    distro_dump = yaml_from_release_file(updated_release_file)
    distro_file_raw = load_url_to_file_handle(
        get_release_file_url(distro)).read()
    if distro_file_raw != distro_dump:
        udiff = difflib.unified_diff(distro_file_raw.splitlines(),
                                     distro_dump.splitlines(),
                                     fromfile=distro_file_name,
                                     tofile=distro_file_name)
        temp_dir = tempfile.mkdtemp()
        udiff_file = os.path.join(temp_dir,
                                  repository + '-' + version + '.patch')
        udiff_raw = ''
        info("Unified diff for the ROS distro file located at '{0}':".format(
            udiff_file))
        # difflib appends the line terminator to control lines ('@@', '+++',
        # '---') but not to content lines (inputs came from splitlines()),
        # hence the per-case '\n' handling below.
        for line in udiff:
            if line.startswith('@@'):
                udiff_raw += line
                line = fmt('@{cf}' + sanitize(line))
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{gf}' + sanitize(line))
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{rf}' + sanitize(line))
            if line.startswith(' '):
                line += '\n'
                udiff_raw += line
            info(line, use_prefix=False, end='')
        with open(udiff_file, 'w+') as f:
            f.write(udiff_raw)
        return updated_release_file
    else:
        warning(
            "This release resulted in no changes to the ROS distro file...")
        return None
def execute_track(track, track_dict, release_inc, pretend=True, debug=False, fast=False):
    """Execute each templated action of a release track as a subprocess.

    :param pretend: print the actions without running them
    :param debug: export DEBUG=1 to the child environment
    :param fast: export BLOOM_UNSAFE=1 to the child environment
    """
    info("Processing release track settings for '{0}'".format(track))
    settings = process_track_settings(track_dict, release_inc)
    # setup extra settings
    archive_dir_path = tempfile.mkdtemp()
    settings['archive_dir_path'] = archive_dir_path
    if settings['release_tag'] != ':{none}':
        archive_file = '{name}-{release_tag}.tar.gz'.format(**settings)
    else:
        archive_file = '{name}.tar.gz'.format(**settings)
    settings['archive_path'] = os.path.join(archive_dir_path, archive_file)
    # execute actions
    info("", use_prefix=False)
    info("Executing release track '{0}'".format(track))
    for action in track_dict['actions']:
        if 'bloom-export-upstream' in action and settings['vcs_type'] == 'tar':
            warning("Explicitly skipping bloom-export-upstream for tar.")
            settings['archive_path'] = settings['vcs_uri']
            continue
        templated_action = template_str(action, settings)
        info(fmt("@{bf}@!==> @|@!" + sanitize(str(templated_action))))
        if pretend:
            continue
        stdout = None
        stderr = None
        if bloom.util._quiet:
            # Capture output so it can be replayed via info() below
            stdout = subprocess.PIPE
            stderr = subprocess.STDOUT
        if debug and 'DEBUG' not in os.environ:
            os.environ['DEBUG'] = '1'
        if fast and 'BLOOM_UNSAFE' not in os.environ:
            os.environ['BLOOM_UNSAFE'] = '1'
        templated_action = templated_action.split()
        templated_action[0] = find_full_path(templated_action[0])
        p = subprocess.Popen(templated_action, stdout=stdout, stderr=stderr,
                             shell=False, env=os.environ.copy())
        out, _ = p.communicate()
        if bloom.util._quiet:
            info(out, use_prefix=False)
        ret = p.returncode
        # BUGFIX: was `ret > 0`, which silently treated a child killed by a
        # signal (negative returncode on POSIX) as success.
        if ret != 0:
            if 'bloom-generate' in templated_action[0] and \
                    ret == code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO:
                error(fmt(_error + "The following generator action reported that it is missing one or more"))
                error(fmt(" @|rosdep keys, but that the key exists in other platforms:"))
                error(fmt("@|'@!{0}'@|").format(templated_action))
                info('', use_prefix=False)
                error(fmt("@|If you are @!@_@{rf}absolutely@| sure that this key is unavailable for the platform in"))
                error(fmt("@|question, the generator can be skipped and you can proceed with the release."))
                if maybe_continue('n', 'Skip generator action and continue with release'):
                    info("\nAction skipped, continuing with release.\n")
                    continue
                info('', use_prefix=False)
            error(fmt(_error + "Error running command '@!{0}'@|").format(templated_action),
                  exit=True)
        info('', use_prefix=False)
    if not pretend:
        # Update the release_inc
        tracks_dict = get_tracks_dict_raw()
        tracks_dict['tracks'][track]['release_inc'] = settings['release_inc']
        tracks_dict['tracks'][track]['last_version'] = settings['version']
        # if release tag is set to ask and a custom value is used
        if settings['version'] != settings['release_tag']:
            tracks_dict['tracks'][track]['last_release'] = settings['release_tag']
        write_tracks_dict_raw(
            tracks_dict, 'Updating release inc to: ' + str(settings['release_inc']))
def generate_ros_distro_diff(track, repository, distro):
    """Update the rosdistro distribution file entry for ``repository``.

    Builds the new ``release`` section (version, release tag, sorted package
    list), prints and writes a unified diff to a temp ``.patch`` file, and
    verifies the diff touches no repository other than the one released.

    :returns: the updated ``rosdistro.DistributionFile``, or None when the
        release produced no change to the distro file
    """
    global _user_provided_release_url
    distribution_dict = get_distribution_file(distro).get_data()
    # Get packages
    packages = get_packages()
    if len(packages) == 0:
        warning("No packages found, will not generate 'package: path' entries for rosdistro.")
    # Get version
    track_dict = get_tracks_dict_raw()['tracks'][track]
    last_version = track_dict['last_version']
    release_inc = track_dict['release_inc']
    version = '{0}-{1}'.format(last_version, release_inc)
    # Create a repository if there isn't already one
    if repository not in distribution_dict['repositories']:
        distribution_dict['repositories'][repository] = {}
    # Create a release entry if there isn't already one
    if 'release' not in distribution_dict['repositories'][repository]:
        distribution_dict['repositories'][repository]['release'] = {
            'url': _user_provided_release_url
        }
    # Update the repository
    repo = distribution_dict['repositories'][repository]['release']
    if 'tags' not in repo:
        repo['tags'] = {}
    repo['tags']['release'] = generate_release_tag(distro)
    repo['version'] = version
    if 'packages' not in repo:
        repo['packages'] = []
    for path, pkg in packages.items():
        if pkg.name not in repo['packages']:
            repo['packages'].append(pkg.name)
    # Remove any missing packages
    packages_being_released = [p.name for p in packages.values()]
    for pkg_name in list(repo['packages']):
        if pkg_name not in packages_being_released:
            repo['packages'].remove(pkg_name)
    repo['packages'].sort()
    # Do the diff
    distro_file_name = get_relative_distribution_file_path(distro)
    updated_distribution_file = rosdistro.DistributionFile(distro, distribution_dict)
    distro_dump = yaml_from_distribution_file(updated_distribution_file)
    distro_file_raw = load_url_to_file_handle(get_disitrbution_file_url(distro)).read()
    if distro_file_raw != distro_dump:
        # Calculate the diff
        udiff = difflib.unified_diff(distro_file_raw.splitlines(),
                                     distro_dump.splitlines(),
                                     fromfile=distro_file_name,
                                     tofile=distro_file_name)
        temp_dir = tempfile.mkdtemp()
        udiff_file = os.path.join(temp_dir,
                                  repository + '-' + version + '.patch')
        udiff_raw = ''
        info("Unified diff for the ROS distro file located at '{0}':".format(udiff_file))
        # difflib appends the line terminator to control lines ('@@', '+++',
        # '---') but not to content lines (inputs came from splitlines()),
        # hence the per-case '\n' handling below.
        for line in udiff:
            if line.startswith('@@'):
                udiff_raw += line
                line = fmt('@{cf}' + sanitize(line))
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{gf}' + sanitize(line))
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{rf}' + sanitize(line))
            if line.startswith(' '):
                line += '\n'
                udiff_raw += line
            info(line, use_prefix=False, end='')
        # Assert that only this repository is being changed.
        # SECURITY/BUGFIX: use safe_load — the raw file comes from a remote
        # URL and plain yaml.load can execute arbitrary constructors.
        distro_file_yaml = yaml.safe_load(distro_file_raw)
        distro_yaml = yaml.safe_load(distro_dump)
        if 'repositories' in distro_file_yaml:
            distro_file_repos = distro_file_yaml['repositories']
            # renamed from `repo` to avoid shadowing the release dict above
            for repo_name in distro_yaml['repositories']:
                if repo_name == repository:
                    continue
                if repo_name not in distro_file_repos or \
                        distro_file_repos[repo_name] != distro_yaml['repositories'][repo_name]:
                    error("This generated pull request modifies a repository entry other than the one being released.")
                    error("This likely occured because the upstream rosdistro changed during this release.")
                    error("This pull request will abort, please re-run this command with the -p option to try again.",
                          exit=True)
        # Write the diff out to file
        with open(udiff_file, 'w+') as f:
            f.write(udiff_raw)
        # Return the diff
        return updated_distribution_file
    else:
        warning("This release resulted in no changes to the ROS distro file...")
        return None
def open_pull_request(track, repository, distro):
    """Open a GitHub pull request against the rosdistro release file.

    Generates the distro diff, ensures the user has a rosdistro fork
    (creating one if needed), clones it, commits the updated file on a
    fresh ``bloom-<repo>-<n>`` branch, pushes, and opens the PR.

    :returns: the pull request URL on success, None when skipped or when
        no changes were generated
    """
    # Get the diff
    release_file = get_release_file(distro)
    if repository in release_file.repositories:
        orig_version = release_file.repositories[repository].version
    else:
        orig_version = None
    updated_release_file = generate_ros_distro_diff(track, repository, distro)
    if updated_release_file is None:
        # There were no changes, no pull request required
        return None
    version = updated_release_file.repositories[repository].version
    updated_distro_file = yaml_from_release_file(updated_release_file)
    # Determine if the distro file is hosted on github...
    gh_org, gh_repo, gh_branch, gh_path = get_gh_info(get_release_file_url(distro))
    if None in [gh_org, gh_repo, gh_branch, gh_path]:
        warning("Automated pull request only available via github.com")
        return
    # Get the github user name
    gh_username = None
    bloom_user_path = os.path.join(os.path.expanduser("~"), ".bloom_user")
    if os.path.exists(bloom_user_path):
        with open(bloom_user_path, "r") as f:
            gh_username = f.read().strip()
    gh_username = gh_username or getpass.getuser()
    response = raw_input("github user name [{0}]: ".format(gh_username))
    if response:
        gh_username = response
        info("Would you like bloom to store your github user name (~/.bloom_user)?")
        if maybe_continue():
            with open(bloom_user_path, "w") as f:
                f.write(gh_username)
        else:
            with open(bloom_user_path, "w") as f:
                f.write(" ")
            warning("If you want to have bloom store it in the future remove the ~/.bloom_user file.")
    # Get the github password
    gh_password = getpass.getpass("github password (This is not stored):")
    if not gh_password or not gh_username:
        error("Either the github username or github password is not set.")
        warning("Skipping the pull request...")
        return
    # Check for fork
    info(fmt("@{bf}@!==> @|@!Checking for rosdistro fork on github..."))
    gh_user_repos = fetch_github_api(
        "https://api.github.com/users/{0}/repos".format(gh_username),
        use_pagination=True)
    if gh_user_repos is None:
        # BUGFIX: the message previously contained no format placeholder,
        # so the username was never interpolated into the error.
        error("Failed to get a list of repositories for user: '{0}'".format(gh_username))
        warning("Skipping the pull request...")
        return
    if "rosdistro" not in [x["name"] for x in gh_user_repos if "name" in x]:
        warning(
            "Github user '{0}' does not have a fork ".format(gh_username) +
            "of the {0}:{1} repository, create one?".format(gh_org, gh_repo)
        )
        if not maybe_continue():
            warning("Skipping the pull request...")
            return
        # Create a fork
        create_fork(gh_org, gh_repo, gh_username, gh_password)
    # Clone the fork
    info(fmt("@{bf}@!==> @|@!" + "Cloning {0}/{1}...".format(gh_username, gh_repo)))
    temp_dir = tempfile.mkdtemp()
    new_branch = None
    title = "{0}: {1} in '{2}' [bloom]".format(repository, version, gh_path)
    body = """\
Increasing version of package(s) in repository `{0}`:
- previous version: `{1}`
- new version: `{2}`
- distro file: `{3}`
- bloom version: `{4}`
""".format(
        repository, orig_version or "null", version, gh_path, bloom.__version__
    )
    with change_directory(temp_dir):
        def _my_run(cmd):
            # Echo then run the shell command; output is not captured
            # (the check_output variant is intentionally disabled).
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            # out = check_output(cmd, stderr=subprocess.STDOUT, shell=True)
            out = None
            from subprocess import call
            call(cmd, shell=True)
            if out:
                info(out, use_prefix=False)
        _my_run("git clone https://github.com/{0}/{1}.git".format(gh_username, gh_repo))
        with change_directory(gh_repo):
            _my_run("git remote add bloom https://github.com/{0}/{1}.git".format(gh_org, gh_repo))
            _my_run("git remote update")
            _my_run("git fetch")
            track_branches()
            branches = get_branches()
            # Find the first unused bloom-<repository>-<count> branch name
            new_branch = "bloom-{repository}-{count}"
            count = 0
            while new_branch.format(repository=repository, count=count) in branches:
                count += 1
            new_branch = new_branch.format(repository=repository, count=count)
            # Final check
            info(fmt("@{cf}Pull Request Title: @{yf}" + title))
            info(fmt("@{cf}Pull Request Body : \n@{yf}" + body))
            msg = fmt(
                "@!Open a @|@{cf}pull request@| @!@{kf}from@| @!'@|@!@{bf}" +
                "{gh_username}/{gh_repo}:{new_branch}".format(**locals()) +
                "@|@!' @!@{kf}into@| @!'@|@!@{bf}" +
                "{gh_org}/{gh_repo}:{gh_branch}".format(**locals()) +
                "@|@!'?"
            )
            info(msg)
            if not maybe_continue():
                warning("Skipping the pull request...")
                return
            _my_run("git checkout -b {0} bloom/{1}".format(new_branch, gh_branch))
            with open("{0}".format(gh_path), "w") as f:
                info(fmt("@{bf}@!==> @|@!Writing new distribution file: ") + str(gh_path))
                f.write(updated_distro_file)
            _my_run("git add {0}".format(gh_path))
            _my_run('git commit -m "{0}"'.format(title))
            _my_run("git push origin {0}".format(new_branch))
    # Open the pull request
    return create_pull_request(gh_org, gh_repo, gh_username, gh_password,
                               gh_branch, new_branch, title, body)
def perform_release(repository, track, distro, new_track, interactive, pretend):
    """Run a full bloom release of ``repository`` for ``distro``.

    Converts legacy bloom.conf setups, resolves/creates the release track,
    runs git-bloom-release, pushes branches and tags, and finally opens a
    rosdistro pull request.

    :param new_track: create (or edit) the named track before releasing
    :param interactive: prompt before pushing
    :param pretend: pass --pretend / --dry-run to the underlying commands
    """
    release_repo = get_release_repo(repository, distro)
    with change_directory(release_repo.get_path()):
        # Check to see if the old bloom.conf exists
        if check_for_bloom_conf(repository):
            # Convert to a track
            info("Old bloom.conf file detected.")
            info(fmt("@{gf}@!==> @|Converting to bloom.conf to track"))
            convert_old_bloom_conf(None if new_track else distro)
        upconvert_bloom_to_config_branch()
        # Check that the track is valid
        tracks_dict = get_tracks_dict_raw()
        # If new_track, create the new track first
        if new_track:
            if not track:
                error("You must specify a track when creating a new one.", exit=True)
            if track in tracks_dict['tracks']:
                warning("Track '{0}' exists, editing...".format(track))
                edit_track_cmd(track)
                tracks_dict = get_tracks_dict_raw()
            else:
                # Create a new track called <track>,
                # copying an existing track if possible,
                # and overriding the ros_distro
                warning("Creating track '{0}'...".format(track))
                overrides = {'ros_distro': distro}
                new_track_cmd(track, copy_track='', overrides=overrides)
                tracks_dict = get_tracks_dict_raw()
        if track and track not in tracks_dict['tracks']:
            error("Given track '{0}' does not exist in release repository.".format(track))
            error("Available tracks: " + str(tracks_dict['tracks'].keys()),
                  exit=True)
        elif not track:
            # list() keeps this working on both Python 2 and 3 (indexing below)
            tracks = list(tracks_dict['tracks'].keys())
            # Error out if there are no tracks
            if len(tracks) == 0:
                error("Release repository has no tracks.")
                info("Manually clone the repository:")
                info(" git clone {0}".format(release_repo.get_url()))
                info("And then create a new track:")
                info(" git-bloom-config new <track name>")
                error("Run again after creating a track.", exit=True)
            # Error out if there is more than one track
            if len(tracks) != 1:
                error("No track specified and there is not just one track.")
                error("Please specify one of the available tracks: " +
                      str(tracks), exit=True)
            # Get the only track
            track = tracks[0]
        start_summary(track)
        # Ensure the track is complete
        track_dict = tracks_dict['tracks'][track]
        track_dict = update_track(track_dict)
        tracks_dict['tracks'][track] = track_dict
        # Set the release repositories' remote if given
        release_repo_url = track_dict.get('release_repo_url', None)
        if release_repo_url is not None:
            info(fmt("@{gf}@!==> @|") +
                 "Setting release repository remote url to '{0}'".format(release_repo_url))
            cmd = 'git remote set-url origin ' + release_repo_url
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Setting the remote url failed, exiting.", exit=True)
        # Check for push permissions
        try:
            info(fmt("@{gf}@!==> @|Testing for push permission on release repository"))
            cmd = 'git remote -v'
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            subprocess.check_call(cmd, shell=True)
            # Dry run will authenticate, but not push
            cmd = 'git push --dry-run'
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Cannot push to remote release repository.", exit=True)
        # Write the track config before releasing
        write_tracks_dict_raw(tracks_dict)
        # Run the release
        info(fmt("@{gf}@!==> @|") +
             "Releasing '{0}' using release track '{1}'".format(repository, track))
        cmd = 'git-bloom-release ' + str(track)
        if pretend:
            cmd += ' --pretend'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Release failed, exiting.", exit=True)
        info(fmt(_success) +
             "Released '{0}' using release track '{1}' successfully".format(repository, track))
        # Commit the summary
        update_summary(track, repository, distro)
        commit_summary()
        # Check for pushing
        if interactive:
            info("Releasing complete, push?")
            if not maybe_continue():
                error("User answered no to continue prompt, aborting.",
                      exit=True)
        # Push changes to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing changes to release repository for '{0}'".format(repository))
        cmd = 'git push --all'
        if pretend:
            cmd += ' --dry-run'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, would you like to add '--force' to 'git push --all'?")
            if not maybe_continue():
                error("Pushing changes failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing changes failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed changes successfully")
        # Push tags to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing tags to release repository for '{0}'".format(repository))
        cmd = 'git push --tags'
        if pretend:
            cmd += ' --dry-run'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, would you like to add '--force' to 'git push --tags'?")
            if not maybe_continue():
                error("Pushing tags failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing tags failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed tags successfully")
        # Propose github pull request
        info(fmt("@{gf}@!==> @|") +
             "Generating pull request to distro file located at '{0}'".format(
                 get_release_file_url(distro)))
        try:
            pull_request_url = open_pull_request(track, repository, distro)
            if pull_request_url:
                info(fmt(_success) +
                     "Pull request opened at: {0}".format(pull_request_url))
                # BUGFIX: the env-var check was inverted — the browser was
                # opened only when the user HAD set BLOOM_NO_WEBBROWSER.
                # Auto-open only when the opt-out is absent (macOS only).
                if 'BLOOM_NO_WEBBROWSER' not in os.environ and \
                        platform.system() in ['Darwin']:
                    webbrowser.open(pull_request_url)
            else:
                info("The release of your packages was successful, but the pull request failed.")
                info("Please manually open a pull request by editing the file here: '{0}'"
                     .format(get_release_file_url(distro)))
                info(fmt(_error) + "No pull request opened.")
        except Exception as e:
            debug(traceback.format_exc())
            error("Failed to open pull request: {0} - {1}".format(
                type(e).__name__, e), exit=True)