def get_repository_info_from_user():
    """Interactively collect VCS information from the user.

    Returns a dict with keys 'type', 'url', and 'version', or an empty
    dict if the user declines to retry after an invalid answer.
    """
    data = {}
    # VCS type: must be one of the supported systems.
    while True:
        vcs_type = safe_input('VCS type [git, svn, hg, bzr]: ')
        if vcs_type in ['git', 'svn', 'hg', 'bzr']:
            break
        error("'{0}' is not a valid vcs type.".format(vcs_type))
        if not maybe_continue(msg='Try again'):
            return {}
    data['type'] = vcs_type
    # URL: any non-empty string is accepted.
    while True:
        url = safe_input('VCS url: ')
        if url:
            break
        error("Nothing entered for url.")
        if not maybe_continue(msg='Try again'):
            return {}
    data['url'] = url
    # Version: any non-empty string (commit hash, tag, branch, ...).
    while True:
        version = safe_input('VCS version [commit, tag, branch, etc]: ')
        if version:
            break
        error("Nothing entered for version.")
        if not maybe_continue(msg='Try again'):
            return {}
    data['version'] = version
    return data
def post_patch(self, destination, color='bluef'):
    """Tag the destination branch after patches are applied and report success.

    Does nothing for plain debian branches; for release branches it
    (re)creates the release tag, asking first when interactive.
    """
    if destination in self.debian_branches:
        return
    # Tag after patches have been applied
    with inbranch(destination):
        tag_name = self.tag_names[destination]
        if not tag_exists(tag_name):
            info("Creating tag: " + tag_name)
        elif self.interactive:
            warning("Tag exists: " + tag_name)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.", exit=True)
        else:
            warning("Overwriting tag: " + tag_name)
        execute_command('git tag -f ' + tag_name)
    # Report of success
    pkg_key = destination.split('/')[-1]
    package = self.packages[pkg_key]
    distro = destination.split('/')[-2]
    info(ansi(color) + "####" + ansi('reset'), use_prefix=False)
    summary = (
        ansi(color) + "#### " + ansi('greenf') + "Successfully" +
        ansi(color) + " generated '" + ansi('boldon') + distro +
        ansi('boldoff') + "' debian for package '" +
        ansi('boldon') + package.name + ansi('boldoff') + "'" +
        " at version '" + ansi('boldon') + package.version +
        "-" + str(self.debian_inc) + ansi('boldoff') + "'" + ansi('reset')
    )
    info(summary, use_prefix=False)
    info(ansi(color) + "####\n" + ansi('reset'), use_prefix=False)
def pre_modify(self):
    """Verify all Debian rosdep keys resolve before any modification begins."""
    info("\nPre-verifying Debian dependency keys...")
    # Make sure rosdep update has been run at least once
    if not self.has_run_rosdep:
        self.update_rosdep()
    peer_packages = [p.name for p in self.packages.values()]
    # Loop until every key resolves or the user gives up.
    while not self._check_all_keys_are_valid(peer_packages):
        error("Some of the dependencies for packages in this repository could not be resolved by rosdep.")
        error("You can try to address the issues which appear above and try again if you wish.")
        try:
            if not maybe_continue(msg="Would you like to try again?"):
                error("User aborted after rosdep keys were not resolved.")
                sys.exit(code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO)
        except (KeyboardInterrupt, EOFError):
            error("\nUser quit.", exit=True)
        # Refresh the rosdep database before re-checking.
        update_rosdep()
        invalidate_view_cache()
    info("All keys are " + ansi('greenf') + "OK" + ansi('reset') + "\n")
def post_patch(self, destination, color='bluef'):
    """Tag the destination branch after patches are applied and report success.

    Does nothing for plain rpm branches; for release branches it
    (re)creates the release tag, asking first when interactive.
    """
    if destination in self.rpm_branches:
        return
    # Tag after patches have been applied
    with inbranch(destination):
        tag_name = self.tag_names[destination]
        if not tag_exists(tag_name):
            info("Creating tag: " + tag_name)
        elif self.interactive:
            warning("Tag exists: " + tag_name)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.", exit=True)
        else:
            warning("Overwriting tag: " + tag_name)
        execute_command('git tag -f ' + tag_name)
    # Report of success
    pkg_key = destination.split('/')[-1]
    package = self.packages[pkg_key]
    distro = destination.split('/')[-2]
    info(ansi(color) + "####" + ansi('reset'), use_prefix=False)
    summary = (
        ansi(color) + "#### " + ansi('greenf') + "Successfully" +
        ansi(color) + " generated '" + ansi('boldon') + distro +
        ansi('boldoff') + "' RPM for package '" +
        ansi('boldon') + package.name + ansi('boldoff') + "'" +
        " at version '" + ansi('boldon') + package.version +
        "-" + str(self.rpm_inc) + ansi('boldoff') + "'" + ansi('reset')
    )
    info(summary, use_prefix=False)
    info(ansi(color) + "####\n" + ansi('reset'), use_prefix=False)
def resolve_rosdep_key(rosdep_key, view, try_again=True):
    """Resolve a rosdep key against the given view.

    NOTE(review): this reads `self`, `os_name`, and `debian_distro` without
    declaring them — presumably a closure defined inside a method; verify
    against the enclosing scope.

    Returns the resolved package list; falls back to the sanitized
    'ros-<distro>-<key>' name for peer packages in this repository.
    Exits via self.exit() when resolution ultimately fails.
    """
    from rosdep2.catkin_support import resolve_for_os
    from rosdep2.lookup import ResolutionError
    try:
        return resolve_for_os(rosdep_key, view, self.apt_installer, os_name, debian_distro)
    except (KeyError, ResolutionError) as err:
        # Peer packages in this repo resolve to their own release name.
        if rosdep_key in self.packages:
            return [sanitize_package_name(
                'ros-{0}-{1}'.format(self.rosdistro, rosdep_key)
            )]
        # FIX: use isinstance() instead of `type(err) == KeyError` so that
        # KeyError subclasses are classified correctly (PEP 8 idiom).
        if isinstance(err, KeyError):
            error("Could not resolve rosdep key '" + rosdep_key + "'")
        else:
            error("Could not resolve the rosdep key '" + rosdep_key +
                  "' for distro '" + debian_distro + "': \n")
            info(str(err), use_prefix=False)
        if try_again:
            error("Resolve problem with rosdep and then continue to try again.")
            if maybe_continue():
                self.update_rosdep()
                new_view = self.get_rosdep_view(debian_distro, os_name)
                return resolve_rosdep_key(rosdep_key, new_view)
        self.exit("Failed to resolve rosdep key '{0}', aborting."
                  .format(rosdep_key))
def pre_modify(self):
    """Verify RPM rosdep keys resolve and every package declares a license."""
    info("\nPre-verifying RPM dependency keys...")
    # Make sure rosdep update has been run at least once
    if not self.has_run_rosdep:
        self.update_rosdep()
    peer_packages = [p.name for p in self.packages.values()]
    # Loop until every key resolves or the user gives up.
    while not self._check_all_keys_are_valid(peer_packages, self.rosdistro):
        error("Some of the dependencies for packages in this repository could not be resolved by rosdep.")
        error("You can try to address the issues which appear above and try again if you wish, "
              "or continue without releasing into RPM-based distributions (e.g. Fedora 24).")
        try:
            if not maybe_continue(msg="Would you like to try again?"):
                error("User aborted after rosdep keys were not resolved.")
                sys.exit(code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO)
        except (KeyboardInterrupt, EOFError):
            error("\nUser quit.", exit=True)
        # Refresh the rosdep database before re-checking.
        update_rosdep()
        invalidate_view_cache()
    info("All keys are " + ansi('greenf') + "OK" + ansi('reset') + "\n")
    # RPM spec files require a license tag; fail fast if any is missing.
    for package in self.packages.values():
        if not package.licenses or not package.licenses[0]:
            error("No license set for package '{0}', aborting.".format(package.name),
                  exit=True)
def resolve_rosdep_key(key, os_name, os_version, ros_distro=None, ignored=None, retry=True):
    """Resolve a single rosdep key for an OS/version/ROS-distro triple.

    Returns the triple from resolve_more_for_os, or (None, None, None)
    when the key is listed in `ignored`.  Exits bloom (with a specific
    returncode) when resolution fails and the user declines to retry.
    """
    ignored = ignored or []
    ctx = create_default_installer_context()
    try:
        installer_key = ctx.get_default_os_installer_key(os_name)
    except KeyError:
        BloomGenerator.exit("Could not determine the installer for '{0}'".format(os_name))
    installer = ctx.get_installer(installer_key)
    ros_distro = ros_distro or DEFAULT_ROS_DISTRO
    view = get_view(os_name, os_version, ros_distro)
    try:
        return resolve_more_for_os(key, view, installer, os_name, os_version)
    except (KeyError, ResolutionError) as exc:
        debug(traceback.format_exc())
        if key in ignored:
            return None, None, None
        if isinstance(exc, KeyError):
            returncode = code.GENERATOR_NO_SUCH_ROSDEP_KEY
            error("Could not resolve rosdep key '{0}'".format(key))
        else:
            returncode = code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO
            error("Could not resolve rosdep key '{0}' for distro '{1}':".format(key, os_version))
            info(str(exc), use_prefix=False)
        if retry:
            error("Try to resolve the problem with rosdep and then continue.")
            if maybe_continue():
                # Refresh rosdep and recurse with the same arguments.
                update_rosdep()
                invalidate_view_cache()
                return resolve_rosdep_key(key, os_name, os_version, ros_distro, ignored, retry=True)
        BloomGenerator.exit("Failed to resolve rosdep key '{0}', aborting.".format(key),
                            returncode=returncode)
def run_generator(generator, arguments):
    """Run the full generator pipeline: branch, export/rebase/import patches.

    For each branching argument the sequence is:
    pre_branch -> branch -> post_branch -> pre_export_patches -> export ->
    post_export_patches -> pre_rebase -> rebase -> post_rebase -> pre_patch ->
    (import if rebase succeeded) -> post_patch.
    Exits the process with the failing command's return code on error.
    """
    try:
        gen = generator
        try_execute('generator handle arguments', '', gen.handle_arguments, arguments)
        try_execute('generator summarize', '', gen.summarize)
        if arguments.interactive:
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.", exit=True)
        for branch_args in generator.get_branching_arguments():
            parsed_branch_args = parse_branch_args(branch_args, arguments.interactive)
            destination, source, interactive = parsed_branch_args
            # Summarize branch command
            msg = summarize_branch_cmd(destination, source, interactive)
            # Run pre - branch - post
            try_execute('generator pre_branch', msg, gen.pre_branch, destination, source)
            try_execute('git-bloom-branch', msg, execute_branch, source, destination, interactive)
            try_execute('generator post_branch', msg, gen.post_branch, destination, source)
            # Run pre - export patches - post
            try_execute('generator pre_export_patches', msg, gen.pre_export_patches, destination)
            try_execute('git-bloom-patch export', msg, export_patches)
            try_execute('generator post_export_patches', msg, gen.post_export_patches, destination)
            # Run pre - rebase - post; ret signals whether the rebase ran
            try_execute('generator pre_rebase', msg, gen.pre_rebase, destination)
            ret = try_execute('git-bloom-patch rebase', msg, rebase_patches)
            try_execute('generator post_rebase', msg, gen.post_rebase, destination)
            # Run pre - import patches - post
            try_execute('generator pre_patch', msg, gen.pre_patch, destination)
            if ret == 0:
                # Import patches only when the rebase actually ran
                try_execute('git-bloom-patch import', msg, import_patches)
            elif ret < 0:
                debug("Skipping patching because rebase did not run.")
            # post_patch runs unconditionally
            try_execute('generator post_patch', msg, gen.post_patch, destination)
    except CommandFailed as err:
        sys.exit(err.returncode or 1)
def post_rebase(self, destination):
    """Generate debian artifacts for the branched stackage and record patch config."""
    # Identify the package/distro from the branch name
    name = destination.split('/')[-1]
    distro = destination.split('/')[-2]
    stackage, kind = self.packages[name]
    # Ask to continue if interactive
    if self.interactive and not maybe_continue('y'):
        error("Answered no to continue, aborting.")
        return code.ANSWERED_NO_TO_CONTINUE
    ### Start debian generation
    # Timestamp used in the changelog
    from dateutil import tz
    stamp = datetime.datetime.now(tz.tzlocal())
    # Convert stackage to debian data
    data = self.convert_stackage_to_debian_data(stackage, kind)
    # Get apt_installer from rosdep
    from rosdep2.catkin_support import get_installer
    self.apt_installer = get_installer(APT_INSTALLER)
    # Generate on the destination branch and remember the tag for later
    with inbranch(destination):
        self.generate_debian(data, stamp, distro)
        self.tag_names[destination] = self.generate_tag_name(data)
    # Update the patch configs
    patches_branch = 'patches/' + destination
    config = get_patch_config(patches_branch)
    self.store_original_config(config, patches_branch)
    # Re-point the patch base so import/export works against the new tip
    config['base'] = get_commit_hash(get_current_branch())
    set_patch_config(patches_branch, config)
def pre_modify(self):
    """Verify RPM rosdep keys resolve and packages have licenses.

    In non-interactive mode unresolved keys abort immediately.
    """
    info("\nPre-verifying RPM dependency keys...")
    # Make sure rosdep update has been run at least once
    if not self.has_run_rosdep:
        self.update_rosdep()
    peer_packages = [p.name for p in self.packages.values()]
    while not self._check_all_keys_are_valid(peer_packages, self.rosdistro):
        error("Some of the dependencies for packages in this repository could not be resolved by rosdep.")
        # Without a user to prompt, unresolved keys are fatal.
        if not self.interactive:
            sys.exit(code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO)
        error("You can try to address the issues which appear above and try again if you wish, "
              "or continue without releasing into RPM-based distributions (e.g. Fedora 24).")
        try:
            if not maybe_continue(msg="Would you like to try again?"):
                error("User aborted after rosdep keys were not resolved.")
                sys.exit(code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO)
        except (KeyboardInterrupt, EOFError):
            error("\nUser quit.", exit=True)
        update_rosdep()
        invalidate_view_cache()
    info("All keys are " + ansi('greenf') + "OK" + ansi('reset') + "\n")
    # RPM spec files require a license tag; fail fast if any is missing.
    for package in self.packages.values():
        if not package.licenses or not package.licenses[0]:
            error("No license set for package '{0}', aborting.".format(package.name),
                  exit=True)
def place_template_files(self, build_type, debian_dir='debian'):
    """Place debian template files for `build_type` and commit any changes."""
    # Create/Clean the debian folder
    if os.path.exists(debian_dir):
        if self.interactive:
            warning("debian directory exists: " + debian_dir)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.", exit=True)
        elif 'BLOOM_CLEAR_DEBIAN_ON_GENERATION' in os.environ:
            # Opt-in env var: remove the old folder non-interactively.
            warning("Overwriting debian directory: " + debian_dir)
            execute_command('git rm -rf ' + debian_dir)
            execute_command('git commit -m "Clearing previous debian folder"')
            if os.path.exists(debian_dir):
                shutil.rmtree(debian_dir)
        else:
            warning("Not overwriting debian directory.")
    # Use generic place template files command
    place_template_files('.', build_type, gbp=True)
    # Commit results only if something is actually staged
    execute_command('git add ' + debian_dir)
    _, staged, _ = execute_command('git diff --cached --name-only', return_io=True)
    if staged:
        execute_command('git commit -m "Placing debian template files"')
def post_patch(self, destination, color='bluef'):
    """Tag the patched branch and print a success summary.

    Returns code.ANSWERED_NO_TO_CONTINUE when the user refuses to
    overwrite an existing tag; otherwise None.
    """
    # Tag after patches have been applied
    with inbranch(destination):
        tag_name = self.tag_names[destination]
        if not tag_exists(tag_name):
            info("Creating tag: " + tag_name)
        elif self.interactive:
            warning("Tag exists: " + tag_name)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.")
                return code.ANSWERED_NO_TO_CONTINUE
        else:
            warning("Overwriting tag: " + tag_name)
        execute_command('git tag -f ' + tag_name)
    # Report of success
    name = destination.split('/')[-1]
    stackage, kind = self.packages[name]
    distro = destination.split('/')[-2]
    info(ansi(color) + "####" + ansi('reset'), use_prefix=False)
    summary = (
        ansi(color) + "#### " + ansi('greenf') + "Successfully" +
        ansi(color) + " generated '" + ansi('boldon') + distro +
        ansi('boldoff') + "' debian for " + kind +
        " '" + ansi('boldon') + stackage.name + ansi('boldoff') + "'" +
        " at version '" + ansi('boldon') + stackage.version +
        "-" + str(self.debian_inc) + ansi('boldoff') + "'" + ansi('reset')
    )
    info(summary, use_prefix=False)
    info(ansi(color) + "####\n" + ansi('reset'), use_prefix=False)
def update_track(track_dict):
    """Reconcile a track dict with DEFAULT_TEMPLATE.

    For the 'actions' key, prompts the user before replacing a customized
    value with the default; any other missing key is filled in from the
    template (unwrapping PromptEntry defaults).  Returns the updated dict.
    """
    # FIX: dict.iteritems() is Python 2-only and raises AttributeError on
    # Python 3; use items(), matching the other update_track variants here.
    for key, value in DEFAULT_TEMPLATE.items():
        if key in ['actions']:
            if track_dict[key] != DEFAULT_TEMPLATE[key]:
                warning("""\
Your track's '{0}' configuration is not the same as the default, \
should it be updated to the default setting?""".format(key))
                if maybe_continue('n'):
                    track_dict[key] = DEFAULT_TEMPLATE[key]
        elif key not in track_dict:
            # PromptEntry wraps its default; unwrap before storing.
            value = value.default if isinstance(value, PromptEntry) else value
            track_dict[key] = value
    return track_dict
def check_git_init():
    """Ensure the cwd is a git repository with at least one commit.

    Creates an initial empty commit (after confirmation) when the
    repository has no heads yet; exits on any refusal or error.
    """
    if get_root() is None:
        error("Not in a valid git repository", exit=True)
    # A non-zero exit from show-ref means there are no commits yet.
    result = execute_command('git show-ref --heads', autofail=False, silent_error=True)
    if result == 0:
        return
    info("Freshly initialized git repository detected.")
    info("An initial empty commit is going to be made.")
    if not maybe_continue():
        error("Answered no to continue, exiting.", exit=True)
    # Make an initial empty commit
    execute_command('git commit --allow-empty -m "Initial commit"', silent=True)
def get_repo_uri(repository, distro):
    """Return the release repository url for `repository` in `distro`.

    Looks the repository up in the distribution file; if absent (or it has
    no release repository) the user is prompted for a url, with light
    validation of github.com addresses.  The chosen url is also stored in
    the module-global _user_provided_release_url.  Aborts if the user
    gives no url.
    """
    url = None
    # Fetch the distro file
    distribution_file = get_distribution_file(distro)
    if repository in distribution_file.repositories and \
       distribution_file.repositories[repository].release_repository is not None:
        url = distribution_file.repositories[repository].release_repository.url
    else:
        # NOTE(review): 'get_disitrbution_file_url' looks misspelled —
        # confirm it matches the helper's actual (possibly typo'd) name.
        error("Specified repository '{0}' is not in the distribution file located at '{1}'"
              .format(repository, get_disitrbution_file_url(distro)))
        # Suggest close matches to catch simple typos in the repo name.
        matches = difflib.get_close_matches(repository, distribution_file.repositories)
        if matches:
            info(fmt("@{yf}Did you mean one of these: '" + "', '".join([m for m in matches]) + "'?"))
    if url is None:
        info("Could not determine release repository url for repository '{0}' of distro '{1}'"
             .format(repository, distro))
        info("You can continue the release process by manually specifying the location of the RELEASE repository.")
        info("To be clear this is the url of the RELEASE repository not the upstream repository.")
        info("For release repositories on github, you should provide the `https://` url which should end in `.git`.")
        while True:
            try:
                url = safe_input('Release repository url [press enter to abort]: ')
            except (KeyboardInterrupt, EOFError):
                url = None
                info('', use_prefix=False)
            if not url:
                url = None
                error("No release repository url given, aborting.", exit=True)
                break
            # If github.com address, validate it
            if url is not None and 'github.com' in url:
                valid_url = True
                if not url.endswith('.git') and not url.endswith('.git/'):
                    valid_url = False
                    warning("The release repository url you provided does not end in `.git`.")
                if not url.startswith('https://'):
                    valid_url = False
                    warning("The release repository url you provided is not a `https://` address.")
                if not valid_url:
                    warning("Would you like to enter the address again?")
                    if maybe_continue():
                        url = None
                        continue
                    else:
                        info("Very well, the address '{url}' will be used as is.".format(**locals()))
                break
            # Non-github urls are accepted as-is.
            break
    global _user_provided_release_url
    _user_provided_release_url = url
    return url
def get_github_interface():
    """Return a Github session authenticated with a stored or new oauth token.

    Reads the token from ~/.config/bloom when present; otherwise prompts
    for username/password, creates a token via the GitHub API, and stores
    it.  Returns None if the user aborts.
    """
    # First check to see if the oauth token is stored
    oauth_config_path = os.path.join(os.path.expanduser('~'), '.config', 'bloom')
    config = {}
    if os.path.exists(oauth_config_path):
        with open(oauth_config_path, 'r') as f:
            config = json.loads(f.read())
            token = config.get('oauth_token', None)
            username = config.get('github_user', None)
            if token and username:
                return Github(username, auth=auth_header_from_oauth_token(token), token=token)
    if not os.path.isdir(os.path.dirname(oauth_config_path)):
        os.makedirs(os.path.dirname(oauth_config_path))
    # Ok, now we have to ask for the user name and pass word
    info("")
    warning("Looks like bloom doesn't have an oauth token for you yet.")
    warning("Therefore bloom will require your Github username and password just this once.")
    warning("With your Github username and password bloom will create an oauth token on your behalf.")
    warning("The token will be stored in `~/.config/bloom`.")
    warning("You can delete the token from that file to have a new token generated.")
    warning("Guard this token like a password, because it allows someone/something to act on your behalf.")
    warning("If you need to unauthorize it, remove it from the 'Applications' menu in your Github account page.")
    info("")
    token = None
    while token is None:
        try:
            username = getpass.getuser()
            username = safe_input("Github username [{0}]: ".format(username)) or username
            password = getpass.getpass("Github password (never stored): ")
        except (KeyboardInterrupt, EOFError):
            return None
        if not password:
            error("No password was given, aborting.")
            return None
        gh = Github(username, auth=auth_header_from_basic_auth(username, password))
        try:
            token = gh.create_new_bloom_authorization(update_auth=True)
            # FIX: open with 'w' (truncate), not 'a' (append).  Appending a
            # second JSON document to an existing config file makes it
            # unparseable by json.loads() on the next run.
            with open(oauth_config_path, 'w') as f:
                config.update({'oauth_token': token, 'github_user': username})
                f.write(json.dumps(config))
            info("The token '{token}' was created and stored in the bloom config file: '{oauth_config_path}'"
                 .format(**locals()))
        except GithubException as exc:
            error("{0}".format(exc))
            info("")
            warning("This sometimes fails when the username or password are incorrect, try again?")
            if not maybe_continue():
                return None
    return gh
def place_tempalte_files(self, debian_dir='debian'):
    """Place debian template, compat, and source/format files, then commit.

    NOTE(review): "tempalte" in the method name is a typo; kept unchanged
    because callers may reference it by this exact name.
    """
    # Create/Clean the debian folder
    if os.path.exists(debian_dir):
        if self.interactive:
            warning("Debian directory exists: " + debian_dir)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.")
                return code.ANSWERED_NO_TO_CONTINUE
        else:
            warning("Overwriting Debian directory: " + debian_dir)
        execute_command('git rm -rf ' + debian_dir)
        execute_command('git commit -m "Clearing previous debian folder"')
        if os.path.exists(debian_dir):
            shutil.rmtree(debian_dir)
    os.makedirs(debian_dir)
    # Place template files, loaded from this package's resources
    group = 'bloom.generators.debian'
    for template_file in ['changelog.em', 'control.em', 'gbp.conf.em', 'rules.em']:
        template_path = os.path.join('templates', template_file)
        try:
            template = pkg_resources.resource_string(group, template_path)
        except IOError as err:
            error("Failed to load template '{0}': {1}".format(template_file, str(err)))
            self.exit(code.DEBIAN_FAILED_TO_LOAD_TEMPLATE)
        with open(os.path.join(debian_dir, template_file), 'w') as f:
            f.write(template)
    # Create the compat file
    with open(os.path.join(debian_dir, 'compat'), 'w+') as f:
        print("7", file=f)
    # Create the source/format file
    source_dir = os.path.join(debian_dir, 'source')
    os.makedirs(source_dir)
    with open(os.path.join(source_dir, 'format'), 'w+') as f:
        print("3.0 (quilt)", file=f)
    # Commit results
    execute_command('git add ' + debian_dir)
    execute_command('git commit -m "Placing debian template files"')
def check_git_init():
    """Ensure the cwd is a git repository with at least one commit.

    Returns 0 on success, code.NOT_A_GIT_REPOSITORY when not inside a git
    repository, or 1 when the user declines the initial empty commit.
    """
    if get_root() is None:
        # FIX: message grammar — was "Not is a valid git repository".
        error("Not in a valid git repository")
        return code.NOT_A_GIT_REPOSITORY
    cmd = 'git show-ref --heads'
    # Non-zero means the repository has no heads (no commits yet).
    result = execute_command(cmd, shell=True, autofail=False, silent_error=True)
    if result != 0:
        info("Freshly initialized git repository detected.")
        info("An initial empty commit is going to be made.")
        if not maybe_continue():
            error("Answered no to continue, exiting.")
            return 1
        # Make an initial empty commit
        execute_command('git commit -m "initial commit" --allow-empty')
    return 0
def resolve_rosdep_key(key, os_name, os_version, ros_distro=None, ignored=None, retry=True):
    """Resolve one rosdep key, retrying (with user consent) after a rosdep refresh.

    Returns resolve_more_for_os's triple, (None, None, None) for ignored
    keys, and exits bloom with an appropriate returncode otherwise.
    """
    ignored = ignored or []
    ctx = create_default_installer_context()
    try:
        installer_key = ctx.get_default_os_installer_key(os_name)
    except KeyError:
        BloomGenerator.exit(
            "Could not determine the installer for '{0}'".format(os_name))
    installer = ctx.get_installer(installer_key)
    ros_distro = ros_distro or DEFAULT_ROS_DISTRO
    view = get_view(os_name, os_version, ros_distro)
    try:
        return resolve_more_for_os(key, view, installer, os_name, os_version)
    except (KeyError, ResolutionError) as err:
        debug(traceback.format_exc())
        # Ignored keys resolve to nothing without complaint.
        if key in ignored:
            return None, None, None
        if isinstance(err, KeyError):
            error("Could not resolve rosdep key '{0}'".format(key))
            returncode = code.GENERATOR_NO_SUCH_ROSDEP_KEY
        else:
            error("Could not resolve rosdep key '{0}' for distro '{1}':".format(key, os_version))
            info(str(err), use_prefix=False)
            returncode = code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO
        if retry:
            error("Try to resolve the problem with rosdep and then continue.")
            if maybe_continue():
                update_rosdep()
                invalidate_view_cache()
                return resolve_rosdep_key(key, os_name, os_version, ros_distro, ignored, retry=True)
        BloomGenerator.exit(
            "Failed to resolve rosdep key '{0}', aborting.".format(key),
            returncode=returncode)
def place_template_files(self, rpm_dir='rpm'):
    """Place rpm template files and commit them."""
    # Create/Clean the rpm folder
    if os.path.exists(rpm_dir):
        if self.interactive:
            warning("rpm directory exists: " + rpm_dir)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.", exit=True)
        else:
            warning("Overwriting rpm directory: " + rpm_dir)
        # Remove the old folder from git and from disk.
        execute_command('git rm -rf ' + rpm_dir)
        execute_command('git commit -m "Clearing previous rpm folder"')
        if os.path.exists(rpm_dir):
            shutil.rmtree(rpm_dir)
    # Use generic place template files command
    place_template_files('.', gbp=True)
    # Commit results
    execute_command('git add ' + rpm_dir)
    execute_command('git commit -m "Placing rpm template files"')
def update_track(track_dict):
    """Bring a track dict up to date with DEFAULT_TEMPLATE.

    Prompts before replacing a customized 'actions' list (defaulting to
    yes for 'actions'); fills any other missing key from the template.
    """
    for key, value in DEFAULT_TEMPLATE.items():
        if key in ['actions']:
            if track_dict[key] == DEFAULT_TEMPLATE[key]:
                continue
            warning("Your track's '{0}' configuration is not the same as the default."
                    .format(key))
            default = 'n'
            if key == 'actions':
                default = 'y'
                warning("Unless you have manually modified your 'actions' "
                        "(the commands which get run for a release), "
                        "you should update to the new default.")
            warning("Should it be updated to the default setting?")
            if maybe_continue(default):
                track_dict[key] = DEFAULT_TEMPLATE[key]
        elif key not in track_dict:
            # PromptEntry wraps its default value; unwrap before storing.
            track_dict[key] = value.default if isinstance(value, PromptEntry) else value
    return track_dict
def place_template_files(self, arch_dir='arch'):
    """Place arch template files and commit them."""
    # Create/Clean the arch folder
    if os.path.exists(arch_dir):
        if self.interactive:
            warning("arch directory exists: " + arch_dir)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.", exit=True)
        else:
            warning("Overwriting arch directory: " + arch_dir)
        # Remove the old folder from git and from disk.
        execute_command('git rm -rf ' + arch_dir)
        execute_command('git commit -m "Clearing previous arch folder"')
        if os.path.exists(arch_dir):
            shutil.rmtree(arch_dir)
    # Use generic place template files command
    place_template_files('.')
    # Commit results
    execute_command('git add ' + arch_dir)
    execute_command('git commit -m "Placing arch template files"')
def place_template_files(self, build_type, rpm_dir='rpm'):
    """Place rpm template files for `build_type` and commit them."""
    # Create/Clean the rpm folder
    if os.path.exists(rpm_dir):
        if self.interactive:
            warning("rpm directory exists: " + rpm_dir)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.", exit=True)
        else:
            warning("Overwriting rpm directory: " + rpm_dir)
        # Remove the old folder from git and from disk.
        execute_command('git rm -rf ' + rpm_dir)
        execute_command('git commit -m "Clearing previous rpm folder"')
        if os.path.exists(rpm_dir):
            shutil.rmtree(rpm_dir)
    # Use generic place template files command
    place_template_files('.', build_type, gbp=True)
    # Commit results
    execute_command('git add ' + rpm_dir)
    execute_command('git commit -m "Placing rpm template files"')
def validate_github_url(url, url_type):
    """Check a github.com url for the expected https/.git shape.

    Returns True when the url is acceptable (or not a GitHub url, or the
    user chooses to keep it anyway); False when the caller should prompt
    for the address again.
    """
    if 'github.com' not in url:
        # Only GitHub addresses are validated here.
        return True
    problems = False
    if not (url.endswith('.git') or url.endswith('.git/')):
        problems = True
        warning("The {0} repository url you provided does not end in `.git`."
                .format(url_type))
    if not url.startswith('https://'):
        problems = True
        warning("The {0} repository url you provided is not a `https://` address."
                .format(url_type))
    if problems:
        if maybe_continue(msg="Would you like to enter the address again"):
            return False
        warning("Very well, the address '{0}' will be used as is.".format(url))
    return True
def update_track(track_dict):
    """Bring a track dict up to date with DEFAULT_TEMPLATE.

    Defaults to updating 'actions' only when the current value matches a
    known historical default (ACTION_LIST_HISTORY); other missing keys
    are filled from the template.
    """
    for key, value in DEFAULT_TEMPLATE.items():
        if key in ['actions']:
            if track_dict[key] == DEFAULT_TEMPLATE[key]:
                continue
            warning("Your track's '{0}' configuration is not the same as the default."
                    .format(key))
            default = 'n'
            # Auto-suggest updating only unmodified historical defaults.
            if key == 'actions' and track_dict[key] in ACTION_LIST_HISTORY:
                default = 'y'
                warning("Unless you have manually modified your 'actions' "
                        "(the commands which get run for a release), "
                        "you should update to the new default.")
            warning("Should it be updated to the default setting?")
            if maybe_continue(default):
                track_dict[key] = DEFAULT_TEMPLATE[key]
        elif key not in track_dict:
            # PromptEntry wraps its default value; unwrap before storing.
            track_dict[key] = value.default if isinstance(value, PromptEntry) else value
    return track_dict
def place_template_files(self, debian_dir='debian'):
    """Place debian template files and commit them."""
    # Create/Clean the debian folder
    if os.path.exists(debian_dir):
        if self.interactive:
            warning("debian directory exists: " + debian_dir)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.", exit=True)
        elif 'BLOOM_CLEAR_DEBIAN_ON_GENERATION' in os.environ:
            # Opt-in env var: remove the old folder non-interactively.
            warning("Overwriting debian directory: " + debian_dir)
            execute_command('git rm -rf ' + debian_dir)
            execute_command('git commit -m "Clearing previous debian folder"')
            if os.path.exists(debian_dir):
                shutil.rmtree(debian_dir)
        else:
            warning("Not overwriting debian directory.")
    # Use generic place template files command
    place_template_files('.', gbp=True)
    # Commit results
    execute_command('git add ' + debian_dir)
    execute_command('git commit -m "Placing debian template files"')
def pre_modify(self):
    """Verify Debian rosdep keys resolve before any modification begins."""
    info("\nPre-verifying Debian dependency keys...")
    # Run rosdep update if it hasn't happened yet.
    if not self.has_run_rosdep:
        self.update_rosdep()
    peer_packages = [p.name for p in self.packages.values()]
    while not self._check_all_keys_are_valid(peer_packages):
        error("Some of the dependencies for packages in this repository could not be resolved by rosdep.")
        error("You can try to address the issues which appear above and try again if you wish.")
        try:
            if not maybe_continue(msg="Would you like to try again?"):
                error("User aborted after rosdep keys were not resolved.")
                sys.exit(code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO)
        except (KeyboardInterrupt, EOFError):
            error("\nUser quit.", exit=True)
        update_rosdep()
        invalidate_view_cache()
    info("All keys are " + ansi('greenf') + "OK" + ansi('reset') + "\n")
def resolve_rosdep_key(key, os_name, os_version, ros_distro=None, ignored=None, retry=True):
    """Resolve a rosdep key for the given OS/version.

    Returns None for ignored keys; exits bloom when resolution fails and
    the user declines to retry.
    """
    ignored = ignored or []
    if os_name not in default_installers:
        BloomGenerator.exit(
            "Could not determine the installer for '{0}'".format(os_name))
    installer = get_installer(default_installers[os_name][0])
    ros_distro = ros_distro or DEFAULT_ROS_DISTRO
    view = get_view(os_name, os_version, ros_distro)
    try:
        return resolve_for_os(key, view, installer, os_name, os_version)
    except (KeyError, ResolutionError) as err:
        debug(traceback.format_exc())
        if key in ignored:
            return None
        if isinstance(err, KeyError):
            error("Could not resolve rosdep key '{0}'".format(key))
        else:
            error("Could not resolve rosdep key '{0}' for distro '{1}':".format(key, os_version))
            info(str(err), use_prefix=False)
        if retry:
            error("Try to resolve the problem with rosdep and then continue.")
            if maybe_continue():
                # Refresh rosdep and recurse with the same arguments.
                update_rosdep()
                invalidate_view_cache()
                return resolve_rosdep_key(key, os_name, os_version, ros_distro, ignored, retry=True)
        BloomGenerator.exit(
            "Failed to resolve rosdep key '{0}', aborting.".format(key))
def resolve_rosdep_key(
    key, os_name, os_version, ros_distro=None, ignored=None, retry=True
):
    """Resolve a rosdep key for the given OS/version.

    Returns None when the key is in `ignored`; on failure, optionally lets
    the user refresh rosdep and retry, otherwise exits bloom.
    """
    ignored = ignored or []
    if os_name not in default_installers:
        BloomGenerator.exit("Could not determine the installer for '{0}'"
                            .format(os_name))
    installer = get_installer(default_installers[os_name][0])
    ros_distro = ros_distro or DEFAULT_ROS_DISTRO
    view = get_view(os_name, os_version, ros_distro)
    try:
        return resolve_for_os(key, view, installer, os_name, os_version)
    except (KeyError, ResolutionError) as exc:
        debug(traceback.format_exc())
        # Ignored keys fail silently.
        if key in ignored:
            return None
        if isinstance(exc, KeyError):
            error("Could not resolve rosdep key '{0}'".format(key))
        else:
            error("Could not resolve rosdep key '{0}' for distro '{1}':"
                  .format(key, os_version))
            info(str(exc), use_prefix=False)
        if retry:
            error("Try to resolve the problem with rosdep and then continue.")
            if maybe_continue():
                update_rosdep()
                invalidate_view_cache()
                return resolve_rosdep_key(key, os_name, os_version, ros_distro,
                                          ignored, retry=True)
        BloomGenerator.exit("Failed to resolve rosdep key '{0}', aborting."
                            .format(key))
def run_generator(generator, arguments):
    """Run the full generator pipeline and return 0 or an error code.

    For each branching argument: pre/post hooks around branch, patch
    export, patch rebase, and (when the rebase produced changes) patch
    import.  Returns code.ANSWERED_NO_TO_CONTINUE if the user aborts at
    the interactive prompt, or the failing command's return code.
    """
    try:
        gen = generator
        try_execute('generator handle arguments', '', gen.handle_arguments, arguments)
        try_execute('generator summarize', '', gen.summarize)
        if arguments.interactive:
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.")
                return code.ANSWERED_NO_TO_CONTINUE
        for branch_args in generator.get_branching_arguments():
            parsed_branch_args = parse_branch_args(branch_args, arguments.interactive)
            destination, source, interactive = parsed_branch_args
            # Summarize branch command
            msg = summarize_branch_cmd(destination, source, interactive)
            # Run pre - branch - post
            try_execute('generator pre_branch', msg, gen.pre_branch, destination, source)
            try_execute('git-bloom-branch', msg, execute_branch, source, destination, interactive)
            try_execute('generator post_branch', msg, gen.post_branch, destination, source)
            # Run pre - export patches - post
            try_execute('generator pre_export_patches', msg, gen.pre_export_patches, destination)
            try_execute('git-bloom-patch export', msg, export_patches)
            try_execute('generator post_export_patches', msg, gen.post_export_patches, destination)
            # Run pre - rebase - post; ret signals whether the rebase did anything
            try_execute('generator pre_rebase', msg, gen.pre_rebase, destination)
            ret = try_execute('git-bloom-patch rebase', msg, rebase_patches)
            try_execute('generator post_rebase', msg, gen.post_rebase, destination)
            # Run pre - import patches - post
            if ret == 0:
                try_execute('generator pre_patch', msg, gen.pre_patch, destination)
                try_execute('git-bloom-patch import', msg, import_patches)
                try_execute('generator post_patch', msg, gen.post_patch, destination)
            elif ret == code.NOTHING_TO_DO:
                # FIX: message previously read "because rebase did run",
                # contradicting both this branch's meaning and the sibling
                # implementation's wording.
                debug("Skipping patching because rebase did not run.")
    except CommandFailed as err:
        return err.returncode or 1
    return 0
def get_github_interface(quiet=False):
    """Return a cached, authenticated GitHub client.

    Resolution order: module-level cache ``_gh``; token stored in
    ``~/.config/bloom``; otherwise interactively prompt the user for a
    personal access token and store it.

    :param quiet: when True, never prompt — return None if no stored token
    :returns: a ``Github`` client, or None when unavailable/aborted
    """
    def mfa_prompt(oauth_config_path, username):
        """Explain how to create a token for users with Multi-Factor Authentication configured."""
        warning("Receiving 401 when trying to create an oauth token can be caused by the user "
                "having two-factor authentication enabled.")
        warning("If 2FA is enabled, the user will have to create an oauth token manually.")
        warning("A token can be created at https://github.com/settings/tokens")
        warning("The resulting token can be placed in the '{oauth_config_path}' file as such:"
                .format(**locals()))
        info("")
        warning('{{"github_user": "******", "oauth_token": "TOKEN_GOES_HERE"}}'
                .format(**locals()))
        info("")
    global _gh
    # Reuse the cached client if one was already built this process.
    if _gh is not None:
        return _gh
    # First check to see if the oauth token is stored
    oauth_config_path = os.path.join(os.path.expanduser('~'), '.config', 'bloom')
    config = {}
    if os.path.exists(oauth_config_path):
        with open(oauth_config_path, 'r') as f:
            config = json.loads(f.read())
        token = config.get('oauth_token', None)
        username = config.get('github_user', None)
        if token and username:
            # Stored credentials are trusted as-is; no validity check here.
            return Github(username, auth=auth_header_from_token(username, token), token=token)
    if not os.path.isdir(os.path.dirname(oauth_config_path)):
        os.makedirs(os.path.dirname(oauth_config_path))
    if quiet:
        return None
    # Ok, now we have to ask for the user name and pass word
    info("")
    warning("Looks like bloom doesn't have an oauth token for you yet.")
    warning("You can create a token by visiting https://github.com/settings/tokens in your browser.")
    warning("For bloom to work the token must have at least `public_repo` scope.")
    warning("If you want bloom to be able to automatically update your fork of ros/rosdistro (recommended)")
    warning("then you must also enable the workflow scope for the token.")
    warning("If you need to unauthorize it, remove it from the 'Tokens' menu in your GitHub account settings.")
    info("")
    if not maybe_continue('y', 'Would you like to enter an access token now'):
        return None
    token = None
    while token is None:
        try:
            # Default the GitHub username to the local login name.
            username = getpass.getuser()
            username = safe_input("GitHub username [{0}]: ".format(username)) or username
            token = getpass.getpass("GitHub access token: ").strip()
        except (KeyboardInterrupt, EOFError):
            return None
        if not token:
            error("No token was given, aborting.")
            return None
        gh = Github(username, auth=auth_header_from_token(username, token))
        try:
            gh.check_token_validity(username, token, update_auth=True)
            # Persist the working credentials for future runs.
            with open(oauth_config_path, 'w') as f:
                config.update({'oauth_token': token, 'github_user': username})
                f.write(json.dumps(config))
            info("The token '{token}' was created and stored in the bloom config file: '{oauth_config_path}'"
                 .format(**locals()))
        except GitHubAuthException as exc:
            error("{0}".format(exc))
            mfa_prompt(oauth_config_path, username)
        except GithubException as exc:
            error("{0}".format(exc))
            info("")
            if hasattr(exc, 'resp') and '{0}'.format(exc.resp.status) in ['401']:
                mfa_prompt(oauth_config_path, username)
            warning("This sometimes fails when the username or password are incorrect, try again?")
            if not maybe_continue():
                return None
            # NOTE(review): token stays non-empty here, so the loop exits and
            # the unverified client is cached/returned below — confirm intended.
    _gh = gh
    return gh
def perform_release(repository, track, distro, new_track, interactive, pretend, ssh_pull_request):
    """Release a repository using a release track and open a pull request.

    Clones/enters the release repository, converts legacy ``bloom.conf`` if
    present, resolves or creates the track, runs ``git-bloom-release``,
    pushes branches and tags, and proposes a rosdistro pull request.

    :param repository: repository name in the rosdistro file
    :param track: release track name (may be None when only one exists)
    :param distro: ROS distro being released into
    :param new_track: when True, create/edit the named track first
    :param interactive: when True, prompt before pushing
    :param pretend: when True, dry-run the release and pushes
    :param ssh_pull_request: forwarded to ``open_pull_request``
    """
    release_repo = get_release_repo(repository, distro)
    with change_directory(release_repo.get_path()):
        # Check to see if the old bloom.conf exists
        if check_for_bloom_conf(repository):
            # Convert to a track
            info("Old bloom.conf file detected.")
            info(fmt("@{gf}@!==> @|Converting to bloom.conf to track"))
            convert_old_bloom_conf(None if new_track else distro)
        upconvert_bloom_to_config_branch()
        # Check that the track is valid
        tracks_dict = get_tracks_dict_raw()
        # If new_track, create the new track first
        if new_track:
            if not track:
                error("You must specify a track when creating a new one.", exit=True)
            if track in tracks_dict['tracks']:
                warning("Track '{0}' exists, editing...".format(track))
                edit_track_cmd(track)
                tracks_dict = get_tracks_dict_raw()
            else:
                # Create a new track called <track>,
                # copying an existing track if possible,
                # and overriding the ros_distro
                warning("Creating track '{0}'...".format(track))
                overrides = {'ros_distro': distro}
                new_track_cmd(track, copy_track='', overrides=overrides)
                tracks_dict = get_tracks_dict_raw()
        if track and track not in tracks_dict['tracks']:
            error("Given track '{0}' does not exist in release repository."
                  .format(track))
            error("Available tracks: " + str(tracks_dict['tracks'].keys()),
                  exit=True)
        elif not track:
            tracks = tracks_dict['tracks'].keys()
            # Error out if there are no tracks
            if len(tracks) == 0:
                error("Release repository has no tracks.")
                info("Manually clone the repository:")
                info(" git clone {0}".format(release_repo.get_url()))
                info("And then create a new track:")
                info(" git-bloom-config new <track name>")
                error("Run again after creating a track.", exit=True)
            # Error out if there is more than one track
            if len(tracks) != 1:
                error("No track specified and there is not just one track.")
                error("Please specify one of the available tracks: " +
                      str(tracks), exit=True)
            # Get the only track
            track = tracks[0]
        start_summary(track)
        # Ensure the track is complete
        track_dict = tracks_dict['tracks'][track]
        track_dict = update_track(track_dict)
        tracks_dict['tracks'][track] = track_dict
        # Set the release repositories' remote if given
        release_repo_url = track_dict.get('release_repo_url', None)
        if release_repo_url is not None:
            info(fmt("@{gf}@!==> @|") +
                 "Setting release repository remote url to '{0}'"
                 .format(release_repo_url))
            cmd = 'git remote set-url origin ' + release_repo_url
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Setting the remote url failed, exiting.", exit=True)
        # Check for push permissions
        try:
            info(fmt("@{gf}@!==> @|Testing for push permission on release repository"))
            cmd = 'git remote -v'
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            subprocess.check_call(cmd, shell=True)
            # Dry run will authenticate, but not push
            cmd = 'git push --dry-run'
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Cannot push to remote release repository.", exit=True)
        # Write the track config before releasing
        write_tracks_dict_raw(tracks_dict)
        # Run the release
        info(fmt("@{gf}@!==> @|") +
             "Releasing '{0}' using release track '{1}'"
             .format(repository, track))
        cmd = 'git-bloom-release ' + str(track)
        if pretend:
            cmd += ' --pretend'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Release failed, exiting.", exit=True)
        info(fmt(_success) +
             "Released '{0}' using release track '{1}' successfully"
             .format(repository, track))
        # Commit the summary
        update_summary(track, repository, distro)
        commit_summary()
        # Check for pushing
        if interactive:
            info("Releasing complete, push?")
            if not maybe_continue():
                error("User answered no to continue prompt, aborting.",
                      exit=True)
        # Push changes to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing changes to release repository for '{0}'"
             .format(repository))
        cmd = 'git push --all'
        if pretend:
            cmd += ' --dry-run'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, would you like to add '--force' to 'git push --all'?")
            if not maybe_continue():
                error("Pushing changes failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing changes failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed changes successfully")
        # Push tags to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing tags to release repository for '{0}'"
             .format(repository))
        cmd = 'git push --tags'
        if pretend:
            cmd += ' --dry-run'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, would you like to add '--force' to 'git push --tags'?")
            if not maybe_continue():
                error("Pushing tags failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing tags failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed tags successfully")
        # Propose github pull request
        info(fmt("@{gf}@!==> @|") +
             "Generating pull request to distro file located at '{0}'"
             .format(get_disitrbution_file_url(distro)))
        try:
            pull_request_url = open_pull_request(track, repository, distro, ssh_pull_request)
            if pull_request_url:
                info(fmt(_success) + "Pull request opened at: {0}".format(pull_request_url))
                # BUGFIX: condition was inverted — it opened the browser only
                # when BLOOM_NO_WEBBROWSER (the opt-out) was set, and only off
                # macOS. Open it when the opt-out is NOT set, on Darwin.
                if 'BLOOM_NO_WEBBROWSER' not in os.environ and platform.system() in ['Darwin']:
                    webbrowser.open(pull_request_url)
            else:
                info("The release of your packages was successful, but the pull request failed.")
                info("Please manually open a pull request by editing the file here: '{0}'"
                     .format(get_disitrbution_file_url(distro)))
                info(fmt(_error) + "No pull request opened.")
        except Exception as e:
            debug(traceback.format_exc())
            error("Failed to open pull request: {0} - {1}".format(type(e).__name__, e), exit=True)
def _perform_release(repository, track, distro, new_track, interactive, pretend, tracks_dict):
    """Run the release for an already-resolved track and push the results.

    Updates the track config, optionally repoints the ``origin`` remote,
    verifies push permission, runs ``git-bloom-release``, commits the
    summary, and pushes branches and tags (offering ``--force`` on failure).

    :param repository: repository name being released
    :param track: name of the release track (must exist in ``tracks_dict``)
    :param distro: ROS distro being released into
    :param new_track: not read in this function — presumably kept for
        call-site symmetry; TODO confirm
    :param interactive: when True, prompt before pushing
    :param pretend: when True, dry-run the release and pushes
    :param tracks_dict: parsed tracks configuration (mutated and written back)
    """
    # Ensure the track is complete
    track_dict = tracks_dict['tracks'][track]
    track_dict = update_track(track_dict)
    tracks_dict['tracks'][track] = track_dict
    # Set the release repositories' remote if given
    release_repo_url = track_dict.get('release_repo_url', None)
    if release_repo_url is not None:
        info(fmt("@{gf}@!==> @|") +
             "Setting release repository remote url to '{0}'"
             .format(release_repo_url))
        cmd = 'git remote set-url origin ' + release_repo_url
        info(fmt("@{bf}@!==> @|@!") + str(cmd))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Setting the remote url failed, exiting.", exit=True)
    # Check for push permissions
    try:
        info(fmt("@{gf}@!==> @|Testing for push permission on release repository"))
        cmd = 'git remote -v'
        info(fmt("@{bf}@!==> @|@!") + str(cmd))
        subprocess.check_call(cmd, shell=True)
        # Dry run will authenticate, but not push
        cmd = 'git push --dry-run'
        info(fmt("@{bf}@!==> @|@!") + str(cmd))
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        error("Cannot push to remote release repository.", exit=True)
    # Write the track config before releasing
    write_tracks_dict_raw(tracks_dict)
    # Run the release
    info(fmt("@{gf}@!==> @|") +
         "Releasing '{0}' using release track '{1}'"
         .format(repository, track))
    cmd = 'git-bloom-release ' + str(track)
    if pretend:
        cmd += ' --pretend'
    info(fmt("@{bf}@!==> @|@!" + str(cmd)))
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        error("Release failed, exiting.", exit=True)
    info(fmt(_success) +
         "Released '{0}' using release track '{1}' successfully"
         .format(repository, track))
    # Commit the summary
    update_summary(track, repository, distro)
    commit_summary()
    # Check for pushing
    if interactive:
        info("Releasing complete, push?")
        if not maybe_continue():
            error("User answered no to continue prompt, aborting.",
                  exit=True)
    # Push changes to the repository
    info(fmt("@{gf}@!==> @|") +
         "Pushing changes to release repository for '{0}'"
         .format(repository))
    cmd = 'git push --all'
    if pretend:
        cmd += ' --dry-run'
    info(fmt("@{bf}@!==> @|@!" + str(cmd)))
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        # Offer a forced push before giving up.
        error("Pushing changes failed, would you like to add '--force' to 'git push --all'?")
        if not maybe_continue():
            error("Pushing changes failed, exiting.", exit=True)
        cmd += ' --force'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, exiting.", exit=True)
    info(fmt(_success) + "Pushed changes successfully")
    # Push tags to the repository
    info(fmt("@{gf}@!==> @|") +
         "Pushing tags to release repository for '{0}'"
         .format(repository))
    cmd = 'git push --tags'
    if pretend:
        cmd += ' --dry-run'
    info(fmt("@{bf}@!==> @|@!" + str(cmd)))
    try:
        subprocess.check_call(cmd, shell=True)
    except subprocess.CalledProcessError:
        # Offer a forced tag push before giving up.
        error("Pushing changes failed, would you like to add '--force' to 'git push --tags'?")
        if not maybe_continue():
            error("Pushing tags failed, exiting.", exit=True)
        cmd += ' --force'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing tags failed, exiting.", exit=True)
    info(fmt(_success) + "Pushed tags successfully")
def perform_release(repository, track, distro, new_track, interactive, pretend):
    """Release a repository using a release track and open a pull request.

    Clones/enters the release repository, converts legacy ``bloom.conf`` if
    present, resolves or creates the track, runs ``git-bloom-release``,
    pushes branches and tags, and proposes a rosdistro pull request.

    :param repository: repository name in the rosdistro file
    :param track: release track name (may be None when only one exists)
    :param distro: ROS distro being released into
    :param new_track: when True, create/edit the named track first
    :param interactive: when True, prompt before pushing
    :param pretend: when True, dry-run the release and pushes
    """
    release_repo = get_release_repo(repository, distro)
    with change_directory(release_repo.get_path()):
        # Check to see if the old bloom.conf exists
        if check_for_bloom_conf(repository):
            # Convert to a track
            info("Old bloom.conf file detected.")
            info(fmt("@{gf}@!==> @|Converting to bloom.conf to track"))
            convert_old_bloom_conf(None if new_track else distro)
        upconvert_bloom_to_config_branch()
        # Check that the track is valid
        tracks_dict = get_tracks_dict_raw()
        # If new_track, create the new track first
        if new_track:
            if not track:
                error("You must specify a track when creating a new one.", exit=True)
            if track in tracks_dict['tracks']:
                warning("Track '{0}' exists, editing...".format(track))
                edit_track_cmd(track)
                tracks_dict = get_tracks_dict_raw()
            else:
                # Create a new track called <track>,
                # copying an existing track if possible,
                # and overriding the ros_distro
                warning("Creating track '{0}'...".format(track))
                overrides = {'ros_distro': distro}
                new_track_cmd(track, copy_track='', overrides=overrides)
                tracks_dict = get_tracks_dict_raw()
        if track and track not in tracks_dict['tracks']:
            error("Given track '{0}' does not exist in release repository."
                  .format(track))
            error("Available tracks: " + str(tracks_dict['tracks'].keys()),
                  exit=True)
        elif not track:
            tracks = tracks_dict['tracks'].keys()
            # Error out if there are no tracks
            if len(tracks) == 0:
                error("Release repository has no tracks.")
                info("Manually clone the repository:")
                info(" git clone {0}".format(release_repo.get_url()))
                info("And then create a new track:")
                info(" git-bloom-config new <track name>")
                error("Run again after creating a track.", exit=True)
            # Error out if there is more than one track
            if len(tracks) != 1:
                error("No track specified and there is not just one track.")
                error("Please specify one of the available tracks: " +
                      str(tracks), exit=True)
            # Get the only track
            track = tracks[0]
        start_summary(track)
        # Ensure the track is complete
        track_dict = tracks_dict['tracks'][track]
        track_dict = update_track(track_dict)
        tracks_dict['tracks'][track] = track_dict
        # Set the release repositories' remote if given
        release_repo_url = track_dict.get('release_repo_url', None)
        if release_repo_url is not None:
            info(fmt("@{gf}@!==> @|") +
                 "Setting release repository remote url to '{0}'".format(
                     release_repo_url))
            cmd = 'git remote set-url origin ' + release_repo_url
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Setting the remote url failed, exiting.", exit=True)
        # Check for push permissions
        try:
            info(fmt("@{gf}@!==> @|Testing for push permission on release repository"))
            cmd = 'git remote -v'
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            subprocess.check_call(cmd, shell=True)
            # Dry run will authenticate, but not push
            cmd = 'git push --dry-run'
            info(fmt("@{bf}@!==> @|@!") + str(cmd))
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Cannot push to remote release repository.", exit=True)
        # Write the track config before releasing
        write_tracks_dict_raw(tracks_dict)
        # Run the release
        info(fmt("@{gf}@!==> @|") +
             "Releasing '{0}' using release track '{1}'".format(
                 repository, track))
        cmd = 'git-bloom-release ' + str(track)
        if pretend:
            cmd += ' --pretend'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Release failed, exiting.", exit=True)
        info(fmt(_success) +
             "Released '{0}' using release track '{1}' successfully".format(
                 repository, track))
        # Commit the summary
        update_summary(track, repository, distro)
        commit_summary()
        # Check for pushing
        if interactive:
            info("Releasing complete, push?")
            if not maybe_continue():
                error("User answered no to continue prompt, aborting.",
                      exit=True)
        # Push changes to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing changes to release repository for '{0}'".format(
                 repository))
        cmd = 'git push --all'
        if pretend:
            cmd += ' --dry-run'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, would you like to add '--force' to 'git push --all'?")
            if not maybe_continue():
                error("Pushing changes failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing changes failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed changes successfully")
        # Push tags to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing tags to release repository for '{0}'".format(repository))
        cmd = 'git push --tags'
        if pretend:
            cmd += ' --dry-run'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Pushing changes failed, would you like to add '--force' to 'git push --tags'?")
            if not maybe_continue():
                error("Pushing tags failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing tags failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed tags successfully")
        # Propose github pull request
        info(fmt("@{gf}@!==> @|") +
             "Generating pull request to distro file located at '{0}'".format(
                 get_release_file_url(distro)))
        try:
            pull_request_url = open_pull_request(track, repository, distro)
            if pull_request_url:
                info(fmt(_success) +
                     "Pull request opened at: {0}".format(pull_request_url))
                # BUGFIX: condition was inverted — it opened the browser only
                # when BLOOM_NO_WEBBROWSER (the opt-out) was set, and only off
                # macOS. Open it when the opt-out is NOT set, on Darwin.
                if 'BLOOM_NO_WEBBROWSER' not in os.environ and platform.system() in ['Darwin']:
                    webbrowser.open(pull_request_url)
            else:
                info("The release of your packages was successful, but the pull request failed.")
                info("Please manually open a pull request by editing the file here: '{0}'"
                     .format(get_release_file_url(distro)))
                info(fmt(_error) + "No pull request opened.")
        except Exception as e:
            debug(traceback.format_exc())
            error("Failed to open pull request: {0} - {1}".format(
                type(e).__name__, e), exit=True)
def get_github_interface(quiet=False):
    """Return a cached, authenticated GitHub client.

    Resolution order: module-level cache ``_gh``; oauth token stored in
    ``~/.config/bloom``; otherwise interactively prompt for username and
    password and create a new oauth token on the user's behalf.

    :param quiet: when True, never prompt — return None if no stored token
    :returns: a ``Github`` client, or None when unavailable/aborted
    """
    def mfa_prompt(oauth_config_path, username):
        """Explain how to create a token for users with Multi-Factor Authentication configured."""
        warning("Receiving 401 when trying to create an oauth token can be caused by the user "
                "having two-factor authentication enabled.")
        warning("If 2FA is enabled, the user will have to create an oauth token manually.")
        warning("A token can be created at https://github.com/settings/tokens")
        warning("The resulting token can be placed in the '{oauth_config_path}' file as such:"
                .format(**locals()))
        info("")
        warning('{{"github_user": "******", "oauth_token": "TOKEN_GOES_HERE"}}'
                .format(**locals()))
        info("")
    global _gh
    # Reuse the cached client if one was already built this process.
    if _gh is not None:
        return _gh
    # First check to see if the oauth token is stored
    oauth_config_path = os.path.join(os.path.expanduser('~'), '.config', 'bloom')
    config = {}
    if os.path.exists(oauth_config_path):
        with open(oauth_config_path, 'r') as f:
            config = json.loads(f.read())
        token = config.get('oauth_token', None)
        username = config.get('github_user', None)
        if token and username:
            # Stored credentials are trusted as-is; no validity check here.
            return Github(username, auth=auth_header_from_oauth_token(token), token=token)
    if not os.path.isdir(os.path.dirname(oauth_config_path)):
        os.makedirs(os.path.dirname(oauth_config_path))
    if quiet:
        return None
    # Ok, now we have to ask for the user name and pass word
    info("")
    warning("Looks like bloom doesn't have an oauth token for you yet.")
    warning("Therefore bloom will require your GitHub username and password just this once.")
    warning("With your GitHub username and password bloom will create an oauth token on your behalf.")
    warning("The token will be stored in `~/.config/bloom`.")
    warning("You can delete the token from that file to have a new token generated.")
    warning("Guard this token like a password, because it allows someone/something to act on your behalf.")
    warning("If you need to unauthorize it, remove it from the 'Applications' menu in your GitHub account page.")
    info("")
    if not maybe_continue('y', "Would you like to create an OAuth token now"):
        return None
    token = None
    while token is None:
        try:
            # Default the GitHub username to the local login name.
            username = getpass.getuser()
            username = safe_input("GitHub username [{0}]: ".format(username)) or username
            password = getpass.getpass("GitHub password (never stored): ")
        except (KeyboardInterrupt, EOFError):
            return None
        if not password:
            error("No password was given, aborting.")
            return None
        gh = Github(username, auth=auth_header_from_basic_auth(username, password))
        try:
            token = gh.create_new_bloom_authorization(update_auth=True)
            # Persist the new token for future runs.
            with open(oauth_config_path, 'w') as f:
                config.update({'oauth_token': token, 'github_user': username})
                f.write(json.dumps(config))
            info("The token '{token}' was created and stored in the bloom config file: '{oauth_config_path}'"
                 .format(**locals()))
        except GitHubAuthException as exc:
            error("{0}".format(exc))
            mfa_prompt(oauth_config_path, username)
        except GithubException as exc:
            error("{0}".format(exc))
            info("")
            if hasattr(exc, 'resp') and '{0}'.format(exc.resp.status) in ['401']:
                mfa_prompt(oauth_config_path, username)
            warning("This sometimes fails when the username or password are incorrect, try again?")
            # Declining here aborts; otherwise the while-loop retries since
            # token is still None on this path.
            if not maybe_continue():
                return None
    _gh = gh
    return gh
# Build the Sphinx documentation for `ver` and publish it onto the
# repository's gh-pages branch under doc/<ver>, updating the version
# number referenced by doc/index.html.
print("Generating github pages documentation for version '{0}'...".format(ver))
execute_command('make clean', cwd='doc')
execute_command('python setup.py build_sphinx')
execute_command('sphinxtogithub doc/build/html --verbose')
orig_cwd = os.getcwd()
clone = GitClone()
with clone as clone_dir:
    execute_command('git clean -fdx')
    with inbranch('gh-pages'):
        doc_dir = os.path.join('doc', ver)
        if os.path.exists(doc_dir):
            warning("Documentation for version '" + ver + "' already exists.")
            if not maybe_continue('y'):
                sys.exit(-1)
            execute_command('git rm -rf ' + doc_dir)
        shutil.copytree(os.path.join(orig_cwd, 'doc', 'build', 'html'),
                        doc_dir)
        # BUGFIX: pattern is now a raw string — '\d' in a plain string is an
        # invalid escape sequence (DeprecationWarning on modern Pythons).
        p = re.compile(r'\d*[.]\d*[.]\d*')
        # Rewrite the version number embedded in the redirect page.
        with open('doc/index.html', 'r') as f:
            redirect = f.read()
        redirect = p.sub(ver, redirect)
        with open('doc/index.html', 'w+') as f:
            f.write(redirect)
        execute_command('git add -f ' + os.path.join('doc', ver, '*'))
        execute_command('git add -f doc/index.html')
        if has_changes():
            execute_command('git commit -m "Uploading documentation for '
                            'version {0}"'.format(ver))
def execute_branch(src, dst, interactive, directory=None):
    """
    Changes to the destination branch, creates branch and patches/branch
    if they do not exist.

    If the dst branch does not exist yet, then it is created by branching
    the current working branch or the specified SRC_BRANCH.

    If the patches/dst branch does not exist yet then it is created.

    If the branches are created successfully, then the working branch will be
    set to the dst branch, otherwise the working branch will remain unchanged.

    :param src: source branch from which to copy
    :param dst: destination branch
    :param interactive: if True actions are summarized before committing
    :param directory: directory in which to perform this action
    :raises: subprocess.CalledProcessError if any git calls fail
    """
    # Determine if the source branch exists
    if src is None:
        error("No source specified and/or not a branch currently", exit=True)
    if branch_exists(src, local_only=False, directory=directory):
        if not branch_exists(src, local_only=True, directory=directory):
            # Remote-only branch: create a local tracking branch first.
            debug("Tracking source branch: {0}".format(src))
            track_branches(src, directory)
    elif tag_exists(src):
        # A tag is also an acceptable source ref.
        pass
    else:
        error("Specified source branch does not exist: {0}".format(src),
              exit=True)
    # Determine if the destination branch needs to be created
    create_dst_branch = False
    if branch_exists(dst, local_only=False, directory=directory):
        if not branch_exists(dst, local_only=True, directory=directory):
            debug("Tracking destination branch: {0}".format(dst))
            track_branches(dst, directory)
    else:
        create_dst_branch = True
    # Determine if the destination patches branch needs to be created
    create_dst_patches_branch = False
    dst_patches = 'patches/' + dst
    if branch_exists(dst_patches, False, directory=directory):
        if not branch_exists(dst_patches, True, directory=directory):
            track_branches(dst_patches, directory)
    else:
        create_dst_patches_branch = True
    # Summarize
    if interactive:
        info("Summary of changes:")
        if create_dst_branch:
            info(" " * 22 + "- The specified destination branch, " +
                 ansi('boldon') + dst + ansi('reset') +
                 ", does not exist; it will be created from the source "
                 "branch " + ansi('boldon') + src + ansi('reset'))
        if create_dst_patches_branch:
            info(" " * 22 + "- The destination patches branch, " +
                 ansi('boldon') + dst_patches + ansi('reset') +
                 ", does not exist; it will be created")
        info(" " * 22 + "- The working branch will be set to " +
             ansi('boldon') + dst + ansi('reset'))
        if not maybe_continue():
            error("Answered no to continue, aborting.", exit=True)
    # Make changes to the layout
    current_branch = get_current_branch(directory)
    try:
        # Change to the src branch
        checkout(src, directory=directory)
        # Create the dst branch if needed
        if create_dst_branch:
            create_branch(dst, changeto=True, directory=directory)
        else:
            checkout(dst, directory=directory)
        # Create the dst patches branch if needed
        if create_dst_patches_branch:
            create_branch(dst_patches, orphaned=True, directory=directory)
        # Create the starting config data if it does not exist
        patches_ls = ls_tree(dst_patches, directory=directory)
        if 'patches.conf' not in patches_ls:
            # Patches config not setup, set it up
            config = {
                'parent': src,
                'previous': '',
                'base': get_commit_hash(dst, directory=directory),
                'trim': '',
                'trimbase': ''
            }
            set_patch_config(dst_patches, config, directory=directory)
        else:
            config = get_patch_config(dst_patches, directory=directory)
            if config['parent'] != src:
                # Source moved: record new parent and re-base the config.
                warning("Updated parent to '{0}' from '{1}'".format(
                    src, config['parent']))
                config['parent'] = src
                config['base'] = get_commit_hash(dst, directory=directory)
                set_patch_config(dst_patches, config, directory=directory)
        # Command successful, do not switch back to previous branch
        current_branch = None
    finally:
        # On failure, restore whatever branch was checked out originally.
        if current_branch is not None:
            checkout(current_branch, directory=directory)
def execute_track(track, track_dict, release_inc, pretend=True, debug=False, fast=False):
    """Run every action of a release track as a subprocess.

    Templates each action string with the track settings, executes it
    (unless ``pretend``), and on success records the new ``release_inc``
    and ``last_version`` back into the tracks config.

    :param track: name of the track being executed
    :param track_dict: the track's configuration (``actions`` list is read)
    :param release_inc: release increment used to build the settings
    :param pretend: when True, only print the templated commands
    :param debug: when True, export DEBUG=1 for child processes
        (note: this parameter shadows the module-level ``debug`` logger
        used elsewhere in this file)
    :param fast: when True, export BLOOM_UNSAFE=1 for child processes
    """
    info("Processing release track settings for '{0}'".format(track))
    settings = process_track_settings(track_dict, release_inc)
    # setup extra settings
    archive_dir_path = tempfile.mkdtemp()
    settings['archive_dir_path'] = archive_dir_path
    if settings['release_tag'] != ':{none}':
        archive_file = '{name}-{release_tag}.tar.gz'.format(**settings)
    else:
        archive_file = '{name}.tar.gz'.format(**settings)
    settings['archive_path'] = os.path.join(archive_dir_path, archive_file)
    # execute actions
    info("", use_prefix=False)
    info("Executing release track '{0}'".format(track))
    for action in track_dict['actions']:
        # Tarball upstreams skip the export step; the tarball itself is used.
        if 'bloom-export-upstream' in action and settings['vcs_type'] == 'tar':
            warning("Explicitly skipping bloom-export-upstream for tar.")
            settings['archive_path'] = settings['vcs_uri']
            continue
        templated_action = template_str(action, settings)
        info(fmt("@{bf}@!==> @|@!" + sanitize(str(templated_action))))
        if pretend:
            continue
        stdout = None
        stderr = None
        # In quiet mode, capture child output so it can be shown on demand.
        if bloom.util._quiet:
            stdout = subprocess.PIPE
            stderr = subprocess.STDOUT
        if debug and 'DEBUG' not in os.environ:
            os.environ['DEBUG'] = '1'
        if fast and 'BLOOM_UNSAFE' not in os.environ:
            os.environ['BLOOM_UNSAFE'] = '1'
        templated_action = templated_action.split()
        # Resolve the executable to an absolute path; shell=False below.
        templated_action[0] = find_full_path(templated_action[0])
        p = subprocess.Popen(templated_action, stdout=stdout, stderr=stderr,
                             shell=False, env=os.environ.copy())
        out, err = p.communicate()
        if bloom.util._quiet:
            info(out, use_prefix=False)
        ret = p.returncode
        if ret > 0:
            # A missing rosdep key from a generator may be skipped
            # interactively; any other failure is fatal.
            if 'bloom-generate' in templated_action[0] and ret == code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO:
                error(fmt(_error + "The following generator action reported that it is missing one or more"))
                error(fmt(" @|rosdep keys, but that the key exists in other platforms:"))
                error(fmt("@|'@!{0}'@|").format(templated_action))
                info('', use_prefix=False)
                error(fmt("@|If you are @!@_@{rf}absolutely@| sure that this key is unavailable for the platform in"))
                error(fmt("@|question, the generator can be skipped and you can proceed with the release."))
                if maybe_continue('n', 'Skip generator action and continue with release'):
                    info("\nAction skipped, continuing with release.\n")
                    continue
                info('', use_prefix=False)
            error(fmt(_error + "Error running command '@!{0}'@|")
                  .format(templated_action), exit=True)
        info('', use_prefix=False)
    if not pretend:
        # Update the release_inc
        tracks_dict = get_tracks_dict_raw()
        tracks_dict['tracks'][track]['release_inc'] = settings['release_inc']
        tracks_dict['tracks'][track]['last_version'] = settings['version']
        write_tracks_dict_raw(tracks_dict,
                              'Updating release inc to: ' + str(settings['release_inc']))
def generate_debian(self, data, stamp, debian_distro):
    """Generate the debian/ directory contents for one Debian distro.

    Resolves rosdep dependencies, renders the control/changelog/rules/
    gbp.conf templates, writes compat and source/format, and commits the
    result with git.

    :param data: substitution dict (mutated; 'Depends'/'BuildDepends' are
        flattened from dicts to lists here)
    :param stamp: datetime used for the changelog date fields
    :param debian_distro: Debian/Ubuntu distro codename being targeted
    :returns: an error code from ``code`` on failure, None on success
    """
    info("Generating debian for {0}...".format(debian_distro))
    # Resolve dependencies
    deps = data['Depends']
    build_deps = data['BuildDepends']
    data = self.resolve_dependencies(data, debian_distro)
    # Set the distribution
    data['Distribution'] = debian_distro
    # Use the time stamp to set the date strings
    data['Date'] = stamp.strftime('%a, %d %b %Y %T %z')
    data['YYYY'] = stamp.strftime('%Y')
    self.summarize_dependency_mapping(data, deps, build_deps)
    # Flatten the per-key dependency mapping into flat lists for templates.
    deps_list = []
    for dep in data['Depends'].values():
        deps_list.extend(dep)
    data['Depends'] = deps_list
    build_deps_list = []
    for dep in data['BuildDepends'].values():
        build_deps_list.extend(dep)
    data['BuildDepends'] = build_deps_list
    # Create/Clean the debian folder
    debian_dir = 'debian'
    if os.path.exists(debian_dir):
        if self.interactive:
            warning("Debian directory exists: " + debian_dir)
            warning("Do you wish to overwrite it?")
            if not maybe_continue('y'):
                error("Answered no to continue, aborting.")
                return code.ANSWERED_NO_TO_CONTINUE
        else:
            warning("Overwriting Debian directory: " + debian_dir)
        execute_command('git rm -rf ' + debian_dir)
        execute_command('git commit -m "Clearing previous debian folder"')
        # Remove any untracked leftovers git did not delete.
        if os.path.exists(debian_dir):
            shutil.rmtree(debian_dir)
    os.makedirs(debian_dir)
    # Generate the control file from the template
    self.create_from_template('control', data, debian_dir)
    # Generate the changelog file
    self.create_from_template('changelog', data, debian_dir)
    # Generate the rules file
    # NOTE: 0755 is a Python 2 octal literal — this file targets Python 2.
    if data['BuildType'] == 'cmake':
        self.create_from_template('rules.cmake', data, debian_dir,
                                  chmod=0755, outfile='rules')
    elif data['BuildType'] == 'metapackage':
        self.create_from_template('rules.metapackage', data, debian_dir,
                                  chmod=0755, outfile='rules')
    else:
        error("Unrecognized BuildType (" + data['BuildType'] +
              ") for package: " + data['Name'])
        return code.DEBIAN_UNRECOGNIZED_BUILD_TYPE
    # Generate the gbp.conf file
    self.create_from_template('gbp.conf', data, debian_dir)
    # Create the compat file
    compat_path = os.path.join(debian_dir, 'compat')
    with open(compat_path, 'w+') as f:
        print("7", file=f)
    # Create the source/format file
    source_dir = os.path.join(debian_dir, 'source')
    os.makedirs(source_dir)
    format_path = os.path.join(source_dir, 'format')
    with open(format_path, 'w+') as f:
        print("3.0 (quilt)", file=f)
    # Commit results
    execute_command('git add ' + debian_dir)
    execute_command('git commit -m "Generated debian files for ' +
                    debian_distro + '"')
def generate_ros_distro_diff(track, repository, distro):
    """Update the rosdistro distribution file entry for this repository and
    show/save a unified diff against the upstream file.

    Interactively prompts (unless suppressed via BLOOM_DONT_ASK_FOR_* env
    vars) for doc, source, and maintenance-status entries.

    :returns: the updated ``rosdistro.DistributionFile`` when the file
        changed, otherwise None
    """
    distribution_dict = get_distribution_file(distro).get_data()
    # Get packages
    packages = get_packages()
    if len(packages) == 0:
        warning("No packages found, will not generate 'package: path' entries for rosdistro.")
    # Get version
    track_dict = get_tracks_dict_raw()['tracks'][track]
    last_version = track_dict['last_version']
    release_inc = track_dict['release_inc']
    # NOTE(review): the .encode('utf-8') calls below force plain byte strings
    # for the YAML dump; on Python 3 they produce bytes and would break the
    # later str concatenations -- this code path presumably targets Python 2.
    version = '{0}-{1}'.format(last_version, release_inc).encode('utf-8')
    # Create a repository if there isn't already one
    if repository not in distribution_dict['repositories']:
        global _user_provided_release_url
        distribution_dict['repositories'][repository] = {}
    # Create a release entry if there isn't already one
    if 'release' not in distribution_dict['repositories'][repository]:
        distribution_dict['repositories'][repository]['release'.encode('utf-8')] = {
            'url'.encode('utf-8'): _user_provided_release_url
        }
    # Update the repository
    repo = distribution_dict['repositories'][repository]['release']
    if 'tags' not in repo:
        repo['tags'.encode('utf-8')] = {}
    repo['tags']['release'.encode('utf-8')] = generate_release_tag(distro)
    repo['version'.encode('utf-8')] = version
    if 'packages' not in repo:
        repo['packages'.encode('utf-8')] = []
    for path, pkg in packages.items():
        if pkg.name not in repo['packages']:
            repo['packages'].append(pkg.name)
    # Remove any missing packages
    packages_being_released = [p.name for p in packages.values()]
    for pkg_name in list(repo['packages']):
        if pkg_name not in packages_being_released:
            repo['packages'].remove(pkg_name)
    repo['packages'].sort()

    def get_repository_info_from_user():
        """Prompt for a vcs type/url/version triple; {} if the user aborts."""
        data = {}
        while True:
            vcs_type = safe_input('VCS type [git, svn, hg, bzr]: ')
            if vcs_type in ['git', 'svn', 'hg', 'bzr']:
                break
            error("'{0}' is not a valid vcs type.".format(vcs_type))
            if not maybe_continue(msg='Try again'):
                return {}
        data['type'] = vcs_type
        while True:
            url = safe_input('VCS url: ')
            if url:
                break
            error("Nothing entered for url.")
            if not maybe_continue(msg='Try again'):
                return {}
        data['url'] = url
        while True:
            version = safe_input('VCS version [commit, tag, branch, etc]: ')
            if version:
                break
            error("Nothing entered for version.")
            if not maybe_continue(msg='Try again'):
                return {}
        data['version'] = version
        return data

    # Ask for doc entry
    if 'BLOOM_DONT_ASK_FOR_DOCS' not in os.environ:
        docs = distribution_dict['repositories'][repository].get('doc', {})
        if not docs and maybe_continue(msg='Would you like to add documentation information for this repository?'):
            info("Please enter your repository information for the doc generation job.")
            info("This information should point to the repository from which documentation should be generated.")
            docs = get_repository_info_from_user()
            distribution_dict['repositories'][repository]['doc'] = docs
    # Ask for source entry
    if 'BLOOM_DONT_ASK_FOR_SOURCE' not in os.environ:
        source = distribution_dict['repositories'][repository].get('source', {})
        if not source and maybe_continue(msg='Would you like to add source information for this repository?'):
            info("Please enter information which points to the active development branch for this repository.")
            info("This information is used to run continuous integration jobs and for developers to checkout from.")
            source = get_repository_info_from_user()
            distribution_dict['repositories'][repository]['source'] = source
    # Ask for maintainership information
    if 'BLOOM_DONT_ASK_FOR_MAINTENANCE_STATUS' not in os.environ:
        status = distribution_dict['repositories'][repository].get('status', None)
        description = distribution_dict['repositories'][repository].get('status_description', None)
        if status is None and maybe_continue(msg='Would you like to add a maintenance status for this repository?'):
            info("Please enter a maintenance status.")
            info("Valid maintenance statuses:")
            info("- developed: active development is in progress")
            info("- maintained: no new development, but bug fixes and pull requests are addressed")
            info("- end-of-life: should not be used, will disappear at some point")
            while True:
                status = safe_input('Status: ')
                if status in ['developed', 'maintained', 'end-of-life']:
                    break
                error("'{0}' is not a valid status.".format(status))
                if not maybe_continue(msg='Try again'):
                    status = None
                    break
            if status is not None:
                info("You can also enter a status description.")
                info("This is usually reserved for giving a reason when a status is 'end-of-life'.")
                if description is not None:
                    info("Current status description: {0}".format(description))
                description_in = safe_input('Status Description [press Enter for no change]: ')
                if description_in:
                    description = description_in
        if status is not None:
            distribution_dict['repositories'][repository]['status'] = status
        if description is not None:
            distribution_dict['repositories'][repository]['status_description'] = description
    # Do the diff
    distro_file_name = get_relative_distribution_file_path(distro)
    updated_distribution_file = rosdistro.DistributionFile(distro, distribution_dict)
    distro_dump = yaml_from_distribution_file(updated_distribution_file)
    distro_file_raw = load_url_to_file_handle(get_disitrbution_file_url(distro)).read()
    if distro_file_raw != distro_dump:
        # Calculate the diff
        udiff = difflib.unified_diff(distro_file_raw.splitlines(),
                                     distro_dump.splitlines(),
                                     fromfile=distro_file_name,
                                     tofile=distro_file_name)
        temp_dir = tempfile.mkdtemp()
        udiff_file = os.path.join(temp_dir, repository + '-' + version + '.patch')
        udiff_raw = ''
        info("Unified diff for the ROS distro file located at '{0}':".format(udiff_file))
        # Accumulate the raw (uncolored) diff while printing a colorized copy.
        for line in udiff:
            if line.startswith('@@'):
                udiff_raw += line
                line = fmt('@{cf}' + sanitize(line))
            if line.startswith('+'):
                if not line.startswith('+++'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{gf}' + sanitize(line))
            if line.startswith('-'):
                if not line.startswith('---'):
                    line += '\n'
                udiff_raw += line
                line = fmt('@{rf}' + sanitize(line))
            if line.startswith(' '):
                line += '\n'
                udiff_raw += line
            info(line, use_prefix=False, end='')
        # Assert that only this repository is being changed
        # SECURITY NOTE(review): yaml.load without an explicit Loader can
        # construct arbitrary objects; consider yaml.safe_load for this
        # downloaded content -- left unchanged to avoid breaking custom tags.
        distro_file_yaml = yaml.load(distro_file_raw)
        distro_yaml = yaml.load(distro_dump)
        if 'repositories' in distro_file_yaml:
            distro_file_repos = distro_file_yaml['repositories']
            for repo in distro_yaml['repositories']:
                if repo == repository:
                    continue
                if repo not in distro_file_repos or distro_file_repos[repo] != distro_yaml['repositories'][repo]:
                    error("This generated pull request modifies a repository entry other than the one being released.")
                    error("This likely occurred because the upstream rosdistro changed during this release.")
                    error("This pull request will abort, please re-run this command with the -p option to try again.", exit=True)
        # Write the diff out to file
        with open(udiff_file, 'w+') as f:
            f.write(udiff_raw)
        # Return the diff
        return updated_distribution_file
    else:
        warning("This release resulted in no changes to the ROS distro file...")
        return None
def get_github_interface(quiet=False):
    """Return an authenticated ``Github`` client, caching it in module-global
    ``_gh`` once a token-based login succeeds.

    Tries, in order: the cached ``_gh``; a stored oauth token in
    ``~/.config/bloom``; finally an interactive username/password flow that
    creates and stores a new oauth token.  Returns None when ``quiet`` is set
    and no stored token exists, or when the user aborts.
    """
    def mfa_prompt(oauth_config_path, username):
        """Explain how to create a token for users with Multi-Factor Authentication configured."""
        warning("Receiving 401 when trying to create an oauth token can be caused by the user "
                "having two-factor authentication enabled.")
        warning("If 2FA is enabled, the user will have to create an oauth token manually.")
        warning("A token can be created at https://github.com/settings/tokens")
        warning("The resulting token can be placed in the '{oauth_config_path}' file as such:"
                .format(**locals()))
        info("")
        warning('{{"github_user": "******", "oauth_token": "TOKEN_GOES_HERE"}}'
                .format(**locals()))
        info("")
    global _gh
    if _gh is not None:
        # Already authenticated during this process; reuse the client.
        return _gh
    # First check to see if the oauth token is stored
    oauth_config_path = os.path.join(os.path.expanduser('~'), '.config', 'bloom')
    config = {}
    if os.path.exists(oauth_config_path):
        with open(oauth_config_path, 'r') as f:
            config = json.loads(f.read())
        token = config.get('oauth_token', None)
        username = config.get('github_user', None)
        if token and username:
            # NOTE(review): this early return does not populate _gh, so the
            # stored-token path re-reads the config file on every call.
            return Github(username, auth=auth_header_from_oauth_token(token), token=token)
    if not os.path.isdir(os.path.dirname(oauth_config_path)):
        os.makedirs(os.path.dirname(oauth_config_path))
    if quiet:
        return None
    # Ok, now we have to ask for the user name and pass word
    info("")
    warning("Looks like bloom doesn't have an oauth token for you yet.")
    warning("Therefore bloom will require your GitHub username and password just this once.")
    warning("With your GitHub username and password bloom will create an oauth token on your behalf.")
    warning("The token will be stored in `~/.config/bloom`.")
    warning("You can delete the token from that file to have a new token generated.")
    warning("Guard this token like a password, because it allows someone/something to act on your behalf.")
    warning("If you need to unauthorize it, remove it from the 'Applications' menu in your GitHub account page.")
    info("")
    if not maybe_continue('y', "Would you like to create an OAuth token now"):
        return None
    token = None
    # Loop until a token is created, or the user gives up / interrupts.
    while token is None:
        try:
            username = getpass.getuser()
            username = safe_input("GitHub username [{0}]: ".format(username)) or username
            password = getpass.getpass("GitHub password (never stored): ")
        except (KeyboardInterrupt, EOFError):
            return None
        if not password:
            error("No password was given, aborting.")
            return None
        gh = Github(username, auth=auth_header_from_basic_auth(username, password))
        try:
            token = gh.create_new_bloom_authorization(update_auth=True)
            with open(oauth_config_path, 'w') as f:
                config.update({'oauth_token': token, 'github_user': username})
                f.write(json.dumps(config))
            info("The token '{token}' was created and stored in the bloom config file: '{oauth_config_path}'"
                 .format(**locals()))
        except GitHubAuthException as exc:
            error("{0}".format(exc))
            mfa_prompt(oauth_config_path, username)
        except GithubException as exc:
            error("{0}".format(exc))
            info("")
            # A 401 here usually means 2FA is enabled; explain the manual path.
            if hasattr(exc, 'resp') and '{0}'.format(exc.resp.status) in ['401']:
                mfa_prompt(oauth_config_path, username)
            warning("This sometimes fails when the username or password are incorrect, try again?")
            if not maybe_continue():
                return None
    # Cache the authenticated client for subsequent calls.
    _gh = gh
    return gh
def perform_release(repository, track, distro, new_track, interactive):
    """Run a full release of ``repository`` for ``distro`` using ``track``.

    Verifies push access, migrates legacy bloom.conf if present, creates or
    selects the release track, runs ``git-bloom-release``, pushes branches
    and tags (offering --force on failure), and finally generates the
    rosdistro diff.  Exits via ``error(..., exit=True)`` on fatal problems.
    """
    release_repo = get_release_repo(repository, distro)
    with change_directory(release_repo.get_path()):
        # Check for push permissions
        try:
            info(fmt("@{gf}@!==> @|Testing for push permission on release repository"))
            check_output('git push', shell=True)
        except subprocess.CalledProcessError:
            error("Cannot push to remote release repository.", exit=True)
        # Check to see if the old bloom.conf exists
        if check_for_bloom_conf(repository):
            # Convert to a track
            info("Old bloom.conf file detected.")
            info(fmt("@{gf}@!==> @|Converting to bloom.conf to track"))
            convert_old_bloom_conf(None if new_track else distro)
        # Check that the track is valid
        tracks_dict = get_tracks_dict_raw()
        # If new_track, create the new track first
        if new_track:
            if not track:
                error("You must specify a track when creating a new one.", exit=True)
            overrides = {'ros_distro': distro}
            if track in tracks_dict['tracks']:
                warning("Track '{0}' exists, editing instead...".format(track))
                edit_track_cmd(track)
            else:
                # Create a new track called <track>,
                # copying an existing track if possible,
                # and overriding the ros_distro
                new_track_cmd(track, copy_track='', overrides=overrides)
                tracks_dict = get_tracks_dict_raw()
        if track and track not in tracks_dict['tracks']:
            error("Given track '{0}' does not exist in release repository."
                  .format(track))
            error("Available tracks: " + str(tracks_dict['tracks'].keys()),
                  exit=True)
        elif not track:
            # list() so this works on Python 3, where keys() is a view
            # that supports neither len()-free indexing nor tracks[0].
            tracks = list(tracks_dict['tracks'].keys())
            # Error out if there are no tracks
            if len(tracks) == 0:
                error("Release repository has no tracks.")
                info("Manually clone the repository:")
                info(" git clone {0}".format(release_repo.get_url()))
                info("And then create a new track:")
                info(" git-bloom-config new <track name>")
                error("Run again after creating a track.", exit=True)
            # Error out if there is more than one track
            if len(tracks) != 1:
                error("No track specified and there is not just one track.")
                error("Please specify one of the available tracks: " +
                      str(tracks), exit=True)
            # Get the only track
            track = tracks[0]
        # Ensure the track is complete
        track_dict = tracks_dict['tracks'][track]
        update_track(track_dict)
        tracks_dict['tracks'][track] = track_dict
        write_tracks_dict_raw(tracks_dict)
        # Run the release
        info(fmt("@{gf}@!==> @|") +
             "Releasing '{0}' using release track '{1}'"
             .format(repository, track))
        cmd = 'git-bloom-release ' + str(track)
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            error("Release failed, exiting.", exit=True)
        info(fmt(_success) +
             "Released '{0}' using release track '{1}' successfully"
             .format(repository, track))
        # Check for pushing
        if interactive:
            info("Releasing complete, push?")
            if not maybe_continue():
                error("User answered no to continue prompt, aborting.", exit=True)
        # Push changes to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing changes to release repository for '{0}'"
             .format(repository))
        cmd = 'git push --all'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            # Offer a forced push before giving up.
            error("Pushing changes failed, would you like to add '--force' to 'git push --all'?")
            if not maybe_continue():
                error("Pushing changes failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing changes failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed changes successfully")
        # Push tags to the repository
        info(fmt("@{gf}@!==> @|") +
             "Pushing tags to release repository for '{0}'"
             .format(repository))
        cmd = 'git push --tags'
        info(fmt("@{bf}@!==> @|@!" + str(cmd)))
        try:
            subprocess.check_call(cmd, shell=True)
        except subprocess.CalledProcessError:
            # Same forced-push fallback for tags.
            error("Pushing changes failed, would you like to add '--force' to 'git push --tags'?")
            if not maybe_continue():
                error("Pushing tags failed, exiting.", exit=True)
            cmd += ' --force'
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                error("Pushing tags failed, exiting.", exit=True)
        info(fmt(_success) + "Pushed tags successfully")
        # Propose github pull request
        info(fmt("@{gf}@!==> @|") +
             "Generating pull request to distro file located at '{0}'"
             .format(ROS_DISTRO_FILE).format(distro))
        generate_ros_distro_diff(track, repository, distro)
        info("In the future this will create a pull request for you, done for now...")
        info(fmt(_success) + "Pull request opened at: '{0}'".format('Not yet Implemented'))
def execute_track(track, track_dict, release_inc, pretend=True, debug=False, fast=False):
    """Execute every action of a release track as a subprocess.

    :param track: track name (used for messages and for updating the tracks file)
    :param track_dict: the track's settings, including its 'actions' list
    :param release_inc: release increment used when processing track settings
    :param pretend: when True, only print the templated actions without running them
    :param debug: when True, export DEBUG=1 to child processes
    :param fast: when True, export BLOOM_UNSAFE=1 to child processes
    """
    info("Processing release track settings for '{0}'".format(track))
    settings = process_track_settings(track_dict, release_inc)
    # setup extra settings
    archive_dir_path = tempfile.mkdtemp()
    settings['archive_dir_path'] = archive_dir_path
    if settings['release_tag'] != ':{none}':
        archive_file = '{name}-{release_tag}.tar.gz'.format(**settings)
    else:
        archive_file = '{name}.tar.gz'.format(**settings)
    settings['archive_path'] = os.path.join(archive_dir_path, archive_file)
    # execute actions
    info("", use_prefix=False)
    info("Executing release track '{0}'".format(track))
    for action in track_dict['actions']:
        # tar upstreams are already archives; skip the export step for them
        if 'bloom-export-upstream' in action and settings['vcs_type'] == 'tar':
            warning("Explicitly skipping bloom-export-upstream for tar.")
            settings['archive_path'] = settings['vcs_uri']
            continue
        templated_action = template_str(action, settings)
        info(fmt("@{bf}@!==> @|@!" + sanitize(str(templated_action))))
        if pretend:
            continue
        stdout = None
        stderr = None
        # In quiet mode capture child output so it can be echoed via info()
        if bloom.util._quiet:
            stdout = subprocess.PIPE
            stderr = subprocess.STDOUT
        if debug and 'DEBUG' not in os.environ:
            os.environ['DEBUG'] = '1'
        if fast and 'BLOOM_UNSAFE' not in os.environ:
            os.environ['BLOOM_UNSAFE'] = '1'
        templated_action = templated_action.split()
        templated_action[0] = find_full_path(templated_action[0])
        p = subprocess.Popen(templated_action, stdout=stdout, stderr=stderr,
                             shell=False, env=os.environ.copy())
        out, err = p.communicate()
        if bloom.util._quiet:
            info(out, use_prefix=False)
        ret = p.returncode
        if ret > 0:
            # A missing rosdep key from a generator may be skipped
            # interactively; any other failure aborts the release.
            if 'bloom-generate' in templated_action[0] and ret == code.GENERATOR_NO_ROSDEP_KEY_FOR_DISTRO:
                error(fmt(_error + "The following generator action reported that it is missing one or more"))
                error(fmt(" @|rosdep keys, but that the key exists in other platforms:"))
                error(fmt("@|'@!{0}'@|").format(templated_action))
                info('', use_prefix=False)
                error(fmt("@|If you are @!@_@{rf}absolutely@| sure that this key is unavailable for the platform in"))
                error(fmt("@|question, the generator can be skipped and you can proceed with the release."))
                if maybe_continue('n', 'Skip generator action and continue with release'):
                    info("\nAction skipped, continuing with release.\n")
                    continue
            info('', use_prefix=False)
            error(fmt(_error + "Error running command '@!{0}'@|").format(templated_action),
                  exit=True)
        info('', use_prefix=False)
    if not pretend:
        # Update the release_inc
        tracks_dict = get_tracks_dict_raw()
        tracks_dict['tracks'][track]['release_inc'] = settings['release_inc']
        tracks_dict['tracks'][track]['last_version'] = settings['version']
        # if release tag is set to ask and a custom value is used
        if settings['version'] != settings['release_tag']:
            tracks_dict['tracks'][track]['last_release'] = settings['release_tag']
        write_tracks_dict_raw(tracks_dict,
                              'Updating release inc to: ' + str(settings['release_inc']))
"""Tiny helper script: ask the user to continue via bloom's maybe_continue
prompt and report the answer through the exit code (0 = yes, 1 = no)."""
import os
import sys

from export_bloom_from_src import get_path_and_pythonpath

# Setup environment for running commands
path, ppath = get_path_and_pythonpath()
os.putenv('PATH', path)
os.putenv('PYTHONPATH', ppath)

# Imported only after PATH/PYTHONPATH are exported for child processes.
from bloom.util import maybe_continue

if __name__ == '__main__':
    # Exit status encodes the user's answer.
    sys.exit(0 if maybe_continue() else 1)
def generate_substitutions_from_package(package, os_name, os_version, ros_distro,
                                        installation_prefix='/usr', deb_inc=0,
                                        peer_packages=None, releaser_history=None,
                                        fallback_resolver=None, native=False):
    """Build the template-substitution dict used to generate debian files.

    :param package: catkin package object (name, version, urls, depends, ...)
    :param os_name: target OS name passed to the rosdep resolver
    :param os_version: target OS codename (also becomes data['Distribution'])
    :param ros_distro: ROS distribution name
    :param installation_prefix: install prefix placed in the templates
    :param deb_inc: debian increment number (ignored for native packages)
    :param peer_packages: names of peer packages resolvable without rosdep
    :param releaser_history: previous changelog/release history, if any
    :param fallback_resolver: optional resolver used when rosdep fails
    :param native: when True, use the 'native' debian source format
    :returns: dict of substitutions with all values converted to unicode
    """
    peer_packages = peer_packages or []
    data = {}
    # Name, Version, Description
    data['Name'] = package.name
    data['Version'] = package.version
    data['Description'] = format_description(package.description)
    # Websites
    websites = [str(url) for url in package.urls if url.type == 'website']
    homepage = websites[0] if websites else ''
    if homepage == '':
        warning("No homepage set, defaulting to ''")
    data['Homepage'] = homepage
    # Debian Increment Number
    data['DebianInc'] = '' if native else '-{0}'.format(deb_inc)
    # Debian Package Format
    data['format'] = 'native' if native else 'quilt'
    # Package name
    data['Package'] = sanitize_package_name(package.name)
    # Installation prefix
    data['InstallationPrefix'] = installation_prefix
    # Resolve dependencies
    depends = package.run_depends + package.buildtool_export_depends
    build_depends = package.build_depends + package.buildtool_depends + package.test_depends
    unresolved_keys = depends + build_depends + package.replaces + package.conflicts
    # The installer key is not considered here, but it is checked when the keys are checked before this
    resolved_deps = resolve_dependencies(
        unresolved_keys, os_name, os_version, ros_distro,
        peer_packages + [d.name for d in package.replaces + package.conflicts],
        fallback_resolver)
    data['Depends'] = sorted(set(format_depends(depends, resolved_deps)))
    data['BuildDepends'] = sorted(
        set(format_depends(build_depends, resolved_deps)))
    data['Replaces'] = sorted(
        set(format_depends(package.replaces, resolved_deps)))
    data['Conflicts'] = sorted(
        set(format_depends(package.conflicts, resolved_deps)))
    # Set the distribution
    data['Distribution'] = os_version
    # Use the time stamp to set the date strings
    stamp = datetime.datetime.now(tz.tzlocal())
    data['Date'] = stamp.strftime('%a, %d %b %Y %T %z')
    data['YYYY'] = stamp.strftime('%Y')
    # Maintainers
    maintainers = []
    for m in package.maintainers:
        maintainers.append(str(m))
    data['Maintainer'] = maintainers[0]
    data['Maintainers'] = ', '.join(maintainers)
    # Changelog
    changelogs = get_changelogs(package, releaser_history)
    if changelogs and package.version not in [x[0] for x in changelogs]:
        warning("")
        warning("A CHANGELOG.rst was found, but no changelog for this version was found.")
        warning("You REALLY should have a entry (even a blank one) for each version of your package.")
        warning("")
    if not changelogs:
        # Ensure at least a minimal changelog
        changelogs = []
    if package.version not in [x[0] for x in changelogs]:
        # Synthesize a placeholder changelog entry for the released version.
        changelogs.insert(0, (
            package.version,
            get_rfc_2822_date(datetime.datetime.now()),
            ' * Autogenerated, no changelog for this version found in CHANGELOG.rst.',
            package.maintainers[0].name,
            package.maintainers[0].email))
    bad_changelog = False
    # Make sure that the first change log is the version being released
    if package.version != changelogs[0][0]:
        error("")
        error("The version of the first changelog entry '{0}' is not the "
              "same as the version being currently released '{1}'.".format(
                  package.version, changelogs[0][0]))
        bad_changelog = True
    # Make sure that the current version is the latest in the changelog
    for changelog in changelogs:
        if parse_version(package.version) < parse_version(changelog[0]):
            error("")
            error(
                "There is at least one changelog entry, '{0}', which has a "
                "newer version than the version of package '{1}' being released, '{2}'."
                .format(changelog[0], package.name, package.version))
            bad_changelog = True
    if bad_changelog:
        error("This is almost certainly by mistake, you should really take a "
              "look at the changelogs for the package you are releasing.")
        error("")
        if not maybe_continue('n', 'Continue anyways'):
            sys.exit("User quit.")
    data['changelogs'] = changelogs
    # Use debhelper version 7 for oneric, otherwise 9
    data['debhelper_version'] = 7 if os_version in ['oneiric'] else 9
    # Summarize dependencies
    summarize_dependency_mapping(data, depends, build_depends, resolved_deps)

    def convertToUnicode(obj):
        # Normalize str/bytes recursively so the YAML/templating layer always
        # sees text; branches differ between Python 2 and 3.
        if sys.version_info.major == 2:
            if isinstance(obj, str):
                return unicode(obj.decode('utf8'))
            elif isinstance(obj, unicode):
                return obj
        else:
            if isinstance(obj, bytes):
                return str(obj.decode('utf8'))
            elif isinstance(obj, str):
                return obj
        if isinstance(obj, list):
            for i, val in enumerate(obj):
                obj[i] = convertToUnicode(val)
            return obj
        elif isinstance(obj, type(None)):
            return None
        elif isinstance(obj, tuple):
            obj_tmp = list(obj)
            for i, val in enumerate(obj_tmp):
                obj_tmp[i] = convertToUnicode(obj_tmp[i])
            return tuple(obj_tmp)
        elif isinstance(obj, int):
            return obj
        raise RuntimeError('need to deal with type %s' % (str(type(obj))))

    for item in data.items():
        data[item[0]] = convertToUnicode(item[1])
    return data
def generate_substitutions_from_package(
    package,
    os_name,
    os_version,
    ros_distro,
    installation_prefix='/usr',
    deb_inc=0,
    peer_packages=None,
    releaser_history=None,
    fallback_resolver=None,
    native=False
):
    """Build the template-substitution dict used to generate debian files.

    Evaluates package conditions for ``ros_distro``, resolves dependencies,
    collects changelog/maintainer/license information, and returns a dict of
    substitutions with all values converted to unicode.

    :param package: catkin package object (name, version, depends, ...)
    :param os_name: target OS name passed to the rosdep resolver
    :param os_version: target OS codename (also becomes data['Distribution'])
    :param ros_distro: ROS distribution name
    :param installation_prefix: install prefix placed in the templates
    :param deb_inc: debian increment number (ignored for native packages)
    :param peer_packages: names of peer packages resolvable without rosdep
    :param releaser_history: previous changelog/release history, if any
    :param fallback_resolver: optional resolver used when rosdep fails
    :param native: when True, use the 'native' debian source format
    """
    peer_packages = peer_packages or []
    data = {}
    # Name, Version, Description
    data['Name'] = package.name
    data['Version'] = package.version
    data['Description'] = format_description(package.description)
    # Websites
    websites = [str(url) for url in package.urls if url.type == 'website']
    homepage = websites[0] if websites else ''
    if homepage == '':
        warning("No homepage set, defaulting to ''")
    data['Homepage'] = homepage
    # Debian Increment Number
    data['DebianInc'] = '' if native else '-{0}'.format(deb_inc)
    # Debian Package Format
    data['format'] = 'native' if native else 'quilt'
    # Package name
    data['Package'] = sanitize_package_name(package.name)
    # Installation prefix
    data['InstallationPrefix'] = installation_prefix
    # Resolve dependencies
    evaluate_package_conditions(package, ros_distro)
    # Only keep dependencies whose <depend condition="..."> evaluated true.
    depends = [
        dep for dep in (package.run_depends + package.buildtool_export_depends)
        if dep.evaluated_condition is not False]
    build_depends = [
        dep for dep in (package.build_depends + package.buildtool_depends + package.test_depends)
        if dep.evaluated_condition is not False]
    replaces = [
        dep for dep in package.replaces
        if dep.evaluated_condition is not False]
    conflicts = [
        dep for dep in package.conflicts
        if dep.evaluated_condition is not False]
    unresolved_keys = depends + build_depends + replaces + conflicts
    # The installer key is not considered here, but it is checked when the keys are checked before this
    resolved_deps = resolve_dependencies(unresolved_keys, os_name,
                                         os_version, ros_distro,
                                         peer_packages + [d.name for d in (replaces + conflicts)],
                                         fallback_resolver)
    data['Depends'] = sorted(
        set(format_depends(depends, resolved_deps))
    )
    data['BuildDepends'] = sorted(
        set(format_depends(build_depends, resolved_deps))
    )
    data['Replaces'] = sorted(
        set(format_depends(replaces, resolved_deps))
    )
    data['Conflicts'] = sorted(
        set(format_depends(conflicts, resolved_deps))
    )
    # Build-type specific substitutions.
    build_type = package.get_build_type()
    if build_type == 'catkin':
        pass
    elif build_type == 'cmake':
        pass
    elif build_type == 'ament_cmake':
        pass
    elif build_type == 'ament_python':
        # Don't set the install-scripts flag if it's already set in setup.cfg.
        package_path = os.path.abspath(os.path.dirname(package.filename))
        setup_cfg_path = os.path.join(package_path, 'setup.cfg')
        data['pass_install_scripts'] = True
        if os.path.isfile(setup_cfg_path):
            setup_cfg = SafeConfigParser()
            setup_cfg.read([setup_cfg_path])
            if (
                setup_cfg.has_option('install', 'install-scripts') or
                setup_cfg.has_option('install', 'install_scripts')
            ):
                data['pass_install_scripts'] = False
    else:
        error("Build type '{}' is not supported by this version of bloom.".
              format(build_type), exit=True)
    # Set the distribution
    data['Distribution'] = os_version
    # Use the time stamp to set the date strings
    stamp = datetime.datetime.now(tz.tzlocal())
    data['Date'] = stamp.strftime('%a, %d %b %Y %T %z')
    data['YYYY'] = stamp.strftime('%Y')
    # Maintainers
    maintainers = []
    for m in package.maintainers:
        maintainers.append(str(m))
    data['Maintainer'] = maintainers[0]
    data['Maintainers'] = ', '.join(maintainers)
    # Changelog
    changelogs = get_changelogs(package, releaser_history)
    if changelogs and package.version not in [x[0] for x in changelogs]:
        warning("")
        warning("A CHANGELOG.rst was found, but no changelog for this version was found.")
        warning("You REALLY should have a entry (even a blank one) for each version of your package.")
        warning("")
    if not changelogs:
        # Ensure at least a minimal changelog
        changelogs = []
    if package.version not in [x[0] for x in changelogs]:
        # Synthesize a placeholder entry for the version being released.
        changelogs.insert(0, (
            package.version,
            get_rfc_2822_date(datetime.datetime.now()),
            ' * Autogenerated, no changelog for this version found in CHANGELOG.rst.',
            package.maintainers[0].name,
            package.maintainers[0].email
        ))
    bad_changelog = False
    # Make sure that the first change log is the version being released
    if package.version != changelogs[0][0]:
        error("")
        error("The version of the first changelog entry '{0}' is not the "
              "same as the version being currently released '{1}'."
              .format(package.version, changelogs[0][0]))
        bad_changelog = True
    # Make sure that the current version is the latest in the changelog
    for changelog in changelogs:
        if parse_version(package.version) < parse_version(changelog[0]):
            error("")
            error("There is at least one changelog entry, '{0}', which has a "
                  "newer version than the version of package '{1}' being released, '{2}'."
                  .format(changelog[0], package.name, package.version))
            bad_changelog = True
    if bad_changelog:
        error("This is almost certainly by mistake, you should really take a "
              "look at the changelogs for the package you are releasing.")
        error("")
        if not maybe_continue('n', 'Continue anyways'):
            sys.exit("User quit.")
    data['changelogs'] = changelogs
    # Use debhelper version 7 for oneric, otherwise 9
    data['debhelper_version'] = 7 if os_version in ['oneiric'] else 9
    # Summarize dependencies
    summarize_dependency_mapping(data, depends, build_depends, resolved_deps)
    # Copyright
    licenses = []
    separator = '\n' + '=' * 80 + '\n\n'
    for l in package.licenses:
        if hasattr(l, 'file') and l.file is not None:
            license_file = os.path.join(os.path.dirname(package.filename), l.file)
            if not os.path.exists(license_file):
                error("License file '{}' is not found.".
                      format(license_file), exit=True)
            # Use a context manager so the file handle is not leaked.
            with open(license_file, 'r') as f:
                license_text = f.read()
            if not license_text.endswith('\n'):
                license_text += '\n'
            licenses.append(license_text)
    data['Copyright'] = separator.join(licenses)

    def convertToUnicode(obj):
        # Normalize str/bytes recursively so the templating layer always
        # sees text; branches differ between Python 2 and 3.
        if sys.version_info.major == 2:
            if isinstance(obj, str):
                return unicode(obj.decode('utf8'))
            elif isinstance(obj, unicode):
                return obj
        else:
            if isinstance(obj, bytes):
                return str(obj.decode('utf8'))
            elif isinstance(obj, str):
                return obj
        if isinstance(obj, list):
            for i, val in enumerate(obj):
                obj[i] = convertToUnicode(val)
            return obj
        elif isinstance(obj, type(None)):
            return None
        elif isinstance(obj, tuple):
            obj_tmp = list(obj)
            for i, val in enumerate(obj_tmp):
                obj_tmp[i] = convertToUnicode(obj_tmp[i])
            return tuple(obj_tmp)
        elif isinstance(obj, int):
            return obj
        raise RuntimeError('need to deal with type %s' % (str(type(obj))))

    for item in data.items():
        data[item[0]] = convertToUnicode(item[1])
    return data
def open_pull_request(track, repository, distro):
    """Generate the rosdistro diff for ``repository`` and open a pull request
    against the upstream rosdistro on GitHub (forking and cloning as needed).

    :returns: result of ``create_pull_request`` on success, or None when no
        changes/PR were needed or the user/environment aborted the flow
    """
    # Get the diff
    release_file = get_release_file(distro)
    if repository in release_file.repositories:
        orig_version = release_file.repositories[repository].version
    else:
        orig_version = None
    updated_release_file = generate_ros_distro_diff(track, repository, distro)
    if updated_release_file is None:
        # There were no changes, no pull request required
        return None
    version = updated_release_file.repositories[repository].version
    updated_distro_file = yaml_from_release_file(updated_release_file)
    # Determine if the distro file is hosted on github...
    gh_org, gh_repo, gh_branch, gh_path = get_gh_info(
        get_release_file_url(distro))
    if None in [gh_org, gh_repo, gh_branch, gh_path]:
        warning("Automated pull request only available via github.com")
        return
    # Get the github user name
    gh_username = None
    bloom_user_path = os.path.join(os.path.expanduser('~'), '.bloom_user')
    if os.path.exists(bloom_user_path):
        with open(bloom_user_path, 'r') as f:
            gh_username = f.read().strip()
    gh_username = gh_username or getpass.getuser()
    # NOTE(review): raw_input is Python-2-only; this function presumably
    # targets py2 -- confirm before running under py3.
    response = raw_input("github user name [{0}]: ".format(gh_username))
    if response:
        gh_username = response
        info("Would you like bloom to store your github user name (~/.bloom_user)?")
        if maybe_continue():
            with open(bloom_user_path, 'w') as f:
                f.write(gh_username)
        else:
            # A single space marks "asked and declined" so we don't re-prompt.
            with open(bloom_user_path, 'w') as f:
                f.write(' ')
            warning("If you want to have bloom store it in the future remove the ~/.bloom_user file.")
    # Get the github password
    gh_password = getpass.getpass("github password (This is not stored):")
    if not gh_password or not gh_username:
        error("Either the github username or github password is not set.")
        warning("Skipping the pull request...")
        return
    # Check for fork
    info(fmt("@{bf}@!==> @|@!Checking for rosdistro fork on github..."))
    gh_user_repos = fetch_github_api(
        'https://api.github.com/users/{0}/repos'.format(gh_username),
        use_pagination=True)
    if gh_user_repos is None:
        # Fixed format string: the placeholder was missing, so the username
        # was never interpolated into the message.
        error("Failed to get a list of repositories for user: '{0}'".format(
            gh_username))
        warning("Skipping the pull request...")
        return
    if 'rosdistro' not in [x['name'] for x in gh_user_repos if 'name' in x]:
        warning(
            "Github user '{0}' does not have a fork ".format(gh_username) +
            "of the {0}:{1} repository, create one?".format(gh_org, gh_repo))
        if not maybe_continue():
            warning("Skipping the pull request...")
            return
        # Create a fork
        create_fork(gh_org, gh_repo, gh_username, gh_password)
    # Clone the fork
    info(fmt("@{bf}@!==> @|@!" +
             "Cloning {0}/{1}...".format(gh_username, gh_repo)))
    temp_dir = tempfile.mkdtemp()
    new_branch = None
    title = "{0}: {1} in '{2}' [bloom]".format(repository, version, gh_path)
    body = """\
Increasing version of package(s) in repository `{0}`:
- previous version: `{1}`
- new version: `{2}`
- distro file: `{3}`
- bloom version: `{4}`
""".format(repository, orig_version or 'null', version, gh_path, bloom.__version__)
    with change_directory(temp_dir):
        def _my_run(cmd):
            # Echo the command, then run it through the shell.
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            # out = check_output(cmd, stderr=subprocess.STDOUT, shell=True)
            out = None
            from subprocess import call
            call(cmd, shell=True)
            if out:
                info(out, use_prefix=False)
        _my_run('git clone https://github.com/{0}/{1}.git'.format(
            gh_username, gh_repo))
        with change_directory(gh_repo):
            _my_run(
                'git remote add bloom https://github.com/{0}/{1}.git'.format(
                    gh_org, gh_repo))
            _my_run('git remote update')
            _my_run('git fetch')
            track_branches()
            branches = get_branches()
            # Find an unused bloom-<repo>-<n> branch name.
            new_branch = 'bloom-{repository}-{count}'
            count = 0
            while new_branch.format(repository=repository, count=count) in branches:
                count += 1
            new_branch = new_branch.format(repository=repository, count=count)
            # Final check
            info(fmt("@{cf}Pull Request Title: @{yf}" + title))
            info(fmt("@{cf}Pull Request Body : \n@{yf}" + body))
            msg = fmt(
                "@!Open a @|@{cf}pull request@| @!@{kf}from@| @!'@|@!@{bf}" +
                "{gh_username}/{gh_repo}:{new_branch}".format(**locals()) +
                "@|@!' @!@{kf}into@| @!'@|@!@{bf}" +
                "{gh_org}/{gh_repo}:{gh_branch}".format(**locals()) +
                "@|@!'?")
            info(msg)
            if not maybe_continue():
                warning("Skipping the pull request...")
                return
            _my_run('git checkout -b {0} bloom/{1}'.format(
                new_branch, gh_branch))
            with open('{0}'.format(gh_path), 'w') as f:
                info(
                    fmt("@{bf}@!==> @|@!Writing new distribution file: ") +
                    str(gh_path))
                f.write(updated_distro_file)
            _my_run('git add {0}'.format(gh_path))
            _my_run('git commit -m "{0}"'.format(title))
            _my_run('git push origin {0}'.format(new_branch))
    # Open the pull request
    return create_pull_request(gh_org, gh_repo, gh_username, gh_password,
                               gh_branch, new_branch, title, body)
def open_pull_request(track, repository, distro):
    """Open a rosdistro pull request on GitHub for a release of ``repository``.

    Generates a diff of the distribution file (via generate_ros_distro_diff),
    locates or creates a fork of the rosdistro repository on the user's GitHub
    account, pushes the updated file to a new ``bloom-<repository>-<count>``
    branch on that fork, and opens a pull request into the upstream branch.

    NOTE: many locals here are consumed by ``.format(**locals())`` calls, so
    their names must not be changed.

    :param track: release track name, forwarded to generate_ros_distro_diff
    :param repository: name of the repository being released
    :param distro: ROS distribution name
    :returns: result of ``gh.create_pull_request``, ``None`` when there were
        no changes, or nothing (implicit ``None``) on user abort or when a
        prerequisite (github hosting, fork) is missing
    """
    # Get the diff
    distribution_file = get_distribution_file(distro)
    if repository in distribution_file.repositories and \
       distribution_file.repositories[repository].release_repository is not None:
        orig_version = distribution_file.repositories[repository].release_repository.version
    else:
        orig_version = None
    updated_distribution_file = generate_ros_distro_diff(track, repository, distro)
    if updated_distribution_file is None:
        # There were no changes, no pull request required
        return None
    version = updated_distribution_file.repositories[repository].release_repository.version
    updated_distro_file_yaml = yaml_from_distribution_file(updated_distribution_file)
    # Determine if the distro file is hosted on github...
    # (get_disitrbution_file_url is the helper's actual, misspelled, name)
    base_org, base_repo, base_branch, base_path = get_gh_info(get_disitrbution_file_url(distro))
    if None in [base_org, base_repo, base_branch, base_path]:
        warning("Automated pull request only available via github.com")
        return
    # Get the github interface
    gh = get_github_interface()
    # Determine the head org/repo for the pull request
    head_org = gh.username  # The head org will always be gh user
    head_repo = None
    # Check if the github user and the base org are the same
    if gh.username == base_org:
        # If it is, then a fork is not necessary
        head_repo = base_repo
    else:
        info(fmt("@{bf}@!==> @|@!Checking on github for a fork to make the pull request from..."))
        # It is not, so a fork will be required
        # Check if a fork already exists on the user's account with the same name
        base_full_name = '{base_org}/{base_repo}'.format(**locals())
        try:
            repo_data = gh.get_repo(gh.username, base_repo)
            if repo_data.get('fork', False):  # Check if it is a fork
                # If it is, check that it is a fork of the destination
                parent = repo_data.get('parent', {}).get('full_name', None)
                if parent == base_full_name:
                    # This is a valid fork
                    head_repo = base_repo
        except GithubException as exc:
            debug("Received GithubException while checking for fork: {exc}".format(**locals()))
            pass  # 404 or unauthorized, but unauthorized should have been caught above
        # If not head_repo, then either the fork has a different name, or there isn't one
        if head_repo is None:
            info(fmt("@{bf}@!==> @|@!" +
                     "{head_org}/{base_repo} is not a fork, searching...".format(**locals())))
            # First we should look at every repository for the user and see if they are a fork
            user_repos = gh.list_repos(gh.username)
            for repo in user_repos:
                # If it is a fork and the parent is base_org/base_repo
                if repo.get('fork', False) and repo.get('parent', {}).get('full_name', '') == base_full_name:
                    # Then this is a valid fork
                    head_repo = repo['name']
        # If not head_repo still, a fork does not exist and must be created
        if head_repo is None:
            warning("Could not find a fork of {base_full_name} on the {gh.username} Github account."
                    .format(**locals()))
            warning("Would you like to create one now?")
            if not maybe_continue():
                warning("Skipping the pull request...")
                return
            # Create a fork
            try:
                gh.create_fork(base_org, base_repo)  # Will raise if not successful
                head_repo = base_repo
            except GithubException as exc:
                error("Aborting pull request: {0}".format(exc))
                return
        info(fmt("@{bf}@!==> @|@!" +
                 "Using this fork to make a pull request from: {head_org}/{head_repo}".format(**locals())))
    # Clone the fork
    info(fmt("@{bf}@!==> @|@!" +
             "Cloning {0}/{1}...".format(head_org, head_repo)))
    new_branch = None
    title = "{0}: {1} in '{2}' [bloom]".format(repository, version, base_path)
    body = """\
Increasing version of package(s) in repository `{0}` to `{2}`:

- distro file: `{3}`
- bloom version: `{4}`
- previous version for package: `{1}`
""".format(repository, orig_version or 'null', version, base_path, bloom.__version__)
    body += get_changelog_summary(generate_release_tag(distro))
    with temporary_directory() as temp_dir:
        def _my_run(cmd, msg=None):
            # Echo ``msg`` (or the command itself) and run the command via the
            # shell; ``msg`` is used below to avoid logging token-bearing URLs.
            if msg:
                info(fmt("@{bf}@!==> @|@!" + sanitize(msg)))
            else:
                info(fmt("@{bf}@!==> @|@!" + sanitize(str(cmd))))
            from subprocess import check_call
            check_call(cmd, shell=True)
        # Use the oauth token to clone
        # BUGFIX: restored the 'x-oauth-basic@github.com' authority, which had
        # been mangled; GitHub documents token clones as
        # https://TOKEN:x-oauth-basic@github.com/org/repo.git
        rosdistro_url = 'https://{gh.token}:x-oauth-basic@github.com/{base_org}/{base_repo}.git'.format(**locals())
        rosdistro_fork_url = 'https://{gh.token}:x-oauth-basic@github.com/{head_org}/{head_repo}.git'.format(**locals())
        _my_run('mkdir -p {base_repo}'.format(**locals()))
        with change_directory(base_repo):
            _my_run('git init')
            branches = [x['name'] for x in gh.list_branches(head_org, head_repo)]
            # Pick the first unused bloom-<repository>-<count> branch name
            new_branch = 'bloom-{repository}-{count}'
            count = 0
            while new_branch.format(repository=repository, count=count) in branches:
                count += 1
            new_branch = new_branch.format(repository=repository, count=count)
            # Final check
            info(fmt("@{cf}Pull Request Title: @{yf}" + title))
            info(fmt("@{cf}Pull Request Body : \n@{yf}" + body))
            # BUGFIX: was "{head_repo}/{head_repo}:..." which printed the repo
            # name twice; the head side of a pull request is org/repo, matching
            # the "{base_org}/{base_repo}" form on the "into" side below.
            msg = fmt("@!Open a @|@{cf}pull request@| @!@{kf}from@| @!'@|@!@{bf}" +
                      "{head_org}/{head_repo}:{new_branch}".format(**locals()) +
                      "@|@!' @!@{kf}into@| @!'@|@!@{bf}" +
                      "{base_org}/{base_repo}:{base_branch}".format(**locals()) +
                      "@|@!'?")
            info(msg)
            if not maybe_continue():
                warning("Skipping the pull request...")
                return
            _my_run('git checkout -b {new_branch}'.format(**locals()))
            # msg argument hides the token-bearing URL from the log
            _my_run('git pull {rosdistro_url} {base_branch}'.format(**locals()),
                    "Pulling latest rosdistro branch")
            with open('{0}'.format(base_path), 'w') as f:
                info(fmt("@{bf}@!==> @|@!Writing new distribution file: ") + str(base_path))
                f.write(updated_distro_file_yaml)
            _my_run('git add {0}'.format(base_path))
            _my_run('git commit -m "{0}"'.format(title))
            _my_run('git push {rosdistro_fork_url} {new_branch}'.format(**locals()),
                    "Pushing changes to fork")
    # Open the pull request
    return gh.create_pull_request(base_org, base_repo, base_branch, head_org, new_branch, title, body)
def open_pull_request(track, repository, distro):
    """Open a rosdistro pull request on GitHub using username/password auth.

    NOTE(review): another ``open_pull_request`` with the same signature is
    defined earlier in this file; if both are at module level, the later
    definition wins -- confirm which one is intended to be active.

    Prompts for GitHub credentials (caching the username in ``~/.bloom_user``),
    ensures a ``rosdistro`` fork exists on the user's account, clones it into a
    temporary directory, commits the updated release file on a new
    ``bloom-<repository>-<count>`` branch, pushes it, and opens the pull
    request.

    :param track: release track name, forwarded to generate_ros_distro_diff
    :param repository: name of the repository being released
    :param distro: ROS distribution name
    :returns: result of ``create_pull_request``, ``None`` when there is no
        diff, or nothing (implicit ``None``) on user abort or failure
    """
    # Get the diff
    release_file = get_release_file(distro)
    if repository in release_file.repositories:
        orig_version = release_file.repositories[repository].version
    else:
        orig_version = None
    updated_release_file = generate_ros_distro_diff(track, repository, distro)
    if updated_release_file is None:
        # There were no changes, no pull request required
        return None
    version = updated_release_file.repositories[repository].version
    updated_distro_file = yaml_from_release_file(updated_release_file)
    # Determine if the distro file is hosted on github...
    gh_org, gh_repo, gh_branch, gh_path = get_gh_info(get_release_file_url(distro))
    if None in [gh_org, gh_repo, gh_branch, gh_path]:
        warning("Automated pull request only available via github.com")
        return
    # Get the github user name, preferring one cached in ~/.bloom_user
    gh_username = None
    bloom_user_path = os.path.join(os.path.expanduser("~"), ".bloom_user")
    if os.path.exists(bloom_user_path):
        with open(bloom_user_path, "r") as f:
            gh_username = f.read().strip()
    gh_username = gh_username or getpass.getuser()
    # NOTE(review): raw_input is Python 2 only; this appears to be legacy code.
    response = raw_input("github user name [{0}]: ".format(gh_username))
    if response:
        gh_username = response
        info("Would you like bloom to store your github user name (~/.bloom_user)?")
        if maybe_continue():
            with open(bloom_user_path, "w") as f:
                f.write(gh_username)
        else:
            # A lone space presumably marks "asked and declined" so the stale
            # name is not re-used -- TODO confirm against readers of the file.
            with open(bloom_user_path, "w") as f:
                f.write(" ")
            warning("If you want to have bloom store it in the future remove the ~/.bloom_user file.")
    # Get the github password (used for this session only, never written out)
    gh_password = getpass.getpass("github password (This is not stored):")
    if not gh_password or not gh_username:
        error("Either the github username or github password is not set.")
        warning("Skipping the pull request...")
        return
    # Check for fork
    info(fmt("@{bf}@!==> @|@!Checking for rosdistro fork on github..."))
    gh_user_repos = fetch_github_api("https://api.github.com/users/{0}/repos".format(gh_username), use_pagination=True)
    if gh_user_repos is None:
        # NOTE(review): the message has no '{0}' placeholder, so the
        # .format(gh_username) call is a no-op -- confirm the intended text.
        error("Failed to get a list of repositories for user: '******'".format(gh_username))
        warning("Skipping the pull request...")
        return
    if "rosdistro" not in [x["name"] for x in gh_user_repos if "name" in x]:
        warning(
            "Github user '{0}' does not have a fork ".format(gh_username)
            + "of the {0}:{1} repository, create one?".format(gh_org, gh_repo)
        )
        if not maybe_continue():
            warning("Skipping the pull request...")
            return
        # Create a fork
        create_fork(gh_org, gh_repo, gh_username, gh_password)
    # Clone the fork
    info(fmt("@{bf}@!==> @|@!" + "Cloning {0}/{1}...".format(gh_username, gh_repo)))
    temp_dir = tempfile.mkdtemp()
    new_branch = None
    title = "{0}: {1} in '{2}' [bloom]".format(repository, version, gh_path)
    body = """\
Increasing version of package(s) in repository `{0}`:

- previous version: `{1}`
- new version: `{2}`
- distro file: `{3}`
- bloom version: `{4}`
""".format(
        repository, orig_version or "null", version, gh_path, bloom.__version__
    )
    with change_directory(temp_dir):

        def _my_run(cmd):
            # Echo the command, then run it through the shell.
            info(fmt("@{bf}@!==> @|@!" + str(cmd)))
            # out = check_output(cmd, stderr=subprocess.STDOUT, shell=True)
            out = None
            from subprocess import call
            call(cmd, shell=True)
            if out:
                info(out, use_prefix=False)

        _my_run("git clone https://github.com/{0}/{1}.git".format(gh_username, gh_repo))
        with change_directory(gh_repo):
            _my_run("git remote add bloom https://github.com/{0}/{1}.git".format(gh_org, gh_repo))
            _my_run("git remote update")
            _my_run("git fetch")
            track_branches()
            branches = get_branches()
            # Pick the first unused bloom-<repository>-<count> branch name
            new_branch = "bloom-{repository}-{count}"
            count = 0
            while new_branch.format(repository=repository, count=count) in branches:
                count += 1
            new_branch = new_branch.format(repository=repository, count=count)
            # Final check
            info(fmt("@{cf}Pull Request Title: @{yf}" + title))
            info(fmt("@{cf}Pull Request Body : \n@{yf}" + body))
            msg = fmt(
                "@!Open a @|@{cf}pull request@| @!@{kf}from@| @!'@|@!@{bf}"
                + "{gh_username}/{gh_repo}:{new_branch}".format(**locals())
                + "@|@!' @!@{kf}into@| @!'@|@!@{bf}"
                + "{gh_org}/{gh_repo}:{gh_branch}".format(**locals())
                + "@|@!'?"
            )
            info(msg)
            if not maybe_continue():
                warning("Skipping the pull request...")
                return
            _my_run("git checkout -b {0} bloom/{1}".format(new_branch, gh_branch))
            with open("{0}".format(gh_path), "w") as f:
                info(fmt("@{bf}@!==> @|@!Writing new distribution file: ") + str(gh_path))
                f.write(updated_distro_file)
            _my_run("git add {0}".format(gh_path))
            _my_run('git commit -m "{0}"'.format(title))
            _my_run("git push origin {0}".format(new_branch))
    # Open the pull request
    return create_pull_request(gh_org, gh_repo, gh_username, gh_password, gh_branch, new_branch, title, body)
def generate_substitutions_from_package(
    package,
    os_name,
    os_version,
    ros_distro,
    installation_prefix='/usr',
    deb_inc=0,
    peer_packages=None,
    releaser_history=None,
    fallback_resolver=None
):
    """Collect the substitution values used to fill in the debian templates.

    :param package: parsed package manifest; its name, version, description,
        urls, maintainers, and ``*_depends`` lists are read here
    :param os_name: target OS name, passed to dependency resolution
    :param os_version: target OS codename; stored as 'Distribution' and used
        to choose the debhelper version
    :param ros_distro: ROS distro name, passed to dependency resolution
    :param installation_prefix: value stored as 'InstallationPrefix'
    :param deb_inc: debian increment number, stored as 'DebianInc'
    :param peer_packages: names of peer packages that may satisfy
        dependencies directly (default: empty list)
    :param releaser_history: prior release history forwarded to get_changelogs
    :param fallback_resolver: optional resolver forwarded to
        resolve_dependencies
    :returns: dict of substitutions with all strings coerced to unicode
    :raises RuntimeError: when a substitution value has an unsupported type
    """
    peer_packages = peer_packages or []
    data = {}
    # Name, Version, Description
    data['Name'] = package.name
    data['Version'] = package.version
    data['Description'] = debianize_string(package.description)
    # Websites
    websites = [str(url) for url in package.urls if url.type == 'website']
    homepage = websites[0] if websites else ''
    if homepage == '':
        warning("No homepage set, defaulting to ''")
    data['Homepage'] = homepage
    # Debian Increment Number
    data['DebianInc'] = deb_inc
    # Package name
    data['Package'] = sanitize_package_name(package.name)
    # Installation prefix
    data['InstallationPrefix'] = installation_prefix
    # Resolve dependencies
    depends = package.run_depends + package.buildtool_export_depends
    build_depends = package.build_depends + package.buildtool_depends + package.test_depends
    unresolved_keys = depends + build_depends
    resolved_deps = resolve_dependencies(unresolved_keys, os_name, os_version,
                                         ros_distro, peer_packages, fallback_resolver)
    data['Depends'] = sorted(set(format_depends(depends, resolved_deps)))
    data['BuildDepends'] = sorted(set(format_depends(build_depends, resolved_deps)))
    # Set the distribution
    data['Distribution'] = os_version
    # Use the time stamp to set the date strings
    stamp = datetime.datetime.now(tz.tzlocal())
    data['Date'] = stamp.strftime('%a, %d %b %Y %T %z')
    data['YYYY'] = stamp.strftime('%Y')
    # Maintainers
    maintainers = []
    for m in package.maintainers:
        maintainers.append(str(m))
    data['Maintainer'] = maintainers[0]
    data['Maintainers'] = ', '.join(maintainers)
    # Changelog
    changelogs = get_changelogs(package, releaser_history)
    if changelogs and package.version not in [x[0] for x in changelogs]:
        warning("")
        warning("A CHANGELOG.rst was found, but no changelog for this version was found.")
        warning("You REALLY should have a entry (even a blank one) for each version of your package.")
        warning("")
    if not changelogs:
        # Ensure at least a minimal changelog
        changelogs = []
    if package.version not in [x[0] for x in changelogs]:
        # Synthesize a placeholder entry so the debian changelog is never empty
        changelogs.insert(0, (
            package.version,
            get_rfc_2822_date(datetime.datetime.now()),
            ' * Autogenerated, no changelog for this version found in CHANGELOG.rst.',
            package.maintainers[0].name,
            package.maintainers[0].email
        ))
    bad_changelog = False
    # Make sure that the first change log is the version being released
    if package.version != changelogs[0][0]:
        error("")
        # BUGFIX: the two format arguments were swapped; '{0}' is the first
        # changelog entry's version and '{1}' the version being released.
        error("The version of the first changelog entry '{0}' is not the "
              "same as the version being currently released '{1}'."
              .format(changelogs[0][0], package.version))
        bad_changelog = True
    # Make sure that the current version is the latest in the changelog
    for changelog in changelogs:
        if parse_version(package.version) < parse_version(changelog[0]):
            error("")
            error("There is at least one changelog entry, '{0}', which has a "
                  "newer version than the version of package '{1}' being released, '{2}'."
                  .format(changelog[0], package.name, package.version))
            bad_changelog = True
    if bad_changelog:
        error("This is almost certainly by mistake, you should really take a "
              "look at the changelogs for the package you are releasing.")
        error("")
        if not maybe_continue('n', 'Continue anyways'):
            sys.exit("User quit.")
    data['changelogs'] = changelogs
    # Use debhelper version 7 for oneiric, otherwise 9
    data['debhelper_version'] = 7 if os_version in ['oneiric'] else 9
    # Summarize dependencies
    summarize_dependency_mapping(data, depends, build_depends, resolved_deps)

    def convertToUnicode(obj):
        # Recursively coerce str/bytes (and list/tuple containers of them) to
        # the platform's unicode string type; handles both Python 2 and 3.
        if sys.version_info.major == 2:
            if isinstance(obj, str):
                return unicode(obj.decode('utf8'))
            elif isinstance(obj, unicode):
                return obj
        else:
            if isinstance(obj, bytes):
                return str(obj.decode('utf8'))
            elif isinstance(obj, str):
                return obj
        if isinstance(obj, list):
            for i, val in enumerate(obj):
                obj[i] = convertToUnicode(val)
            return obj
        elif isinstance(obj, type(None)):
            return None
        elif isinstance(obj, tuple):
            obj_tmp = list(obj)
            for i, val in enumerate(obj_tmp):
                obj_tmp[i] = convertToUnicode(obj_tmp[i])
            return tuple(obj_tmp)
        elif isinstance(obj, int):
            return obj
        raise RuntimeError('need to deal with type %s' % (str(type(obj))))

    # Replacing values (not keys) during items() iteration is safe in Python 3
    for key, value in data.items():
        data[key] = convertToUnicode(value)
    return data