def select_ref(result_out: Result, branch_info: BranchInfo, selection: BranchSelection) \
        -> [repotools.Ref, const.BranchClass]:
    """
    Pick a single ref (and its branch class) from a BranchInfo holding local and/or
    upstream refs, according to the requested selection policy.

    Consistency problems between the local branch and its upstream are reported as
    errors on ``result_out`` but do not abort selection.

    :param result_out: Result that collects consistency errors.
    :param branch_info: branch info with ``local`` (list), ``local_class`` (list),
        ``upstream`` and ``upstream_class`` attributes.
    :param selection: BranchSelection policy deciding local/remote preference.
    :return: tuple of (candidate ref or None, candidate branch class or None).
        NOTE: the annotation uses a list literal; a Tuple type is what is actually
        returned.
    """
    # Cross-check local vs. upstream only when both sides are present.
    if branch_info.local is not None and len(
            branch_info.local) and branch_info.upstream is not None:
        if branch_info.local_class[0] != branch_info.upstream_class:
            result_out.error(
                os.EX_DATAERR,
                _("Local and upstream branch have a mismatching branch class."
                  ), None)
        # upstream is expected to be "<remote>/<local short name>".
        if not branch_info.upstream.short_name.endswith(
                '/' + branch_info.local[0].short_name):
            result_out.error(
                os.EX_DATAERR,
                _("Local and upstream branch have a mismatching short name."),
                None)
    candidate = None
    candidate_class = None
    if selection == BranchSelection.BRANCH_PREFER_LOCAL:
        # NOTE(review): assumes branch_info.local is a non-empty list here;
        # a None/empty local would raise TypeError/IndexError — TODO confirm invariant.
        candidate = branch_info.local[0] or branch_info.upstream
        candidate_class = branch_info.local_class[
            0] or branch_info.upstream_class
    elif selection == BranchSelection.BRANCH_LOCAL_ONLY:
        candidate = branch_info.local[0]
        candidate_class = branch_info.local_class[0]
    elif selection == BranchSelection.BRANCH_PREFER_REMOTE:
        candidate = branch_info.upstream or branch_info.local[0]
        candidate_class = branch_info.upstream_class or branch_info.local_class[
            0]
    elif selection == BranchSelection.BRANCH_REMOTE_ONLY:
        candidate = branch_info.upstream
        candidate_class = branch_info.upstream_class
    return candidate, candidate_class
def evaluate_numeric_increment(result: "Result", field_name: str, reset: bool,
                               reset_val: int, strict: bool, a: int, b: int):
    """
    Validate the increment of one numeric version field (major/minor/patch).

    If a more significant field was already incremented (``reset`` is True), this
    field must be reset to ``reset_val``; otherwise it may grow by at most one.
    Violations are reported on ``result``.

    :param result: Result collecting validation errors.
    :param field_name: name of the field, used in error messages.
    :param reset: True when a more significant field changed, requiring a reset.
    :param reset_val: the value this field must be reset to.
    :param strict: when True violations are errors (os.EX_USAGE); otherwise they
        are recorded with os.EX_OK (informational).
    :param a: previous field value.
    :param b: new field value.
    :return: True when subsequent (less significant) fields must be reset,
        i.e. ``reset`` was already set or this field increased.
    :rtype: bool
    """
    delta = b - a
    if reset:
        if b != reset_val:
            # Fixed: honor `strict` like the gap check below and like
            # evaluate_prerelease_increment; previously this was an
            # unconditional os.EX_USAGE.
            result.error(os.EX_USAGE if strict else os.EX_OK,
                         _("Version change leaves a gap without a semantic meaning."),
                         _("The field {field_name} must be reset to {reset_val}.")
                         .format(field_name=repr(field_name), reset_val=reset_val)
                         )
    else:
        if delta > 1:
            result.error(os.EX_USAGE if strict else os.EX_OK,
                         _("Version change leaves a gap without a semantic meaning."),
                         _("The field {field_name} must not be incremented by more than one.")
                         .format(field_name=repr(field_name))
                         )
    return reset or b > a
def prompt_for_confirmation(context: Context, fail_title: str, message: str, prompt: str):
    """
    Ask the user for a yes/no confirmation, honoring batch mode and --assume-yes.

    In batch mode the answer is taken from ``context.assume_yes``; a negative
    answer fails the result with EX_ABORTED. Interactively, an optional warning
    ``message`` is shown first, then the user is queried (default "no"); a
    negative answer records EX_ABORTED_BY_USER without throwing.

    :param context: Context providing ``batch`` and ``assume_yes`` flags.
    :param fail_title: title used for the failure/error entry.
    :param message: optional warning printed before the prompt (interactive only).
    :param prompt: the question shown to the user.
    :return: Result whose ``value`` is True when confirmed.
    """
    result = Result()
    if context.batch:
        result.value = context.assume_yes
        if not result.value:
            # Echo the prompt with '-' so transcripts show what was declined.
            sys.stdout.write(prompt + ' -' + os.linesep)
            result.fail(const.EX_ABORTED, fail_title,
                        _("Operation aborted in batch mode."))
    else:
        if message is not None:
            cli.warn(message)
        # Flush both streams so the prompt appears after any pending output.
        sys.stderr.flush()
        sys.stdout.flush()
        if context.assume_yes:
            # Echo the prompt with an automatic 'y' answer.
            sys.stdout.write(prompt + ' y' + os.linesep)
            result.value = True
        else:
            result.value = cli.query_yes_no(sys.stdout, prompt, "no")
            if result.value is not True:
                # error(..., False): record the abort without raising.
                result.error(const.EX_ABORTED_BY_USER, fail_title,
                             _("Operation aborted."), False)
    return result
def execute_version_change_actions(context: Context, old_version: str, new_version: str):
    """
    Run the configured version-change hook commands with OLD_VERSION/NEW_VERSION
    exported in their environment-derived variable set.

    Each command token is variable-expanded via ``expand_vars`` before execution;
    commands run in the repository directory. A non-zero exit fails the context.

    :param context: Context providing config, repo dir and verbosity.
    :param old_version: previous version string (empty string when None).
    :param new_version: new version string.
    """
    # Expansion variables: full process environment plus the version pair.
    variables = dict(os.environ)
    variables['OLD_VERSION'] = old_version or ''
    variables['NEW_VERSION'] = new_version
    for command in context.config.version_change_actions:
        # Shell-quoted form used only for logging and error messages.
        command_string = ' '.join(shlex.quote(token) for token in command)
        if context.verbose >= const.TRACE_VERBOSITY:
            print(command_string)
        command = [expand_vars(token, variables) for token in command]
        proc = subprocess.Popen(
            args=command,
            # stdin=subprocess.PIPE,
            # stdout=subprocess.PIPE,
            cwd=context.repo.dir,
            env=None)
        proc.wait()
        if proc.returncode != os.EX_OK:
            context.fail(
                os.EX_DATAERR,
                _("version change action failed."),
                _("{command}\n"
                  "returned with an error.").format(command=command_string))
def version_bump_to_release(version_config: VersionConfig, version: Optional[str],
                            global_seq: Optional[int]):
    """
    Strip the pre-release component from a version, producing a release version.

    Only pre-release versions may be promoted; sequential versioning schemes
    (SEMVER_WITH_SEQ) never produce release versions.

    :param version_config: version scheme configuration.
    :param version: the version to promote; ``None`` is treated as "0.0.0".
    :param global_seq: unused here; kept for signature parity with the other
        version_bump_* functions.
    :return: Result with the release version string in ``value`` on success.
    """
    result = Result()
    version_info = semver.parse_version_info(
        version) if version is not None else semver.parse_version_info("0.0.0")
    if version_config.versioning_scheme == VersioningScheme.SEMVER_WITH_SEQ:
        result.error(
            os.EX_USAGE,
            _("Failed to increment version to release: {version}.").format(
                version=repr(version)),
            _("Sequential versions cannot be release versions."))
        return result
    if not version_info.prerelease:
        result.error(
            os.EX_DATAERR,
            _("Failed to increment version to release: {version}.").format(
                version=repr(version)),
            _("Only pre-release versions can be incremented to a release version."
              ))
    if not result.has_errors():
        # Drop prerelease and build components, keep major.minor.patch.
        result.value = semver.format_version(version_info.major,
                                             version_info.minor,
                                             version_info.patch, None, None)
    return result
def evaluate_prerelease_increment(result: Result, field_name: str, index: int,
                                  reset: bool, reset_val: int or str, strict: bool,
                                  a: int or str, b: int or str, keywords: list):
    """
    Validate the increment of one pre-release version token.

    Tokens are compared with ``cmp_alnum_token`` (numeric or keyword-ordered).
    If a more significant field changed (``reset``), this token must equal
    ``reset_val``; otherwise it may grow by at most one step.

    NOTE: the ``int or str`` annotations evaluate to just ``int`` at runtime;
    they document intent ("int or str") rather than enforce it.

    :param result: Result collecting validation errors.
    :param field_name: field name used in error messages (indexed with ``index``).
    :param index: position of the token within the pre-release sequence.
    :param reset: True when a more significant field changed.
    :param reset_val: required value after a reset.
    :param strict: errors use os.EX_USAGE when True, os.EX_OK otherwise.
    :param a: previous token value.
    :param b: new token value.
    :param keywords: ordered keyword list for alphanumeric token comparison.
    :return: True when less significant tokens must be reset.
    :rtype: bool
    """
    delta = cmp_alnum_token(b, a, keywords)
    requires_reset = False
    if delta > 0:
        # This token increased, so everything after it must be reset.
        requires_reset = True
    if reset:
        if b != reset_val:
            result.error(os.EX_USAGE if strict else os.EX_OK,
                         _("Version change leaves a gap without a semantic meaning."),
                         _("The field {field_name} must be reset to {reset_val}.")
                         .format(field_name=repr(field_name + '[' + str(index) + ']'),
                                 reset_val=reset_val)
                         )
    else:
        if delta > 1:
            result.error(os.EX_USAGE if strict else os.EX_OK,
                         _("Version change leaves a gap without a semantic meaning."),
                         _("The field {field_name} must not be incremented by more than one.")
                         .format(field_name=repr(field_name + '[' + str(index) + ']'))
                         )
    return reset or requires_reset
def evaluate_version_increment(a: Version, b: Version, strict: bool,
                               prerelase_keywords_list: list = None):
    """
    Check that the version change from ``a`` to ``b`` is a semantically
    meaningful increment (no gaps, proper resets of less significant fields).

    Builds the expected "initial" value for each field (what a field must be
    reset to once a more significant field changed), then threads a ``reset``
    flag through major → minor → patch → each pre-release token.

    NOTE: ``prerelase_keywords_list`` (sic — typo kept for interface stability)
    holds per-token keyword configs: a list (first entry is the reset value),
    an int reset value, or None meaning numeric reset to 0.

    :param a: previous Version.
    :param b: new Version.
    :param strict: when True violations are EX_USAGE errors, else EX_OK notes.
    :param prerelase_keywords_list: optional per-index pre-release token config.
    :return: Result; errors describe any inconsistency found.
    """
    result = Result()
    # Expected reset targets: the first meaningful version is 1.0.0.
    initial_version = Version()
    initial_version.major = 1
    initial_version.minor = 0
    initial_version.patch = 0
    if prerelase_keywords_list is not None and len(prerelase_keywords_list):
        initial_version.prerelease = list()
        for index, token_config in enumerate(prerelase_keywords_list):
            if token_config is not None:
                if isinstance(token_config, list) and len(token_config):
                    # Keyword token: reset value is the first keyword.
                    initial_version.prerelease.append(token_config[0])
                elif isinstance(token_config, int):
                    initial_version.prerelease.append(token_config)
                else:
                    raise ValueError()
            else:
                # Unconfigured token: numeric, resets to 0.
                initial_version.prerelease.append(0)
    # `reset` becomes True as soon as any field increases; all following
    # fields must then equal their initial (reset) value.
    reset = False
    reset = evaluate_numeric_increment(result, 'major', reset,
                                       initial_version.major, strict,
                                       a.major, b.major)
    reset = evaluate_numeric_increment(result, 'minor', reset,
                                       initial_version.minor, strict,
                                       a.minor, b.minor)
    reset = evaluate_numeric_increment(result, 'patch', reset,
                                       initial_version.patch, strict,
                                       a.patch, b.patch)
    # check pre-release convention
    index = 0
    for sub_a, sub_b in itertools.zip_longest(a.prerelease or [], b.prerelease or []):
        keywords = prerelase_keywords_list[index] \
            if prerelase_keywords_list is not None \
               and index < len(prerelase_keywords_list) \
            else None
        reset = evaluate_prerelease_increment(result, "prerelease", index, reset,
                                              initial_version.prerelease[index]
                                              if initial_version.prerelease is not None
                                                 and index < len(initial_version.prerelease)
                                              else 0,
                                              strict, sub_a, sub_b, keywords)
        index += 1
    if result.has_errors():
        # Summarize all detail errors with one top-level verdict.
        result.error(os.EX_USAGE if strict else os.EX_OK,
                     _("Version increment is flawed."),
                     _("A version increment from {version_a} to {version_b} is inconsistent.")
                     .format(version_a=repr(format_version(a)),
                             version_b=repr(format_version(b)))
                     )
    return result
def execute_build_steps(command_context: CommandContext, types: list = None):
    """
    Run the configured build stages (optionally filtered by stage type) and
    their steps' commands, reporting per-step OK/FAILED status.

    Commands are variable-expanded against the process environment and run in
    the project root; nothing is executed in dry-run mode. A non-zero exit or a
    missing executable fails the command context.

    NOTE(review): ``step_errors`` is only incremented on FileNotFoundError, not
    on a non-zero return code — presumably ``command_context.fail`` raises in
    that case so the OK/FAILED line is never reached; confirm against
    CommandContext.fail semantics.

    :param command_context: CommandContext providing context/config and error sink.
    :param types: optional list of stage types to run; None runs all stages.
    """
    if types is not None:
        stages = filter(lambda stage: stage.type in types,
                        command_context.context.config.build_stages)
    else:
        stages = command_context.context.config.build_stages
    for stage in stages:
        for step in stage.steps:
            step_errors = 0
            for command in step.commands:
                # Shell-quoted form used for logging and error messages only.
                command_string = ' '.join(
                    shlex.quote(token) for token in command)
                if command_context.context.verbose >= const.TRACE_VERBOSITY:
                    print(command_string)
                command = [expand_vars(token, os.environ) for token in command]
                if not command_context.context.dry_run:
                    try:
                        proc = subprocess.Popen(
                            args=command,
                            stdin=subprocess.PIPE,
                            cwd=command_context.context.root)
                        proc.wait()
                        if proc.returncode != os.EX_OK:
                            command_context.fail(
                                os.EX_DATAERR,
                                _("{stage}:{step} failed.").format(
                                    stage=stage.name, step=step.name),
                                _("{command}\n"
                                  "returned with an error.").format(
                                    command=command_string))
                    except FileNotFoundError as e:
                        # Executable missing: count it so the step is reported FAILED.
                        step_errors += 1
                        command_context.fail(
                            os.EX_DATAERR,
                            _("{stage}:{step} failed.").format(
                                stage=stage.name, step=step.name),
                            _("{command}\n"
                              "could not be executed.\n"
                              "File not found: {file}").format(
                                command=command_string, file=e.filename))
            if not step_errors:
                cli.print(stage.name + ":" + step.name + ": OK")
            else:
                cli.print(stage.name + ":" + step.name + ": FAILED")
def version_bump_prerelease(version_config: VersionConfig, version: Optional[str],
                            global_seq: Optional[int]):
    """
    Increment the numeric pre-release component of a version.

    SNAPSHOT pre-releases are rejected (Maven compatibility), bare qualifiers
    without a number are rejected outside SEMVER_WITH_SEQ, and release versions
    (no pre-release) cannot be incremented. The new version must compare
    strictly greater than the input or the result is discarded.

    :param version_config: version scheme configuration.
    :param version: version to increment; ``None`` is treated as "0.0.0".
    :param global_seq: unused here; kept for signature parity with the other
        version_bump_* functions.
    :return: Result with the bumped version in ``value`` on success.
    """
    result = Result()
    version_info = semver.parse_version_info(
        version) if version is not None else semver.parse_version_info("0.0.0")
    if version_info.prerelease:
        prerelease_version_elements = version_info.prerelease.split(".")
        if len(prerelease_version_elements
               ) > 0 and prerelease_version_elements[0].upper() == "SNAPSHOT":
            if len(prerelease_version_elements) == 1:
                # Plain "-SNAPSHOT": skip the increment instead of versioning it.
                result.error(
                    os.EX_DATAERR,
                    _("The pre-release increment has been skipped."),
                    _("In order to retain Maven compatibility, "
                      "the pre-release component of snapshot versions must not be versioned."
                      ))
            else:
                result.error(
                    os.EX_DATAERR,
                    _("Failed to increment the pre-release component of version {version}."
                      ).format(version=repr(version)),
                    _("Snapshot versions must not have a pre-release version.")
                )
            # Pass the input through unchanged for the snapshot case.
            result.value = version
        elif len(prerelease_version_elements) == 1:
            if version_config.versioning_scheme != VersioningScheme.SEMVER_WITH_SEQ:
                result.error(
                    os.EX_DATAERR,
                    _("Failed to increment the pre-release component of version {version}."
                      ).format(version=repr(version)),
                    _("The qualifier {qualifier} must already be versioned.").
                    format(qualifier=repr(prerelease_version_elements[0])))
            # NOTE(review): indentation reconstructed — the bump appears to apply
            # to the single-element qualifier case; multi-element non-snapshot
            # pre-releases fall through to the EX_SOFTWARE error below. Confirm.
            result.value = semver.bump_prerelease(version)
    else:
        result.error(
            os.EX_DATAERR,
            _("Failed to increment the pre-release component of version {version}."
              ).format(version=repr(version)),
            _("Pre-release increments cannot be performed on release versions."
              ))
    if result.has_errors():
        result.value = None
    elif result.value is not None and not semver.compare(
            result.value, version) > 0:
        # The bump must produce a strictly greater version.
        result.value = None
    if not result.value:
        result.error(
            os.EX_SOFTWARE,
            _("Failed to increment the pre-release of version {version} for unknown reasons."
              ).format(version=repr(version)), None)
    return result
def fetch_all_and_ff(context: RepoContext, result_out: Result, remote: [repotools.Remote, str]):
    """
    Fetch everything (including tags) from ``remote`` and then try to
    fast-forward the current branch; failures are recorded as warnings only.

    :param context: repository context passed to git invocations.
    :param result_out: Result receiving warnings on fetch/ff failure.
    :param remote: a repotools.Remote or a plain remote name string.
    """
    if isinstance(remote, repotools.Remote):
        remote_name = remote.name
    else:
        remote_name = remote

    fetch_code, _out, _err = repotools.git(context, 'fetch', '--tags', remote_name)
    if fetch_code != os.EX_OK:
        result_out.warn(
            _("Failed to fetch from {remote}").format(remote=repr(remote_name)),
            None)

    ff_code, _out, _err = repotools.git(context, 'merge', '--ff-only')
    if ff_code != os.EX_OK:
        result_out.warn(_("Failed to fast forward"), None)
def pre_push(context: Context) -> Result:
    """
    Git pre-push hook: validate each ref update announced on stdin.

    Git feeds one line per ref in the form
    ``<local ref> SP <local sha1> SP <remote ref> SP <remote sha1> LF``.
    Each remote ref is checked for modifiability; violations are recorded
    (``throw=False``) rather than raised.

    :param context: Context for the current invocation.
    :return: Result (errors are collected on the command context, not here).
    """
    result = Result()
    for line in sys.stdin.readlines():
        # Fixed: strip the trailing newline before splitting; previously the
        # fourth token (remote SHA-1) carried a '\n'.
        tokens = line.rstrip('\n').split(' ')
        if len(tokens) != 4:
            raise ValueError("malformed pre-push input line: " + repr(line))
        cli.print(line)
        local_ref = tokens[0]
        local_sha1 = tokens[1]
        remote_ref = tokens[2]
        remote_sha1 = tokens[3]
        command_context = common.get_command_context(context=context,
                                                     object_arg=remote_ref)
        common.check_requirements(command_context=command_context,
                                  ref=command_context.selected_ref,
                                  branch_classes=None,
                                  modifiable=True,
                                  with_upstream=False,
                                  in_sync_with_upstream=False,
                                  fail_message=_("Push rejected."),
                                  throw=False)
    return result
def version_bump_qualifier(version_config: VersionConfig, version: Optional[str],
                           global_seq: Optional[int]):
    """
    Advance the pre-release qualifier of a version to the next configured
    qualifier (restarting its counter at 1).

    Fails when the scheme has no qualifiers, when the current qualifier is not
    configured, when there is no higher-precedence qualifier left, or when the
    version is a release version.

    :param version_config: version scheme configuration (``qualifiers`` list in
        ascending precedence).
    :param version: version to bump; ``None`` is treated as "0.0.0".
    :param global_seq: unused here; kept for signature parity with the other
        version_bump_* functions.
    :return: Result with the bumped version in ``value`` on success.
    """
    result = Result()
    version_info = semver.parse_version_info(
        version) if version is not None else semver.parse_version_info("0.0.0")
    new_qualifier = None
    if not version_config.qualifiers:
        result.error(
            os.EX_USAGE,
            _("Failed to increment the pre-release qualifier of version {version}."
              ).format(version=repr(version)),
            _("The version scheme does not contain qualifiers"))
        return result
    if version_info.prerelease:
        prerelease_version_elements = version_info.prerelease.split(".")
        qualifier = prerelease_version_elements[0]
        # -1 marks a qualifier that is not part of the configured set.
        qualifier_index = version_config.qualifiers.index(
            qualifier) if qualifier in version_config.qualifiers else -1
        if qualifier_index < 0:
            result.error(
                os.EX_DATAERR,
                _("Failed to increment the pre-release qualifier of version {version}."
                  ).format(version=repr(version)),
                _("The current qualifier is invalid: {qualifier}").format(
                    qualifier=repr(qualifier)))
        else:
            qualifier_index += 1
            if qualifier_index < len(version_config.qualifiers):
                new_qualifier = version_config.qualifiers[qualifier_index]
            else:
                result.error(
                    os.EX_DATAERR,
                    _("Failed to increment the pre-release qualifier {qualifier} of version {version}."
                      ).format(qualifier=qualifier, version=repr(version)),
                    _("There are no further qualifiers with higher precedence, configured qualifiers are:\n"
                      "{listing}\n"
                      "The sub command 'bump-to-release' may be used for a final bump."
                      ).format(listing='\n'.join(
                        ' - ' + repr(qualifier)
                        for qualifier in version_config.qualifiers)))
    else:
        # Fixed: use repr(version) for consistency with every other error
        # message in this module (was the bare value).
        result.error(
            os.EX_DATAERR,
            _("Failed to increment the pre-release qualifier of version {version}."
              ).format(version=repr(version)),
            _("Pre-release increments cannot be performed on release versions."
              ))
    if not result.has_errors() and new_qualifier is not None:
        # Restart the qualifier counter at 1, e.g. "1.2.3-beta.1".
        result.value = semver.format_version(version_info.major,
                                             version_info.minor,
                                             version_info.patch,
                                             new_qualifier + ".1", None)
    return result
def call(context: Context):
    """
    Build command entry point: run the selected build stages, either in place
    or on a clean export of the selected commit.

    Unless ``--inplace`` is given, the selected commit is exported to a
    temporary directory and a derived Context/CommandContext pair is created
    for it; requirements (upstream present, in sync, no unversioned changes)
    are then checked before any stage runs.

    :param context: current invocation Context (args, repo, flags).
    :return: the shared ``context.result``.
    """
    command_context = get_command_context(context=context,
                                          object_arg=context.args['<object>'])
    if context.repo is not None:
        if context.args['--inplace']:
            build_context = context
            build_command_context = command_context
        else:
            # Export the selected commit into a temp dir and build there.
            temp_dir = TemporaryDirectory()
            exported_repo = repotools.git_export(
                context=context.repo,
                target_dir=temp_dir.name,
                object=command_context.selected_commit)
            build_context = Context.create(
                {
                    **context.args,
                    **{
                        '--root': exported_repo.dir,
                        '--config': context.args['--config'],  # no override here
                        '--batch': context.batch,
                        '--dry-run': context.dry_run,
                        '--verbose': context.verbose,
                        '--pretty': context.pretty,
                    }
                }, context.result)
            build_command_context = get_command_context(
                context=build_context,
                object_arg=build_context.args['<object>'])
        check_requirements(
            command_context=build_command_context,
            ref=build_command_context.selected_ref,
            branch_classes=None,
            modifiable=True,
            with_upstream=True,  # not context.config.push_to_local
            in_sync_with_upstream=True,
            fail_message=_("Build failed."),
            allow_unversioned_changes=False)
    else:
        build_context = context
        build_command_context = command_context
    # Stage-type CLI flags use dashes; stage type constants use underscores.
    selected_stages = list()
    for stage_type in const.BUILD_STAGE_TYPES:
        if build_context.args[stage_type.replace('_', '-')]:
            selected_stages.append(stage_type)
    execute_build_steps(build_command_context, selected_stages)
    return context.result
def download_file(source_uri: str, dest_file: str, hash_hex: str):
    """
    Download ``source_uri`` to ``dest_file`` unless a file with the expected
    SHA-256 hash already exists; verify the download afterwards.

    The download is written to ``dest_file + "~"`` first and atomically moved
    into place via ``filesystem.replace_file``.

    :param source_uri: URL to download from.
    :param dest_file: local destination path.
    :param hash_hex: expected SHA-256 digest as a hex string.
    :return: Result with ``value`` set to ``dest_file`` on success.
    :raises ValueError: if ``hash_hex`` is not valid hexadecimal.
    """
    from urllib import request
    import hashlib

    result = Result()

    # Renamed from `hash` (shadowed the builtin). bytes.fromhex either returns
    # bytes or raises ValueError, so no None check is needed afterwards.
    expected_hash = bytes.fromhex(hash_hex)

    download = False
    if not os.path.exists(dest_file):
        cli.print("file does not exist: " + dest_file)
        download = True
    elif hash_file(hashlib.sha256(), dest_file) != expected_hash:
        cli.print("file hash does not match: " + dest_file)
        download = True
    else:
        cli.print("keeping file: " + dest_file + ", sha256 matched: " + hash_hex)

    if download:
        cli.print("downloading: " + source_uri + " to " + dest_file)
        # Download to a temp name, then replace atomically.
        request.urlretrieve(url=str(source_uri), filename=dest_file + "~")
        filesystem.replace_file(dest_file + "~", dest_file)

        # Verify the freshly downloaded file.
        actual_hash = hash_file(hashlib.sha256(), dest_file)
        if actual_hash != expected_hash:
            result.error(
                os.EX_IOERR,
                _("File verification failed."),
                _("The file {file} is expected to hash to {expected_hash},\n"
                  "The actual hash is: {actual_hash}").format(
                    file=repr(dest_file),
                    expected_hash=repr(hash_hex),
                    actual_hash=repr(actual_hash.hex()),
                ))

    if not result.has_errors():
        result.value = dest_file

    return result
def cmd_convert_config(context):
    """
    Convert a config file between the property formats supported by PropertyIO,
    choosing reader/writer implementations by file extension.

    Note: ``open()`` raises OSError on failure and never returns None, so the
    former ``if file is None`` guards were unreachable and have been removed;
    locals no longer shadow the ``input`` builtin.

    :param context: invocation context providing ``args['<input-file>']`` and
        ``args['<output-file>']``.
    :return: empty Result (success unless open/parse raises).
    """
    result = Result()
    with open(context.args['<input-file>'], mode='r', encoding='utf-8') as in_file:
        reader = PropertyIO.get_instance_by_filename(in_file.name)
        with open(context.args['<output-file>'], mode='w', encoding='utf-8') as out_file:
            writer = PropertyIO.get_instance_by_filename(out_file.name)
            config = reader.from_stream(in_file)
            writer.to_stream(out_file, config)
    return result
def update_project_property_file(context: Context, prev_properties: dict,
                                 new_version: str, new_sequential_version: int,
                                 commit_out: CommitInfo):
    """
    Write the new version numbers into the configured project property file and
    register the file plus descriptive messages on the pending commit.

    :param context: Context providing config (property file path, property keys).
    :param prev_properties: previously read property values.
    :param new_version: version string to store.
    :param new_sequential_version: sequential version number to store.
    :param commit_out: CommitInfo collecting files and commit message lines.
    :return: Result whose ``value`` is True when the property file was updated.
    """
    result = Result()
    result.value = False
    if context.config.property_file is not None:
        property_reader = PropertyIO.get_instance_by_filename(
            context.config.property_file)
        if property_reader is None:
            # Unsupported property file format.
            result.fail(
                os.EX_DATAERR,
                _("Property file not supported: {path}\n"
                  "Currently supported:\n"
                  "{listing}").format(path=repr(context.config.property_file),
                                      listing='\n'.join(
                                          ' - ' + type
                                          for type in ['*.properties'])),
                None)
        properties = update_project_properties(context, prev_properties,
                                               new_version,
                                               new_sequential_version)
        property_reader.write_file(context.config.property_file, properties)
        commit_out.add_file(context.config.property_file)
        result.value = True
    else:
        properties = None
    var_separator = ' : '
    if properties is not None:
        # Record the stored key/value pairs in the commit message for traceability.
        def log_property(properties: dict, key: str):
            if key is not None:
                commit_out.add_message('#properties[' + utils.quote(key, '"') +
                                       ']' + var_separator +
                                       cli.if_none(properties.get(key), "null"))

        for property_key in [
            context.config.version_property,
            context.config.sequence_number_property
        ]:
            log_property(properties, property_key)
    # result.value is a bool; `!= 0` is equivalent to truthiness here.
    if context.verbose and result.value != 0:
        print("properties have changed")
        print("commit message:")
        print(commit_out.message)
    return result
def call(context: Context) -> Result:
    """
    Log command entry point: run an interactive ``git log``, optionally rooted
    at a branch/version-tag resolved from ``<object>``, forwarding extra
    ``<git-arg>`` arguments.

    :param context: current invocation Context (args, repo, flags).
    :return: the shared ``context.result``.
    """
    command_context = get_command_context(
        context=context,
        object_arg=context.args['<work-branch>']
    )
    check_in_repo(command_context)
    object_arg = context.args['<object>']
    args = context.args['<git-arg>']
    if object_arg is not None:
        selected_branch = get_branch_by_branch_name_or_version_tag(
            context, object_arg, BranchSelection.BRANCH_PREFER_LOCAL)
        if selected_branch is None:
            command_context.fail(os.EX_USAGE,
                                 _("Log failed."),
                                 _("Failed to resolve an object for token {object}.")
                                 .format(object=repr(object_arg))
                                 )
    else:
        selected_branch = None
    # Forward selected gitflow flags to git log.
    log_command = ['log']
    if context.pretty:
        log_command.append('--pretty')
    if context.dry_run:
        log_command.append('--dry-run')
    if context.verbose:
        log_command.append('--verbose')
    if selected_branch is not None:
        log_command.append(selected_branch)
    proc = repotools.git_interactive(context.repo, *(log_command + args))
    proc.wait()
    return context.result
def git_for_line_or_fail(context: RepoContext, result: Result, command: list,
                         error_message: str = None, error_reason: str = None):
    """
    Run a git command that is expected to produce exactly one output line and
    fail the given result when it does not.

    :param context: repository context for the git invocation.
    :param result: Result to fail on error.
    :param command: git sub-command and arguments as a list.
    :param error_message: optional custom failure message; a generic
        "git <command> failed." message is used when omitted.
    :param error_reason: optional failure reason passed through to the result.
    :return: the output line, or None when the command produced none.
    """
    output_line = repotools.git_for_line(context, *command)
    if output_line is not None:
        return output_line
    # Build the generic message lazily — only when no custom one was supplied.
    if error_message is not None:
        message = error_message
    else:
        message = _("git {sub_command} failed.").format(
            sub_command=repr(utils.command_to_str(command)))
    result.fail(os.EX_DATAERR, message, error_reason)
    return output_line
def git_or_fail(context: RepoContext, result: Result, command: list,
                error_message: str = None, error_reason: str = None):
    """
    Run a git command and fail the given result when it exits non-zero.

    :param context: repository context for the git invocation.
    :param result: Result to fail on error.
    :param command: git sub-command and arguments as a list.
    :param error_message: optional custom failure message; a generic
        "git <command> failed." message (using the first non-option token)
        is used when omitted.
    :param error_reason: optional failure reason passed through to the result.
    """
    returncode = git(context, command)
    if returncode == os.EX_OK:
        return
    if error_message is not None:
        result.fail(os.EX_DATAERR, error_message, error_reason)
        return
    # Name the failure after the first non-option token (the sub-command).
    first_command_token = next(
        token for token in command if not token.startswith('-'))
    result.fail(
        os.EX_DATAERR,
        _("git {sub_command} failed.").format(
            sub_command=repr(first_command_token)),
        error_reason)
def pre_commit(context: Context) -> Result:
    """
    Git pre-commit hook: verify that the current branch may be modified.

    Requirement violations are recorded on the command context
    (``throw=False``) instead of raising.

    :param context: Context for the current invocation.
    :return: Result (errors are collected on the command context, not here).
    """
    result = Result()
    command_context = common.get_command_context(context=context,
                                                 object_arg='HEAD')
    current_branch = repotools.git_get_current_branch(context.repo)
    common.check_requirements(
        command_context=command_context,
        ref=current_branch,
        branch_classes=None,
        modifiable=True,
        with_upstream=False,
        in_sync_with_upstream=False,
        fail_message=_("Commit rejected."),
        throw=False,
    )
    return result
def read_properties_in_commit(context: Context, repo: RepoContext, config: dict,
                              commit: str):
    """
    Read and parse the project property file as it exists in a given commit.

    Returns None (implicitly or explicitly) when there is no config, no
    configured property file, or the file does not exist in the commit;
    fails the context when the file exists but cannot be parsed.

    :param context: Context used for error reporting.
    :param repo: repository to read the file contents from.
    :param config: project configuration dict (may be None).
    :param commit: commit to read the property file from.
    :return: parsed properties dict, or None when unavailable.
    """
    if config is not None:
        property_file = config.get(const.CONFIG_PROJECT_PROPERTY_FILE)
        if property_file is None:
            return None
        properties_bytes = repotools.get_file_contents(repo, commit,
                                                       property_file)
        if properties_bytes is None:
            # File absent in this commit; bare `return` == return None.
            return
        property_reader = PropertyIO.get_instance_by_filename(property_file)
        properties = property_reader.from_bytes(
            properties_bytes, const.DEFAULT_PROPERTY_ENCODING)
        if properties is None:
            context.fail(os.EX_DATAERR, _("Failed to parse properties."), None)
        return properties
    # NOTE(review): when config is None the function falls through and returns
    # None implicitly — structure reconstructed from mangled source; confirm.
def check_requirements(command_context: CommandContext,
                       ref: repotools.Ref,
                       branch_classes: Union[list, None],
                       modifiable: bool,
                       with_upstream: bool,
                       in_sync_with_upstream: bool,
                       fail_message: str,
                       allow_unversioned_changes: bool = None,
                       throw=True):
    """
    Validate a set of preconditions on the selected ref before a command runs.

    Checks, in order: allowed branch class, presence of an upstream branch,
    sync with the upstream (selected commit must be the merge base), absence of
    discontinuation tags, and (unless ``allow_unversioned_changes``) a clean
    index on the current branch.

    :param command_context: CommandContext providing context, repo and error sink.
    :param ref: the ref being validated.
    :param branch_classes: allowed branch classes, or None to skip the check.
    :param modifiable: when True, discontinued branches are rejected.
    :param with_upstream: when True, an upstream branch must exist.
    :param in_sync_with_upstream: when True, local must not be behind upstream.
    :param fail_message: message used for every violation raised here.
    :param allow_unversioned_changes: when falsy (including the default None),
        uncommitted changes on the current branch are rejected.
    :param throw: passed through to ``command_context.error`` — raise vs. record.
    """
    branch_class = get_branch_class(command_context.context, ref)
    if branch_classes is not None and branch_class not in branch_classes:
        command_context.error(
            os.EX_USAGE, fail_message,
            _("The branch {branch} is of type {type} must be one of these types:{allowed_types}"
              ).format(
                branch=repr(ref.name),
                type=repr(
                    branch_class.name if branch_class is not None else None),
                allowed_types='\n - ' + '\n - '.join(
                    branch_class.name for branch_class in branch_classes)),
            throw)
    if ref.local_branch_name is not None:
        # check, whether the selected branch/commit is on remote
        if with_upstream and command_context.selected_branch.upstream is None:
            command_context.error(
                os.EX_USAGE, fail_message,
                _("{branch} does not have an upstream branch.").format(
                    branch=repr(ref.name)),
                throw)
        # if branch_info.upstream.short_name != selected_ref.short_name:
        #     result.error(os.EX_USAGE,
        #                  _("Version creation failed."),
        #                  _("{branch} has an upstream branch with mismatching short name: {remote_branch}.")
        #                  .format(branch=repr(selected_ref.name),
        #                          remote_branch=repr(branch_info.upstream.name))
        #                  )
        if in_sync_with_upstream and command_context.selected_branch.upstream is not None:
            # In-sync means the selected commit is the merge base with upstream,
            # i.e. local is not ahead of what was pushed.
            push_merge_base = repotools.git_merge_base(
                command_context.context.repo,
                command_context.selected_commit,
                command_context.selected_branch.upstream)
            if push_merge_base is None:
                command_context.error(
                    os.EX_USAGE, fail_message,
                    _("{branch} does not have a common base with its upstream branch: {remote_branch}"
                      ).format(
                        branch=repr(ref.name),
                        remote_branch=repr(
                            command_context.selected_branch.upstream.name)),
                    throw)
            elif push_merge_base != command_context.selected_commit:
                # NOTE: remote_branch is supplied but unused by this format string.
                command_context.error(
                    os.EX_USAGE, fail_message,
                    _("{branch} is not in sync with its upstream branch.\n"
                      "Push your changes and try again.").format(
                        branch=repr(ref.name),
                        remote_branch=repr(
                            command_context.selected_branch.upstream.name)),
                    throw)
    discontinuation_tags, discontinuation_tag_name = get_discontinuation_tags(
        command_context.context, ref)
    if modifiable and len(discontinuation_tags):
        command_context.error(
            os.EX_USAGE, fail_message,
            _("{branch} is discontinued.").format(branch=repr(ref.name)),
            throw)
    if not allow_unversioned_changes:
        # Only meaningful when the validated ref is the checked-out branch.
        current_branch = git_get_current_branch(command_context.context.repo)
        if ref == current_branch:
            returncode = git(
                command_context.context.repo,
                ['diff-index', '--name-status', '--exit-code', current_branch])
            if returncode != os.EX_OK:
                command_context.error(
                    os.EX_USAGE, fail_message,
                    _("{branch} has uncommitted changes.").format(
                        branch=repr(ref.name)),
                    throw)
def check_in_repo(command_context: CommandContext):
    """
    Fail the command with EX_USAGE when no git repository was detected at the
    current location.

    :param command_context: CommandContext whose ``context.repo`` is inspected.
    """
    repo = command_context.context.repo
    if repo is None:
        command_context.fail(os.EX_USAGE, _("No repo at this location."), None)
def get_command_context(context, object_arg: str) -> CommandContext:
    """
    Build a CommandContext for the current invocation by resolving
    ``object_arg`` (branch name, version tag, or any git rev) to a concrete
    ref and commit, and determining which main branches contain that commit.

    When ``object_arg`` is None the current branch is used. When the commit is
    contained in exactly one main branch and no branch was selected explicitly
    (or a tag was selected), that main branch becomes the selected ref.

    :param context: Context with repo, config and verbosity.
    :param object_arg: object token from the command line, or None.
    :return: populated CommandContext; resolution failures are reported via
        ``command_context.fail``.
    """
    command_context = CommandContext()
    command_context.object_arg = object_arg
    command_context.context = context
    if context.repo is not None:
        command_context.upstreams = repotools.git_get_upstreams(
            context.repo, const.LOCAL_BRANCH_PREFIX)
        # Inverse mapping: upstream ref -> local ref.
        command_context.downstreams = {
            v: k
            for k, v in command_context.upstreams.items()
        }
        # resolve the full rev name and its hash for consistency
        selected_ref = None
        current_branch = repotools.git_get_current_branch(context.repo)
        affected_main_branches = None
        if object_arg is None:
            if current_branch is None:
                command_context.fail(
                    os.EX_USAGE,
                    _("Operation failed."),
                    _("No object specified and not on a branch (may be an empty repository)."
                      ))
            commit = current_branch.target.obj_name
            selected_ref = current_branch
        else:
            # First try branch-name / version-tag resolution, then fall back
            # to a generic rev-parse.
            branch_ref = get_branch_by_branch_name_or_version_tag(
                context, object_arg, BranchSelection.BRANCH_PREFER_LOCAL)
            if branch_ref is not None:
                selected_ref = branch_ref
                commit = branch_ref.target.obj_name
            else:
                branch_ref = repotools.git_rev_parse(context.repo, '--revs-only',
                                                     '--symbolic-full-name',
                                                     object_arg)
                commit = repotools.git_rev_parse(context.repo, '--revs-only',
                                                 object_arg)
                if branch_ref is not None:
                    # Synthesize a Ref for the symbolic name.
                    selected_ref = repotools.Ref()
                    selected_ref.name = branch_ref
                    selected_ref.obj_type = 'commit'
                    selected_ref.obj_name = commit
                if commit is None:
                    command_context.fail(
                        os.EX_USAGE,
                        _("Failed to resolve object {object}.").format(
                            object=repr(object_arg)),
                        _("No corresponding commit found."))
        # determine affected branches
        # A main branch is "affected" when it is not itself a downstream and the
        # commit is reachable along its first-parent history.
        affected_main_branches = list(
            filter(
                lambda ref:
                (ref.name not in command_context.downstreams and commit in [
                    reachable_commit.obj_name
                    for reachable_commit in repotools.git_list_commits(
                        context=context.repo,
                        start=None,
                        end=ref,
                        options=['--first-parent'])
                ]),
                repotools.git_list_refs(
                    context.repo, '--contains', commit,
                    repotools.create_ref_name(const.REMOTES_PREFIX,
                                              context.config.remote_name,
                                              'release'),
                    'refs/heads/release',
                    'refs/heads/' + context.config.release_branch_base)))
        if len(affected_main_branches) == 1:
            # Prefer the unique main branch over an unselected ref or a tag.
            if selected_ref is None or selected_ref.name.startswith(
                    const.LOCAL_TAG_PREFIX):
                selected_ref = affected_main_branches[0]
        if selected_ref is None:
            if len(affected_main_branches) == 0:
                command_context.fail(
                    os.EX_USAGE,
                    _("Failed to resolve target branch"),
                    _("Failed to resolve branch containing object: {object}").
                    format(object=repr(object_arg)))
            else:
                command_context.fail(
                    os.EX_USAGE,
                    _("Failed to resolve unique branch for object: {object}").
                    format(object=repr(object_arg)),
                    _("Multiple different branches contain this commit:\n"
                      "{listing}").format(listing='\n'.join(
                        ' - ' + repr(ref.name)
                        for ref in affected_main_branches)))
        if selected_ref is None or commit is None:
            command_context.fail(
                os.EX_USAGE,
                _("Failed to resolve ref."),
                _("{object} could not be resolved.").format(
                    object=repr(object_arg)))
        if context.verbose >= const.INFO_VERBOSITY:
            cli.print(
                _("Target branch: {name} ({commit})").format(
                    name=repr(selected_ref.name),
                    commit=selected_ref.target.obj_name))
            cli.print(_("Target commit: {commit}").format(commit=commit))
        branch_info = get_branch_info(command_context, selected_ref)
        command_context.selected_ref = selected_ref
        command_context.selected_commit = commit
        command_context.selected_branch = branch_info
        command_context.selected_explicitly = object_arg is not None
        command_context.affected_main_branches = affected_main_branches
        command_context.current_branch = current_branch
    return command_context
def clone_repository(context: Context, branch: str) -> Result:
    """
    Clone the configured remote (or, with push_to_local, the local repo itself)
    at ``branch`` into a fresh private temp directory.

    On success ``result.value`` is a RepoContext for the clone; on failure the
    temp directory is removed again.

    :param context: Context providing repo, config and verbosity.
    :param branch: branch to clone.
    :rtype: Result
    """
    result = Result()

    remote = repotools.git_get_remote(context.repo, context.config.remote_name)
    if remote is None:
        result.fail(
            os.EX_DATAERR,
            _("Failed to clone repo."),
            _("The remote {remote} does not exist.").format(
                remote=repr(context.config.remote_name)))

    tempdir_path = tempfile.mkdtemp(
        prefix=os.path.basename(context.repo.dir) + ".gitflow-clone.")
    try:
        # Sanity-check the temp dir and make it private before cloning into it.
        if os.path.exists(tempdir_path):
            os.chmod(path=tempdir_path, mode=0o700)
            if os.path.isdir(tempdir_path):
                if os.listdir(tempdir_path):
                    result.fail(
                        os.EX_DATAERR,
                        _("Failed to clone repo."),
                        _("Directory is not empty: {path}").format(
                            path=tempdir_path))
            else:
                result.fail(
                    os.EX_DATAERR,
                    _("Failed to clone repo."),
                    _("File is not a directory: {path}").format(
                        path=tempdir_path))
        else:
            result.fail(
                os.EX_DATAERR,
                _("Failed to clone repo."),
                _("File does not exist: {path}").format(path=tempdir_path))

        if context.config.push_to_local:
            # Clone the local repo directly, sharing its object store.
            returncode, out, err = repotools.git_raw(
                git=context.repo.git,
                args=[
                    'clone', '--branch', branch, '--shared', context.repo.dir,
                    tempdir_path
                ],
                verbose=context.verbose)
        else:
            # Clone the remote, borrowing objects from the local repo.
            returncode, out, err = repotools.git_raw(
                git=context.repo.git,
                args=[
                    'clone', '--branch', branch, '--reference',
                    context.repo.dir, remote.url, tempdir_path
                ],
                verbose=context.verbose)

        if returncode != os.EX_OK:
            result.error(os.EX_DATAERR,
                         _("Failed to clone the repository."),
                         _("An unexpected error occurred."))
    except Exception:
        # Narrowed from a bare `except:` — a bare clause also swallowed
        # KeyboardInterrupt/SystemExit. Any clone failure is reported as error.
        result.error(os.EX_DATAERR,
                     _("Failed to clone the repository."),
                     _("An unexpected error occurred."))
    finally:
        context.add_subresult(result)

    if not result.has_errors():
        repo = RepoContext()
        repo.git = context.repo.git
        repo.dir = tempdir_path
        repo.verbose = context.repo.verbose
        result.value = repo
    else:
        shutil.rmtree(path=tempdir_path)

    return result
def main(argv: list = sys.argv) -> int:
    """
    CLI entry point: parse arguments, build the Context, dispatch to a hook or
    one or more sub-commands, and aggregate their errors into an exit code.

    NOTE: the default ``argv=sys.argv`` is bound at import time — acceptable for
    a process entry point.

    :param argv: full argument vector; ``argv[0]`` is skipped for docopt.
    :return: process exit code (first non-OK error code, or os.EX_OK).
    """
    if ENABLE_PROFILER:
        import cProfile
        profiler = cProfile.Profile()
        profiler.enable()
    else:
        profiler = None

    result = Result()
    args = docopt.docopt(argv=argv[1:], doc=__doc__, version=const.VERSION,
                         help=True, options_first=False)
    try:
        context = Context.create(args, result)
    except GitFlowException as e:
        context = None
        pass  # errors are in result
    if context is not None:
        try:
            if context.verbose >= const.DEBUG_VERBOSITY:
                cli.print("GitFlow version: " + const.VERSION)
                cli.print("Python version:" + sys.version.replace('\n', ' '))
                cli.print("cwd: " + os.getcwd())

            if args['--hook'] is not None:
                # Hook mode: dispatch to a hook_* function by name.
                if context.verbose >= const.TRACE_VERBOSITY:
                    cli.print('hook=' + args['--hook'])
                hook_func = cli.get_cmd([
                    hook_pre_commit,
                    hook_pre_push,
                ], args['--hook'], 'hook_')
                try:
                    hook_result = hook_func(context)
                except GitFlowException as e:
                    hook_result = e.result
                result.errors.extend(hook_result.errors)
            else:
                # Command mode: map docopt flags to command functions.
                commands = {
                    'status': cmd_status,
                    'bump-major': cmd_bump_major,
                    'bump-minor': cmd_bump_minor,
                    'bump-patch': cmd_bump_patch,
                    'bump-prerelease-type': cmd_bump_prerelease_type,
                    'bump-prerelease': cmd_bump_prerelease,
                    'bump-to-release': cmd_bump_to_release,
                    'bump-to': cmd_bump_to,
                    'discontinue': cmd_discontinue,
                    'start': cmd_start,
                    'finish': cmd_finish,
                    'log': cmd_log,
                    'assemble': cmd_build,
                    'test': cmd_build,
                    'integration-test': cmd_build,
                    'drop-cache': cmd_drop_cache,
                    'convert-config': cmd_convert_config,
                }
                command_funcs = list()
                for command_name, command_func in commands.items():
                    if args[command_name] is True:
                        command_funcs.append(command_func)
                if not len(command_funcs):
                    cli.fail(os.EX_SOFTWARE, "unimplemented command")
                if context.verbose >= const.TRACE_VERBOSITY:
                    cli.print("commands: " + repr(command_funcs))
                # Remember the branch so we can tell the user if a command moved us.
                start_branch = repotools.git_get_current_branch(context.repo) \
                    if context.repo is not None else None
                for command_func in command_funcs:
                    try:
                        command_result = command_func(context)
                    except GitFlowException as e:
                        command_result = e.result
                    result.errors.extend(command_result.errors)
                    if result.has_errors():
                        break
                current_branch = repotools.git_get_current_branch(context.repo) \
                    if context.repo is not None else None
                if current_branch is not None and current_branch != start_branch:
                    cli.print(_("You are now on {branch}.")
                              .format(branch=repr(current_branch.short_name)
                                      if current_branch is not None else '-'))
        finally:
            context.cleanup()

    # Determine the exit code: first non-OK error wins (EX_SOFTWARE is sticky).
    exit_code = os.EX_OK
    if len(result.errors):
        sys.stderr.flush()
        sys.stdout.flush()
        for error in result.errors:
            if error.exit_code != os.EX_OK and exit_code != os.EX_SOFTWARE:
                exit_code = error.exit_code
            cli.eprint('\n'.join(filter(None, [error.message, error.reason])))

    # print dry run status, if possible
    if context is not None:
        if exit_code == os.EX_OK:
            if context.dry_run:
                cli.print('')
                cli.print("dry run succeeded")
            else:
                pass
        else:
            if context.dry_run:
                cli.print('')
                cli.eprint("dry run failed")
            else:
                pass

    if profiler is not None:
        profiler.disable()
        # pr.dump_stats('profile.pstat')
        profiler.print_stats(sort="calls")

    return exit_code
def call(context: Context) -> Result:
    """
    Create a new work branch (<supertype>/<type>/<name>) off a base branch and
    check it out.

    The work branch may be given either as an explicit '<work-branch>' ref or
    assembled from the '<supertype>', '<type>' and '<name>' arguments. Fails if
    the branch already exists locally or remotely, if the base branch class is
    not a valid base for the requested supertype, or if the index has staged
    changes.

    :param context: the initialized command Context (args, repo, config).
    :return: the accumulated Result from the context.
    """
    command_context = get_command_context(
        context=context,
        object_arg=context.args['<base-object>'])

    check_in_repo(command_context)

    check_requirements(
        command_context=command_context,
        ref=command_context.selected_ref,
        branch_classes=None,
        modifiable=True,
        with_upstream=True,  # not context.config.push_to_local
        in_sync_with_upstream=True,
        fail_message=_("Version creation failed."))

    selected_work_branch = context.args.get('<work-branch>')

    if selected_work_branch is not None:
        # Explicit work branch given: normalize to a full local ref name and
        # split it into its prefix/type/name components.
        selected_work_branch = repotools.create_ref_name(selected_work_branch)
        if not selected_work_branch.startswith(const.LOCAL_BRANCH_PREFIX):
            selected_work_branch = const.LOCAL_BRANCH_PREFIX + selected_work_branch
        branch_match = context.work_branch_matcher.fullmatch(
            selected_work_branch)
        if branch_match is None:
            context.fail(
                os.EX_USAGE,
                _("Invalid work branch: {branch}.").format(
                    branch=repr(selected_work_branch)),
                None)
        groups = branch_match.groupdict()

        branch_supertype = groups['prefix']
        branch_type = groups['type']
        branch_short_name = groups['name']
    else:
        # Otherwise assemble the branch from its individual arguments.
        branch_supertype = context.args['<supertype>']
        branch_type = context.args['<type>']
        branch_short_name = context.args['<name>']

    if branch_supertype not in [
        const.BRANCH_PREFIX_DEV, const.BRANCH_PREFIX_PROD
    ]:
        context.fail(
            os.EX_USAGE,
            _("Invalid branch super type: {supertype}.").format(
                supertype=repr(branch_supertype)),
            None)

    work_branch_name = repotools.create_ref_name(branch_supertype,
                                                 branch_type,
                                                 branch_short_name)
    work_branch_ref_name = repotools.create_ref_name(const.LOCAL_BRANCH_PREFIX,
                                                     work_branch_name)
    work_branch_class = get_branch_class(context, work_branch_ref_name)

    if True:
        # The new branch must not exist anywhere yet.
        work_branch_info = get_branch_info(command_context, work_branch_ref_name)
        if work_branch_info is not None:
            context.fail(
                os.EX_USAGE,
                _("The branch {branch} already exists locally or remotely.").
                format(branch=repr(work_branch_name)),
                None)

    allowed_base_branch_class = const.BRANCHING[work_branch_class]

    base_branch, base_branch_class = select_ref(
        command_context.result, command_context.selected_branch,
        BranchSelection.BRANCH_PREFER_LOCAL)
    if not command_context.selected_explicitly and branch_supertype == const.BRANCH_PREFIX_DEV:
        # Dev branches default to the configured release branch base when no
        # base object was selected explicitly.
        base_branch_info = get_branch_info(
            command_context,
            repotools.create_ref_name(const.LOCAL_BRANCH_PREFIX,
                                      context.config.release_branch_base))
        base_branch, base_branch_class = select_ref(
            command_context.result, base_branch_info,
            BranchSelection.BRANCH_PREFER_LOCAL)

    if allowed_base_branch_class != base_branch_class:
        context.fail(
            os.EX_USAGE,
            _("The branch {branch} is not a valid base for {supertype} branches."
              ).format(branch=repr(base_branch.name),
                       supertype=repr(branch_supertype)),
            None)

    if base_branch is None:
        context.fail(os.EX_USAGE, _("Base branch undetermined."), None)

    if context.verbose:
        cli.print("branch_name: " + command_context.selected_ref.name)
        cli.print("work_branch_name: " + work_branch_name)
        cli.print("base_branch_name: " + base_branch.name)

    if not context.dry_run and not command_context.has_errors():
        # 'git diff-index' exit status 1 means there are staged differences.
        index_status = git(context.repo, ['diff-index', 'HEAD', '--'])
        if index_status == 1:
            context.fail(
                os.EX_USAGE,
                _("Branch creation aborted."),
                _("You have staged changes in your workspace.\n"
                  "Unstage, commit or stash them and try again."))
        elif index_status != 0:
            context.fail(os.EX_DATAERR,
                         _("Failed to determine index status."),
                         None)

        # Create the ref at the selected commit, then check it out.
        git_or_fail(
            context.repo, command_context.result, [
                'update-ref', work_branch_ref_name,
                command_context.selected_commit, ''
            ],
            _("Failed to create branch {branch_name}.").format(
                branch_name=work_branch_name))
        git_or_fail(
            context.repo, command_context.result,
            ['checkout', work_branch_name],
            _("Failed to checkout branch {branch_name}.").format(
                branch_name=work_branch_name))

    return context.result
def create(args: dict, result_out: Result) -> 'Context':
    """
    Build and initialize a Context from parsed CLI args: configure the CLI,
    discover the repository, locate and parse the configuration file, and
    populate version/branch/tag configuration.

    :param args: the docopt argument dict, or None for an empty context.
    :param result_out: receives configuration errors (fail() raises).
    :return: the initialized Context.
    """
    context = Context()
    context.config: Config = Config()

    if args is not None:
        context.args = args

        context.batch = context.args['--batch']
        context.assume_yes = context.args.get('--assume-yes')
        context.dry_run = context.args.get('--dry-run')
        # TODO remove this workaround
        context.verbose = (context.args['--verbose'] + 1) // 2
        context.pretty = context.args['--pretty']
    else:
        context.args = dict()

    # configure CLI
    cli.set_allow_color(not context.batch)

    # initialize repo context and attempt to load the config file
    if '--root' in context.args and context.args['--root'] is not None:
        context.root = context.args['--root']

        context.repo = RepoContext()
        context.repo.dir = context.root
        context.repo.verbose = context.verbose

        context.git_version = repotools.git_version(context.repo)
        # context.repo.use_root_dir_arg = semver.compare(context.git_version, "2.9.0") >= 0
        context.repo.use_root_dir_arg = False

        repo_root = repotools.git_rev_parse(context.repo, '--show-toplevel')

        # None when invalid or bare
        if repo_root is not None:
            context.repo.dir = repo_root

            if context.verbose >= const.TRACE_VERBOSITY:
                cli.print("--------------------------------------------------------------------------------")
                cli.print("refs in {repo}:".format(repo=context.repo.dir))
                cli.print("--------------------------------------------------------------------------------")
                for ref in repotools.git_list_refs(context.repo):
                    cli.print(repr(ref))
                cli.print("--------------------------------------------------------------------------------")
            config_dir = context.repo.dir
        else:
            context.repo = None
            config_dir = context.root

        gitflow_config_file: Optional[str] = None
        if context.args['--config'] is not None:
            gitflow_config_file = os.path.join(config_dir, context.args['--config'])
            # FIX: os.path.join never returns None, so the previous
            # `if gitflow_config_file is None` check was dead code; the error
            # message shows the intent is a file-existence check.
            if not os.path.isfile(gitflow_config_file):
                result_out.fail(os.EX_DATAERR,
                                _("the specified config file does not exist or is not a regular file: {path}.")
                                .format(path=repr(gitflow_config_file)),
                                None
                                )
        else:
            # No explicit --config: probe the default file names in order.
            for config_filename in const.DEFAULT_CONFIGURATION_FILE_NAMES:
                path = os.path.join(config_dir, config_filename)
                if os.path.exists(path):
                    gitflow_config_file = path
                    break
            if gitflow_config_file is None:
                result_out.fail(os.EX_DATAERR,
                                _("config file not found.")
                                .format(path=repr(gitflow_config_file)),
                                _("Default config files are\n:{list}")
                                .format(list=const.DEFAULT_CONFIGURATION_FILE_NAMES)
                                )

        if context.verbose >= const.TRACE_VERBOSITY:
            cli.print("gitflow_config_file: " + gitflow_config_file)

        with open(gitflow_config_file) as json_file:
            config = PropertyIO.get_instance_by_filename(gitflow_config_file).from_stream(json_file)
    else:
        # FIX: was `config = object()`, which has no .get() and would raise
        # AttributeError on every access below; an empty dict preserves the
        # intended "no configuration" behavior.
        config = dict()

    build_config_json = config.get(const.CONFIG_BUILD)

    context.config.version_change_actions = config.get(const.CONFIG_ON_VERSION_CHANGE, [])

    context.config.build_stages = list()

    if build_config_json is not None:
        stages_json = build_config_json.get('stages')
        if stages_json is not None:
            for stage_key, stage_json in stages_json.items():
                stage = BuildStage()

                if isinstance(stage_json, dict):
                    stage.type = stage_json.get('type') or stage_key
                    if stage.type not in const.BUILD_STAGE_TYPES:
                        result_out.fail(
                            os.EX_DATAERR,
                            _("Configuration failed."),
                            _("Invalid build stage type {key}."
                              .format(key=repr(stage.type)))
                        )
                    stage.name = stage_json.get('name') or stage_key

                    stage_labels = stage_json.get('labels')
                    if isinstance(stage_labels, list):
                        stage.labels.extend(stage_labels)
                    else:
                        stage.labels.append(stage_labels)

                    stage_steps_json = stage_json.get('steps')
                    if stage_steps_json is not None:
                        for step_key, step_json in stage_steps_json.items():
                            step = BuildStep()

                            if isinstance(step_json, dict):
                                step.name = step_json.get('name') or step_key
                                step.commands = step_json.get('commands')

                                # NOTE(review): this re-reads stage_json's
                                # labels and appends them to stage.labels once
                                # per step — looks like a copy-paste bug that
                                # should read step_json.get('labels') into
                                # step labels; behavior kept pending
                                # confirmation of BuildStep's fields.
                                stage_labels = stage_json.get('labels')
                                if isinstance(stage_labels, list):
                                    stage.labels.extend(stage_labels)
                                else:
                                    stage.labels.append(stage_labels)
                            elif isinstance(step_json, list):
                                step.name = step_key
                                step.type = step_key
                                step.commands = step_json
                            else:
                                result_out.fail(
                                    os.EX_DATAERR,
                                    _("Configuration failed."),
                                    _("Invalid build step definition {type} {key}."
                                      .format(type=repr(type(step_json)), key=repr(step_key)))
                                )
                            stage.steps.append(step)
                elif isinstance(stage_json, list):
                    # Shorthand form: a stage is just a list of commands.
                    stage.type = stage_key
                    stage.name = stage_key

                    if len(stage_json):
                        step = BuildStep()
                        step.name = '#'
                        step.commands = stage_json
                        stage.steps.append(step)
                else:
                    result_out.fail(
                        os.EX_DATAERR,
                        _("Configuration failed."),
                        _("Invalid build stage definition {key}."
                          .format(key=repr(stage_key)))
                    )
                context.config.build_stages.append(stage)

    # Order stages by their canonical position in BUILD_STAGE_TYPES.
    context.config.build_stages.sort(
        key=utils.cmp_to_key(lambda stage_a, stage_b:
                             const.BUILD_STAGE_TYPES.index(stage_a.type)
                             - const.BUILD_STAGE_TYPES.index(stage_b.type)
                             ),
        reverse=False
    )

    # project properties config

    context.config.property_file = config.get(const.CONFIG_PROJECT_PROPERTY_FILE)
    if context.config.property_file is not None:
        context.config.property_file = os.path.join(context.root, context.config.property_file)

    context.config.sequence_number_property = config.get(
        const.CONFIG_SEQUENCE_NUMBER_PROPERTY)
    # FIX: version_property was assigned twice with the identical expression;
    # one assignment suffices.
    context.config.version_property = config.get(
        const.CONFIG_VERSION_PROPERTY)

    property_names = [property for property in
                      [context.config.sequence_number_property,
                       context.config.version_property] if property is not None]
    duplicate_property_names = [item for item, count in collections.Counter(property_names).items() if count > 1]

    if len(duplicate_property_names):
        result_out.fail(os.EX_DATAERR,
                        _("Configuration failed."),
                        _("Duplicate property names: {duplicate_property_names}").format(
                            duplicate_property_names=', '.join(duplicate_property_names))
                        )

    # version config

    context.config.version_config = VersionConfig()

    versioning_scheme = config.get(const.CONFIG_VERSIONING_SCHEME, const.DEFAULT_VERSIONING_SCHEME)

    if versioning_scheme not in const.VERSIONING_SCHEMES:
        result_out.fail(os.EX_DATAERR,
                        _("Configuration failed."),
                        _("The versioning scheme {versioning_scheme} is invalid.").format(
                            versioning_scheme=utils.quote(versioning_scheme, '\'')))

    context.config.version_config.versioning_scheme = const.VERSIONING_SCHEMES[versioning_scheme]

    if context.config.version_config.versioning_scheme == VersioningScheme.SEMVER:
        qualifiers = config.get(const.CONFIG_VERSION_TYPES, const.DEFAULT_PRE_RELEASE_QUALIFIERS)
        if isinstance(qualifiers, str):
            qualifiers = [qualifier.strip() for qualifier in qualifiers.split(",")]
        # Qualifier precedence is their list order, so it must be ascending.
        if qualifiers != sorted(qualifiers):
            result_out.fail(
                os.EX_DATAERR,
                _("Configuration failed."),
                _("Pre-release qualifiers are not specified in ascending order.")
            )
        context.config.version_config.qualifiers = qualifiers
        context.config.version_config.initial_version = const.DEFAULT_INITIAL_VERSION
    elif context.config.version_config.versioning_scheme == VersioningScheme.SEMVER_WITH_SEQ:
        context.config.version_config.qualifiers = None
        context.config.version_config.initial_version = const.DEFAULT_INITIAL_SEQ_VERSION
    else:
        context.fail(os.EX_CONFIG, "configuration error", "invalid versioning scheme")

    # branch config

    context.config.remote_name = "origin"
    context.config.release_branch_base = config.get(const.CONFIG_RELEASE_BRANCH_BASE,
                                                    const.DEFAULT_RELEASE_BRANCH_BASE)

    remote_prefix = repotools.create_ref_name(const.REMOTES_PREFIX, context.config.remote_name)

    context.release_base_branch_matcher = VersionMatcher(
        [const.LOCAL_BRANCH_PREFIX, remote_prefix],
        None,
        re.escape(context.config.release_branch_base),
    )

    context.release_branch_matcher = VersionMatcher(
        [const.LOCAL_BRANCH_PREFIX, remote_prefix],
        config.get(
            const.CONFIG_RELEASE_BRANCH_PREFIX,
            const.DEFAULT_RELEASE_BRANCH_PREFIX),
        config.get(
            const.CONFIG_RELEASE_BRANCH_PATTERN,
            const.DEFAULT_RELEASE_BRANCH_PATTERN),
    )

    context.work_branch_matcher = VersionMatcher(
        [const.LOCAL_BRANCH_PREFIX, remote_prefix],
        [const.BRANCH_PREFIX_DEV, const.BRANCH_PREFIX_PROD],
        config.get(
            const.CONFIG_WORK_BRANCH_PATTERN,
            const.DEFAULT_WORK_BRANCH_PATTERN),
    )

    context.version_tag_matcher = VersionMatcher(
        [const.LOCAL_TAG_PREFIX],
        config.get(
            const.CONFIG_VERSION_TAG_PREFIX,
            const.DEFAULT_VERSION_TAG_PREFIX),
        config.get(
            const.CONFIG_VERSION_TAG_PATTERN,
            const.DEFAULT_SEMVER_VERSION_TAG_PATTERN
            if context.config.version_config.versioning_scheme == VersioningScheme.SEMVER
            else const.DEFAULT_SEMVER_WITH_SEQ_VERSION_TAG_PATTERN)
    )
    context.version_tag_matcher.group_unique_code = None \
        if context.config.version_config.versioning_scheme == VersioningScheme.SEMVER \
        else 'prerelease_type'

    context.discontinuation_tag_matcher = VersionMatcher(
        [const.LOCAL_TAG_PREFIX],
        config.get(
            const.CONFIG_DISCONTINUATION_TAG_PREFIX,
            const.DEFAULT_DISCONTINUATION_TAG_PREFIX),
        config.get(
            const.CONFIG_DISCONTINUATION_TAG_PATTERN,
            const.DEFAULT_DISCONTINUATION_TAG_PATTERN),
        None
    )

    return context
def call(context: Context) -> Result:
    """
    Merge a finished work branch into its base branch (--no-ff) and push the
    result to the configured remote.

    The work branch comes from '<work-branch>' or from the combined
    '<supertype>'/'<type>'/'<name>' arguments; the base branch is either the
    explicitly selected '<base-object>' or derived from the work branch's
    prefix (release base for dev branches, closest release branch for prod
    branches). Is a no-op if the work branch is already merged.

    :param context: the initialized command Context (args, repo, config).
    :return: the accumulated Result from the context.
    """
    arg_work_branch = context.args.get('<work-branch>')
    if arg_work_branch is None:
        # Assemble the work branch name from its parts, if any were given.
        branch_prefix = context.args['<supertype>']
        branch_type = context.args['<type>']
        branch_name = context.args['<name>']
        if branch_prefix is not None or branch_type is not None or branch_name is not None:
            arg_work_branch = repotools.create_ref_name(branch_prefix, branch_type, branch_name)

    command_context = get_command_context(
        context=context,
        object_arg=arg_work_branch
    )

    check_in_repo(command_context)

    base_command_context = get_command_context(
        context=context,
        object_arg=context.args['<base-object>']
    )

    check_requirements(command_context=command_context,
                       ref=command_context.selected_ref,
                       branch_classes=[BranchClass.WORK_DEV, BranchClass.WORK_PROD],
                       modifiable=True,
                       with_upstream=True,  # not context.config.push_to_local
                       in_sync_with_upstream=True,
                       fail_message=_("Version creation failed.")
                       )

    work_branch = None

    selected_ref_match = context.work_branch_matcher.fullmatch(command_context.selected_ref.name)
    if selected_ref_match is not None:
        work_branch = WorkBranch()
        work_branch.prefix = selected_ref_match.group('prefix')
        work_branch.type = selected_ref_match.group('type')
        work_branch.name = selected_ref_match.group('name')
    else:
        if command_context.selected_explicitly:
            context.fail(os.EX_USAGE,
                         _("The ref {branch} does not refer to a work branch.")
                         .format(branch=repr(command_context.selected_ref.name)),
                         None)

    # NOTE(review): if the selected ref is not a work branch and was not
    # selected explicitly, work_branch stays None and the call below raises
    # AttributeError — presumably check_requirements rules that out; confirm.
    work_branch_info = get_branch_info(command_context, work_branch.local_ref_name())
    if work_branch_info is None:
        context.fail(os.EX_USAGE,
                     _("The branch {branch} does neither exist locally nor remotely.")
                     .format(branch=repr(work_branch.branch_name())),
                     None)

    work_branch_ref, work_branch_class = select_ref(command_context.result,
                                                    work_branch_info,
                                                    BranchSelection.BRANCH_PREFER_LOCAL)

    allowed_base_branch_class = const.BRANCHING[work_branch_class]

    base_branch_info = get_branch_info(base_command_context, base_command_context.selected_ref)

    base_branch_ref, base_branch_class = select_ref(command_context.result,
                                                    base_branch_info,
                                                    BranchSelection.BRANCH_PREFER_LOCAL)
    if not base_command_context.selected_explicitly:
        if work_branch.prefix == const.BRANCH_PREFIX_DEV:
            # Dev branches merge back into the configured release base branch.
            base_branch_info = get_branch_info(base_command_context,
                                               repotools.create_ref_name(const.LOCAL_BRANCH_PREFIX,
                                                                         context.config.release_branch_base))
            base_branch_ref, base_branch_class = select_ref(command_context.result,
                                                            base_branch_info,
                                                            BranchSelection.BRANCH_PREFER_LOCAL)
        elif work_branch.prefix == const.BRANCH_PREFIX_PROD:
            # discover closest merge base in release branches
            release_branches = repotools.git_list_refs(context.repo,
                                                       repotools.create_ref_name(const.REMOTES_PREFIX,
                                                                                 context.config.remote_name,
                                                                                 'release'))
            release_branches = list(release_branches)
            # Highest release version first.
            release_branches.sort(
                reverse=True,
                key=utils.cmp_to_key(lambda ref_a, ref_b: semver.compare(
                    context.release_branch_matcher.format(ref_a.name),
                    context.release_branch_matcher.format(ref_b.name)
                ))
            )
            for release_branch_ref in release_branches:
                # NOTE(review): this computes the merge base against
                # base_branch_ref, not the release_branch_ref being iterated —
                # looks like it should use release_branch_ref; confirm intent.
                merge_base = repotools.git_merge_base(context.repo, base_branch_ref, work_branch_ref.name)
                if merge_base is not None:
                    base_branch_info = get_branch_info(base_command_context, release_branch_ref)
                    base_branch_ref, base_branch_class = select_ref(command_context.result,
                                                                    base_branch_info,
                                                                    BranchSelection.BRANCH_PREFER_LOCAL)
                    break

    if allowed_base_branch_class != base_branch_class:
        context.fail(os.EX_USAGE,
                     _("The branch {branch} is not a valid base for {supertype} branches.")
                     .format(branch=repr(base_branch_ref.name),
                             supertype=repr(work_branch.prefix)),
                     None)

    if base_branch_ref is None:
        context.fail(os.EX_USAGE, _("Base branch undetermined."), None)

    if context.verbose:
        cli.print("branch_name: " + command_context.selected_ref.name)
        cli.print("work_branch_name: " + work_branch_ref.name)
        cli.print("base_branch_name: " + base_branch_ref.name)

    # check, if already merged
    merge_base = repotools.git_merge_base(context.repo, base_branch_ref, work_branch_ref.name)
    if work_branch_ref.obj_name == merge_base:
        cli.print(_("Branch {branch} is already merged.")
                  .format(branch=repr(work_branch_ref.name)))
        return context.result

    # check for staged changes
    index_status = git(context.repo, ['diff-index', 'HEAD', '--'])
    if index_status == 1:
        context.fail(os.EX_USAGE,
                     _("Branch creation aborted."),
                     _("You have staged changes in your workspace.\n"
                       "Unstage, commit or stash them and try again."))
    elif index_status != 0:
        context.fail(os.EX_DATAERR,
                     _("Failed to determine index status."),
                     None)

    if not context.dry_run and not command_context.has_errors():
        # perform merge
        local_branch_ref_name = repotools.create_local_branch_ref_name(base_branch_ref.name)
        local_branch_name = repotools.create_local_branch_name(base_branch_ref.name)
        if local_branch_ref_name == base_branch_ref.name:
            # The base is already a local branch: just check it out.
            git_or_fail(context.repo, command_context.result,
                        ['checkout', local_branch_name],
                        _("Failed to checkout branch {branch_name}.")
                        .format(branch_name=repr(base_branch_ref.short_name))
                        )
        else:
            # The base is a remote ref: create a local tracking branch first.
            git_or_fail(context.repo, command_context.result,
                        ['checkout', '-b', local_branch_name, base_branch_ref.name],
                        _("Failed to checkout branch {branch_name}.")
                        .format(branch_name=repr(base_branch_ref.short_name))
                        )

        git_or_fail(context.repo, command_context.result,
                    ['merge', '--no-ff', work_branch_ref],
                    _("Failed to merge work branch.\n"
                      "Rebase {work_branch} on {base_branch} and try again")
                    .format(work_branch=repr(work_branch_ref.short_name),
                            base_branch=repr(base_branch_ref.short_name))
                    )

        git_or_fail(context.repo, command_context.result,
                    ['push', context.config.remote_name, local_branch_name],
                    _("Failed to push branch {branch_name}.")
                    .format(branch_name=repr(base_branch_ref.short_name))
                    )

    return context.result
def call(context) -> Result:
    """
    Print the status of release branches: local/upstream branch pairing,
    discontinuation state, and version tags on each branch, validating that
    sequential version codes are unique and strictly increasing.

    With --all, all release branches on the configured remote are inspected;
    otherwise only the selected or current branch.

    :param context: the initialized command Context (args, repo, config).
    :return: the accumulated Result from the context.
    """
    command_context = get_command_context(context=context, object_arg=context.args['<object>'])

    check_in_repo(command_context)

    unique_codes = set()
    unique_version_codes = list()

    upstreams = repotools.git_get_upstreams(context.repo)
    branch_info_dict = dict()

    if context.args['--all'] > 0:
        selected_refs = repotools.git_list_refs(
            context.repo,
            repotools.create_ref_name(const.REMOTES_PREFIX, context.config.remote_name))
    else:
        selected_refs = [
            command_context.selected_ref or command_context.current_branch
        ]

    for branch_ref in selected_refs:
        # Only release branches are reported; other refs are skipped.
        branch_match = context.release_branch_matcher.fullmatch(
            branch_ref.name)
        if branch_match:
            branch_version = context.release_branch_matcher.to_version(
                branch_ref.name)
            branch_version_string = get_branch_version_component_for_version(
                context, branch_version)

            discontinuation_tags, discontinuation_tag_name = get_discontinuation_tags(
                context, branch_ref)

            update_branch_info(context, branch_info_dict, upstreams, branch_ref)

            branch_info = branch_info_dict.get(branch_ref.name)
            discontinued = len(discontinuation_tags)

            # Dim colors for discontinued branches, bold for active ones.
            if discontinued:
                status_color = colors.partial(colors.color, fg='gray')
                status_error_color = colors.partial(colors.color, fg='red')
                status_local_color = colors.partial(colors.color, fg='blue')
                status_remote_color = colors.partial(colors.color, fg='green')
            else:
                status_color = colors.partial(colors.color, fg='white', style='bold')
                status_error_color = colors.partial(colors.color, fg='red', style='bold')
                status_local_color = colors.partial(colors.color, fg='blue', style='bold')
                status_remote_color = colors.partial(colors.color, fg='green', style='bold')
            error_color = colors.partial(colors.color, fg='white', bg='red', style='bold')

            cli.fcwrite(sys.stdout, status_color, "version: " + branch_version_string + ' [')
            if branch_info.local is not None:
                i = 0
                for local in branch_info.local:
                    local_branch_color = status_local_color
                    # NOTE(review): branch_info.upstream may be None here,
                    # which would raise AttributeError — confirm upstream is
                    # guaranteed when local branches exist.
                    if not branch_info.upstream.short_name.endswith(
                            '/' + local.short_name):
                        command_context.error(
                            os.EX_DATAERR,
                            _("Local and upstream branch have a mismatching short name."
                              ), None)
                        local_branch_color = error_color
                    if i:
                        cli.fcwrite(sys.stdout, status_color, ', ')
                    if context.verbose:
                        cli.fcwrite(sys.stdout, local_branch_color, local.name)
                    else:
                        cli.fcwrite(sys.stdout, local_branch_color, local.short_name)
                    i += 1
            if branch_info.upstream is not None:
                if branch_info.local is not None and len(branch_info.local):
                    cli.fcwrite(sys.stdout, status_color, ' => ')
                if context.verbose:
                    cli.fcwrite(sys.stdout, status_remote_color, branch_info.upstream.name)
                else:
                    cli.fcwrite(sys.stdout, status_remote_color, branch_info.upstream.short_name)
            cli.fcwrite(sys.stdout, status_color, "]")
            if discontinued:
                cli.fcwrite(sys.stdout, status_color, ' (' + _('discontinued') + ')')
            cli.fcwriteln(sys.stdout, status_color)

            commit_tags = repotools.git_get_branch_tags(
                context=context.repo,
                base_branch=context.config.release_branch_base,
                branch=branch_ref.name,
                tag_filter=None,
                commit_tag_comparator=None)

            for commit, tags in commit_tags:
                for tag in tags:
                    # Sequential-code schemes carry a unique code group in the
                    # tag; collect it and check uniqueness.
                    if context.version_tag_matcher.group_unique_code is not None:
                        tag_match = context.version_tag_matcher.fullmatch(
                            tag.name)
                        if tag_match is not None:
                            unique_code = tag_match.group(
                                context.version_tag_matcher.group_unique_code)
                            version_string = unique_code

                            unique_version_codes.append(int(unique_code))

                            if unique_code in unique_codes:
                                command_context.error(
                                    os.EX_DATAERR,
                                    _("Invalid sequential version tag {tag}.").
                                    format(tag=tag.name),
                                    _("The code element of version {version_string} is not unique."
                                      ).format(version_string=version_string))
                            else:
                                unique_codes.add(unique_code)

                            cli.fcwriteln(sys.stdout, status_color,
                                          " code: " + version_string)

                    # print the version tag
                    version_string = context.version_tag_matcher.format(
                        tag.name)
                    if version_string:
                        version_info = semver.parse_version_info(
                            version_string)
                        # A tag is only valid on a branch with matching
                        # major.minor.
                        if version_info.major == branch_version.major and version_info.minor == branch_version.minor:
                            cli.fcwriteln(sys.stdout, status_color,
                                          " " + version_string)
                        else:
                            command_context.error(
                                os.EX_DATAERR,
                                _("Invalid version tag {tag}.").format(
                                    tag=repr(tag.name)),
                                _("The major.minor part of the new version {new_version}"
                                  " does not match the branch version {branch_version}."
                                  ).format(new_version=repr(version_string),
                                           branch_version=repr(
                                               branch_version_string)))
                            cli.fcwriteln(sys.stdout, status_error_color,
                                          " " + version_string)

    # Verify the collected codes form a strictly increasing sequence.
    unique_version_codes.sort(
        key=utils.cmp_to_key(lambda a, b: version.cmp_alnum_token(a, b)))

    last_unique_code = None
    for unique_code in unique_version_codes:
        if not (last_unique_code is None or unique_code > last_unique_code):
            command_context.error(
                os.EX_DATAERR,
                _("Version {version} breaks the sequence.").format(
                    version=unique_code),
                None)
        last_unique_code = unique_code

    return context.result