def get_remote_commit(*, monorepo: GitMonorepoConfig, old_path: str) -> str:
    """
    Find the last `git subtree` split commit recorded for `old_path` by
    scanning the monorepo's git log for the subtree marker lines.

    Exits the program with status 1 when no marker is found.
    """
    log = subprocess.check_output(
        ["git", "log", "."], cwd=monorepo.project_folder
    ).decode("utf-8")

    SUBTREE_DIR_RE = re.compile(
        r"^\s*" + re.escape(f"git-subtree-dir: {old_path}") + "$")
    SUBTREE_COMMIT_RE = re.compile(r"^\s*git-subtree-split:\s*([0-9a-f]+)$")

    found_commit = False
    for line in log.splitlines():
        if not found_commit:
            # first locate the marker naming our subtree folder ...
            subtree_match = SUBTREE_DIR_RE.match(line)
            if subtree_match:
                found_commit = True
            continue

        # ... then the split commit hash that follows it
        m = SUBTREE_COMMIT_RE.match(line)
        if m:
            return m.group(1)

    # BUG FIX: the second red(...) was previously passed as the `bold`
    # argument of the first red(...) instead of being printed as a second
    # fragment; also dropped the placeholder-less f-string prefix.
    print(red("Unable to find any subtree commit for"),
          red(old_path, bold=True))
    sys.exit(1)
def list_projects(args, env, *, custom_zone: Optional[str] = None):
    """
    List the available zones and project files from the projects folder,
    logging the output through `env`.

    Sub-folders (except "layouts") are treated as zones; regular files are
    project definitions whose display name is read from their yml content.
    """
    folder = project_folder(args, custom_zone=custom_zone)
    zone = current_zone(args.internalRunMode) if custom_zone is None else custom_zone

    if zone:
        env.log(red("= " + zone, bold=True))

    items = sorted(os.listdir(folder))

    available_zones = [
        filename
        for filename in items
        if os.path.isdir(os.path.join(folder, filename)) and filename != "layouts"
    ]

    if available_zones:
        env.log("Available zones:")
        for available_zone in available_zones:
            env.log("- " + red(available_zone))

    env.log("Available projects:")
    for filename in items:
        if not os.path.isfile(os.path.join(folder, filename)):
            continue

        # BUG FIX: the file handle was previously opened and never closed
        with open(os.path.join(folder, filename)) as file_data:
            project_data = read_project_yml(file_data)

        env.log(
            "- "
            + cyan(os.path.splitext(filename)[0], bold=True)
            + ": "
            + project_data["name"]
        )
def validate_flags(folders: List[str], required: bool) -> None:
    """
    Abort with an error when both `--required` and explicit folders are
    given, since the two options are mutually exclusive.
    """
    if not (required and folders):
        return

    print(
        red("You can't specify both"),
        red("--required", bold=True),
        red("and"),
        red("folders", bold=True),
    )
    sys.exit(1)
def select_zone_str(*, args, env: BashEnvironment, zone: str):
    """
    Activate `zone` for the current run mode by exporting the matching zone
    environment variable, then list the projects available inside it.
    """
    env.log(red("zone ", bold=True), newline=False)

    envvar_name = f"CIPLOGIC_ARCHER_CURRENT_{args.internalRunMode.upper()}_ZONE"
    env.set_envvar(envvar_name, zone)

    list_projects(args, env, custom_zone=zone)
def validate_folders_to_update(
    monorepo: GitMonorepoConfig, folders: List[str], required: bool
) -> None:
    """
    Ensure every requested folder is a known monorepo project, and bail out
    early when nothing needs to be done.

    Exits with 1 when unknown folders were requested, and with 0 when the
    folder list is empty but a change was required.
    """
    unknown_folders = set(folders) - set(monorepo.repos)

    if unknown_folders:
        print(
            red("Error:"),
            red(", ".join(unknown_folders), bold=True),
            red("not found in monorepo projects."),
        )
        sys.exit(1)

    if required and not folders:
        print(green("Nothing changed locally.", bold=True), green("Nothing to do."))
        sys.exit(0)
def get_current_tag_version() -> str:
    """
    Compute a version string for the current git checkout.

    Preference order:
    1. the $BRANCH_NAME environment variable (already resolved by CI),
    2. an annotated git tag exactly on HEAD (via `git describe`),
    3. a synthetic `0.1.<escaped-branch>` from the current branch name.
    """
    # If we have BRANCH_NAME in the environment, we use that one, since
    # it's already found for us.
    env_branch_name = os.environ.get('BRANCH_NAME')
    if env_branch_name is not None:
        if '/' in env_branch_name:
            return f"0.1.{escape_tag_name(env_branch_name)}"
        return env_branch_name

    # We try to find if we have an annotated git tag.
    try:
        described: str = subprocess.check_output(
            ["git", "describe"]).decode('utf-8').strip()
        if not DIVERGED_FROM_RELEASE.match(described):
            # => we're on a tagged release
            return described
    except Exception as e:
        eprint(red(str(e)))

    # Fall back to a version derived from the current branch name.
    branch_name: str = subprocess.check_output(
        ["git", "rev-parse", "--abbrev-ref", "HEAD"]).decode('utf-8').strip()
    return f"0.1.{escape_tag_name(branch_name)}"
def _resolve_in_repo(monorepo: GitMonorepoConfig, path: str) -> str:
    """
    Resolve `path` to a path relative to the monorepo root, to allow working
    inside sub-folders.

    Exits the program when the path is outside the monorepo.
    """
    absolute_path = os.path.abspath(path)

    # BUG FIX: a bare startswith() prefix check wrongly accepted sibling
    # folders (e.g. /work/repo-backup when the root is /work/repo).
    # Compare on whole path components instead.
    root = monorepo.project_folder
    inside = absolute_path == root or absolute_path.startswith(
        root.rstrip(os.sep) + os.sep
    )

    if not inside:
        print(
            red(path, bold=True),
            red("resolved to"),
            red(absolute_path, bold=True),
            red("was not in the project folder:"),
            red(monorepo.project_folder, bold=True),
        )
        sys.exit(1)

    return os.path.relpath(absolute_path, monorepo.project_folder)
def diff(file_to_diff: str) -> None:
    """
    Diff a file against the template. If a project is not sent, the first
    template is being used.
    """
    project_parameters = load_project_parameters()

    if not project_parameters:
        print(red("Not in a project."), red(".ars", bold=True), red("file not found."))
        sys.exit(1)

    templates = cast(List[str], project_parameters["templates"])
    diff_file_from_project(
        ARS_PROJECTS_FOLDER,
        templates,
        file_to_diff,
        project_parameters,
    )
def edit(project: Optional[str], file_to_edit: str) -> None:
    """
    Edit a file from the project. If a template is not used, the first
    template from the project is used.
    """
    project_parameters = load_project_parameters()

    if not project_parameters:
        print(red("Not in a project."), red(".ars", bold=True), red("file not found."))
        sys.exit(1)

    if not project:
        project = project_parameters["templates"][0]

    # FIX: reuse the parameters loaded above instead of re-reading the .ars
    # file through a second load_project_parameters() call.
    edit_file_from_project(
        ARS_PROJECTS_FOLDER, project, file_to_edit, project_parameters
    )
def push(*, project, files_to_push):
    """
    Push files into a template. When no template is passed explicitly, the
    first template of the current project is used.
    """
    if not project:
        parameters = load_project_parameters()

        if not parameters:
            print(
                red("Not in a project."), red(".ars", bold=True), red("file not found.")
            )
            sys.exit(1)

        project = parameters["templates"][0]

    push_files_to_template(ARS_PROJECTS_FOLDER, project, files_to_push)
def read_project_definition(projects_folder: str, project_name: str) -> ProjectDefinition:
    """
    Read a template project definition from disk.

    Renders the project's HELP.md (if present), then merges the settings of
    any parent templates declared in the project's `.ars` file, recursively.
    Exits when the project folder does not exist.
    """
    full_project_path = os.path.join(projects_folder, project_name)

    # Simple sanity check to see if there is a project there, instead
    # of reporting an error.
    if not os.path.isdir(full_project_path):
        print(
            red("Folder"),
            red(f"'{full_project_path}'", bold=True),
            red("does not exist, or is not a folder."),
        )
        sys.exit(1)

    help_file_name = os.path.join(projects_folder, project_name, "HELP.md")
    if os.path.isfile(help_file_name):
        with open(help_file_name, encoding="utf-8") as help_file:
            mdvl.render(help_file.read(), cols=80)

    result = ProjectDefinition(name=project_name, projects_folder=projects_folder)

    template_settings_path = os.path.join(full_project_path, ".ars")
    if not os.path.isfile(template_settings_path):
        return result

    with open(template_settings_path, encoding="utf-8") as template_settings_content:
        # BUG FIX: yaml.safe_load() returns None for an empty .ars file,
        # which previously crashed the membership checks below.
        settings = yaml.safe_load(template_settings_content.read()) or {}

    if settings.get("noars"):
        result.generate_ars = False

    if "parents" in settings:
        for parent in settings["parents"]:
            parent_project = read_project_definition(projects_folder, parent)
            result.search_path.extend(parent_project.search_path)
            result.shell_commands.extend(parent_project.shell_commands)

    if "commands" in settings:
        result.shell_commands.extend(settings["commands"])

    return result
def read_monorepo_config() -> GitMonorepoConfig:
    """
    Walk up from the current directory until the monorepo configuration file
    is found, load it, and return the parsed GitMonorepoConfig.

    Exits with 1 when no configuration file exists in any parent folder.
    """
    monorepo_config_folder = os.path.abspath(os.curdir)

    while monorepo_config_folder and not os.path.isfile(
        os.path.join(monorepo_config_folder, MONOREPO_CONFIG_FILE)
    ):
        parent_folder = os.path.dirname(monorepo_config_folder)

        # reaching the filesystem root means the config file doesn't exist
        if parent_folder == monorepo_config_folder:
            print(
                red("Unable to find"),
                red(MONOREPO_CONFIG_FILE, bold=True),
                red("in any of the parents from"),
                red(os.path.abspath(os.curdir), bold=True),
            )
            sys.exit(1)

        monorepo_config_folder = parent_folder

    project_folder = monorepo_config_folder
    config_file_name = os.path.join(project_folder, MONOREPO_CONFIG_FILE)

    # FIX: read the config with an explicit encoding rather than the
    # platform-dependent default.
    with open(config_file_name, "rt", encoding="utf-8") as f:
        config_data = yaml.safe_load(f)

    # an explicit `branch` entry wins over the checked-out git branch
    if "branch" in config_data:
        current_branch = config_data["branch"]
    else:
        current_branch = get_current_git_branch(project_folder)

    squash = config_data.get("squash", True)
    synchronized_commits = _read_synchronized_commits(project_folder)

    repos: Dict[str, str] = dict()
    _merge_repos(path="", repos=repos, data=config_data["mappings"])

    return GitMonorepoConfig(
        repos=repos,
        current_branch=current_branch,
        project_folder=project_folder,
        synchronized_commits=synchronized_commits,
        squash=squash,
    )
def diff_file_from_project(
    projects_folder: str,
    project_names: List[str],
    file_to_edit: str,
    loaded_project_parameters: Optional[Dict[str, Union[str, List[str]]]],
) -> None:
    """
    Open vimdiff between a local file and its template counterpart, trying
    the given template projects in order.

    Exits with 2 when no template provides the file.
    """
    assert loaded_project_parameters

    for project_name in project_names:
        project_definition: ProjectDefinition = read_project_definition(
            projects_folder, project_name)

        # local path -> template path provided by this project
        path_mappings: Dict[str, str] = dict()
        process_folder(
            ".",
            project_definition.file_resolver(),
            loaded_project_parameters,
            path_mappings,
        )

        # FIX: idiomatic `not in` instead of `not x in y`
        if file_to_edit not in path_mappings:
            continue

        print(
            cyan("Diffing"),
            cyan(file_to_edit, bold=True),
            cyan("against project"),
            cyan(project_name, bold=True),
        )
        subprocess.call(["vimdiff", file_to_edit, path_mappings[file_to_edit]])
        return

    print(
        red("Unable to find file"),
        red(file_to_edit, bold=True),
        red("in projects"),
        red(str(project_names), bold=True),
    )
    sys.exit(2)
def download_artifact(path) -> None:
    """
    Download `path` from the first configured mirror that serves it, storing
    the response body in the local cache.

    Tries the mirrors in order and returns after the first success; if every
    mirror fails, the function returns without raising (best effort).
    """
    artifact_folder = cache_path(os.path.dirname(path))
    os.makedirs(artifact_folder, exist_ok=True)

    for mirror in config.data.mirrors:
        print(f"Trying to fetch {mirror.url}{path}")

        auth = None
        if mirror.auth:
            auth = (mirror.auth["user"], mirror.auth["pass"])

        r = requests.get(f"{mirror.url}{path}", auth=auth)

        if not r.ok:
            # BUG FIX: the status checks used to be two independent `if`s, so
            # a 401 response also fell into the generic `else` branch and
            # printed two messages. Use a single if/elif/else chain.
            if r.status_code == 401:
                print(
                    termcolor_util.red(
                        f"401 UNAUTHORIZED: {mirror.url} failed to resolve {path}: {r}"
                    )
                )
            elif r.status_code == 403:
                print(
                    termcolor_util.red(
                        f"403 FORBIDDEN: {mirror.url} failed to resolve {path}: {r}"
                    )
                )
            else:
                print(
                    termcolor_util.yellow(f"{mirror.url} failed to resolve {path}: {r}")
                )
            continue

        with open(cache_path(path), "wb") as f:
            f.write(r.content)

        return
def print_single_tracked_version(
        version_name: str, versions_to_process: TrackedVersionSet) -> None:
    """
    Print the value of a single tracked version, looked up by name.

    Exits with 1 (listing the available names) when no tracked version
    matches.
    """
    tracked_version = find(lambda it: it.name == version_name, versions_to_process)

    if tracked_version:
        print(tracked_version.version)
        return

    available_names = ", ".join(map(lambda it: it.name, versions_to_process))
    eprint(red(
        "Tracked version '%s' does not exist. Available are: "
        "%s." % (version_name, available_names)
    ))
    sys.exit(1)
def list_folder_in_project(
    projects_folder: str,
    folder_to_list: str,
    loaded_project_parameters: Optional[Dict[str, Union[str, List[str]]]],
) -> None:
    """
    List the entries of `folder_to_list`, annotating each with the template
    project that owns it, or `(local)` when it only exists locally.

    Directories print in blue and executables in green; exits with a
    distinct error code when no usable `.ars` project configuration exists.
    """
    # While it's possible to have multiple templates in the project, when listing the
    # current folder, only the first template will be used.
    if not loaded_project_parameters:
        print(red("Unable to find a project. .ars file is missing."))
        sys.exit(1)

    if "templates" not in loaded_project_parameters:
        print(red("The .ars file doesn't contain any templates."))
        sys.exit(2)

    if not loaded_project_parameters["templates"]:
        print(red("The .ars file templates section is empty."))
        sys.exit(3)

    project_name = loaded_project_parameters["templates"][0]
    project_definition: ProjectDefinition = read_project_definition(
        projects_folder, project_name)

    # local path -> FileEntry of the template file mapped onto that path
    path_mappings: Dict[str, FileEntry] = dict()
    process_folder(
        folder_to_list,
        project_definition.file_resolver().subentry(path=folder_to_list),
        loaded_project_parameters,
        path_mappings,
    )

    local_files = os.listdir(folder_to_list)

    def is_dir(name: str) -> bool:
        # helper for the sort key below
        return os.path.isdir(os.path.join(folder_to_list, name))

    # directories first, then case-insensitive alphabetical order
    local_files.sort(key=lambda it: (not is_dir(it), it.lower()))

    for file in local_files:
        local_path_name = os.path.normpath(os.path.join(folder_to_list, file))

        if local_path_name in path_mappings:
            # the entry is provided by a template: show the owning project
            file_entry = path_mappings[local_path_name]

            if file_entry.is_dir:
                print(
                    blue(file, bold=True),
                    gray(f"({file_entry.owning_project})", bold=True),
                )
            elif file_entry.is_exe:
                print(
                    green(file, bold=True),
                    gray(f"({file_entry.owning_project})", bold=True),
                )
            else:
                print(file, gray(f"({file_entry.owning_project})", bold=True))
        else:
            # the entry exists only locally, not in any template
            if os.path.isdir(local_path_name):
                print(blue(file, bold=True), red(f"(local)", bold=False))
            elif os.access(local_path_name, os.X_OK):
                print(green(file, bold=True), red(f"(local)", bold=False))
            else:
                print(file, red(f"(local)", bold=False))
def generate(ars, auto, keep, template, parameters):
    """
    Generate or update the project sources
    """
    loaded_project_parameters = load_project_parameters()

    if not template and not loaded_project_parameters:
        print(red("You need to pass a project name to generate."))

        if os.path.isdir(ARS_PROJECTS_FOLDER):
            print("Available projects (%s):" % cyan(ARS_PROJECTS_FOLDER))
            list_project_folder(ARS_PROJECTS_FOLDER, None)
        else:
            print(f"{ARS_PROJECTS_FOLDER} folder doesn't exist.")

        sys.exit(1)

    # if we have arguments, we need to either create, or augument the projectParameters
    # with the new settings.
    project_parameters = (
        loaded_project_parameters if loaded_project_parameters else dict()
    )

    # we convert the old projects into the new format.
    if "NAME" in project_parameters:
        project_parameters["templates"] = [project_parameters["NAME"]]
        del project_parameters["NAME"]

    # BUG FIX: the old code read project_parameters["templates"] before
    # checking the key exists, raising KeyError for a parameters dict with
    # no `templates` entry (and making the fallback elif unreachable).
    # Check for the missing key first.
    if template and "templates" not in project_parameters:
        project_parameters["templates"] = [template]
    elif template and template not in project_parameters["templates"]:
        project_parameters["templates"].append(template)

    # we iterate the rest of the parameters, and augument the projectParameters
    for i, parameter in enumerate(parameters):
        m = PARAM_RE.match(parameter)
        # NOTE(review): assumes PARAM_RE matches any parameter string —
        # a non-matching one would make `m` None and fail below; confirm.
        param_name = m.group(1)
        param_value = m.group(3) if m.group(3) else True

        project_parameters[param_name] = param_value
        project_parameters[f"arg{i}"] = parameter

    for project_name in project_parameters["templates"]:
        project_definition: ProjectDefinition = read_project_definition(
            ARS_PROJECTS_FOLDER, project_name
        )

        # Generate the actual project.
        print(
            cyan("Generating"),
            cyan(project_name, bold=True),
            cyan("with"),
            cyan(str(project_parameters), bold=True),
        )

        if project_definition.generate_ars and ars:
            with open(".ars", "w", encoding="utf8") as json_file:
                yaml.safe_dump(project_parameters, json_file)

        process_folder(
            ".",
            project_definition.file_resolver(),
            project_parameters,
            auto_resolve_conflicts=auto,
            keep_current_files_on_conflict=keep,
        )

        # FIX: renamed the compiled-template local, which used to shadow the
        # `template` parameter.
        for command in project_definition.shell_commands:
            print(cyan("Running"), cyan(command, bold=True))
            command_template = pybars.Compiler().compile(command)
            rendered_command = command_template(project_parameters)
            os.system(rendered_command)
def red(text: str, bold=False, underline=False) -> str:
    """
    Color `text` red via termcolor_util, unless color output is disabled in
    the configuration — in which case the text is returned unchanged.
    """
    if config.current.boolean.color:
        return termcolor_util.red(text, bold=bold, underline=underline)

    return text
def process_folder(
    current_path: str,
    file_resolver: FileResolver,
    project_parameters: Dict[str, Union[str, List[str]]],
    auto_resolve_conflicts: bool,
    keep_current_files_on_conflict: bool,
) -> None:
    """
    Recursively process the handlebars templates for the given project.

    Plain files are copied or linked, `.hbs` templates are rendered with
    `project_parameters`, and conflicts with locally modified files are
    resolved per the two flags: overwrite (`auto_resolve_conflicts`), keep
    local (`keep_current_files_on_conflict`), or interactive diff.
    """
    for file_entry in file_resolver.listdir():
        file: ParsedFile = parse_file_name(file_entry.name, project_parameters)

        full_local_path = os.path.join(current_path, file.name)
        full_file_path = file_entry.absolute_path

        # template metadata files are never materialized in the project
        if file_entry.name == "HELP.md" or file_entry.name == ".ars":
            print(cyan("Ignoring file :"), cyan(file_entry.name, bold=True))
            continue

        if file_entry.is_dir:
            if os.path.isdir(full_local_path):
                print(cyan("Already exists folder:"), cyan(full_local_path, bold=True))
            else:
                print(
                    yellow("Creating folder :"), yellow(full_local_path, bold=True)
                )
                os.makedirs(full_local_path)

            process_folder(
                full_local_path,
                file_resolver.subentry(file_entry),
                project_parameters,
                auto_resolve_conflicts,
                keep_current_files_on_conflict,
            )
            continue

        if file.keep_existing and os.path.isfile(full_local_path):
            print(cyan("Keeping regular file :"), cyan(full_local_path, bold=True))
            continue

        if not file.hbs_template:
            # ---- plain (non-template) file ----
            if not os.path.isfile(full_local_path):
                if os.path.islink(full_file_path):
                    print(
                        yellow("Linking regular file :"),
                        yellow(full_local_path, bold=True),
                    )
                else:
                    print(
                        yellow("Copying regular file :"),
                        yellow(full_local_path, bold=True),
                    )

                copy_or_link(full_file_path, full_local_path)
                continue

            if filecmp.cmp(full_file_path, full_local_path):
                print(cyan("No update needed :"), cyan(full_local_path, bold=True))
                continue

            if is_first_file_newer(full_local_path, full_file_path):
                print(
                    cyan("No update needed ") + cyan("date", bold=True) + cyan(":"),
                    cyan(full_local_path, bold=True),
                )
                continue

            # we have a conflict.
            if auto_resolve_conflicts:
                print(
                    red("Conflict"),
                    red("auto", bold=True),
                    red(" :"),
                    red(full_local_path, bold=True),
                )
                copy_or_link(full_file_path, full_local_path)
                continue

            if keep_current_files_on_conflict:
                print(
                    red("Conflict"),
                    red("keep", bold=True),
                    red(" :"),
                    red(full_local_path, bold=True),
                )
                os.utime(full_local_path, (now(), now()))
                continue

            # interactive resolution: keep a .orig backup and diff against it
            full_local_path_orig = full_local_path + ".orig"
            shutil.copy(full_local_path, full_local_path_orig, follow_symlinks=True)
            copy_or_link(full_file_path, full_local_path)

            # if 'linux' in sys.platform:
            execute_diff(full_local_path, full_local_path_orig)

            print(red("Conflict resolved :"), red(full_local_path, bold=True))
            continue

        # ---- handlebars template file ----
        if os.path.islink(full_file_path):
            # symlinked templates are not supported
            print(red("FATAL ERROR", bold=True))
            print(red("Template link found :"), red(full_file_path, bold=True))
            sys.exit(1)

        with open(full_file_path, "r", encoding="utf8") as template_file:
            template_content = template_file.read()

        template = pybars.Compiler().compile(template_content)
        content = template(project_parameters)

        if not os.path.isfile(full_local_path):
            print(yellow("Parsing HBS template :"), yellow(full_local_path, bold=True))

            with open(full_local_path, "w", encoding="utf8") as content_file:
                content_file.write(content)

            shutil.copystat(full_file_path, full_local_path)
            continue

        # BUG FIX: the comparison used to leak an open file handle
        with open(full_local_path, "r", encoding="utf8") as existing_file:
            existing_content = existing_file.read()

        if content == existing_content:
            print(cyan("No update needed :"), cyan(full_local_path, bold=True))
            continue

        if is_first_file_newer(full_local_path, full_file_path):
            print(
                cyan("No update needed ") + cyan("date", bold=True) + cyan(":"),
                cyan(full_local_path, bold=True),
            )
            continue

        # we have a conflict.
        if auto_resolve_conflicts:
            print(
                red("Conflict"),
                red("auto", bold=True),
                red("HBS :"),
                red(full_local_path, bold=True),
            )

            with open(full_local_path, "w", encoding="utf8") as content_file:
                content_file.write(content)

            continue

        if keep_current_files_on_conflict:
            # BUG FIX: this branch printed "auto" although it keeps the local
            # file; label it "keep", matching the non-HBS branch above.
            print(
                red("Conflict"),
                red("keep", bold=True),
                red("HBS :"),
                red(full_local_path, bold=True),
            )
            os.utime(full_local_path, (now(), now()))
            continue

        # we have a conflict
        full_local_path_orig = full_local_path + ".orig"
        shutil.copy(full_local_path, full_local_path_orig, follow_symlinks=True)

        with open(full_local_path, "w", encoding="utf8") as content_file:
            content_file.write(content)

        # if 'linux' in sys.platform:
        execute_diff(full_local_path, full_local_path_orig)

        print(red("Conflict resolved HBS:"), red(full_local_path, bold=True))
import addict
import yaml
from termcolor_util import red

# FIX: use logging.getLogger() so the logger participates in the standard
# logging hierarchy and configuration, instead of instantiating Logger
# directly (which bypasses logging.config/basicConfig).
LOG = logging.getLogger(__name__)

# read the configuration file, and return some "decent" error
_config_file_path = "mvnproxy.yml"

# fall back to ~/.mvnproxy when there's no mvnproxy.yml in the working dir
if not os.path.isfile(_config_file_path):
    home_folder = os.environ.get("HOME") or ""
    _config_file_path = os.path.join(home_folder, ".mvnproxy")

if not os.path.isfile(_config_file_path):
    print(
        red("No configuration given.", bold=True),
        red("Unable to find"),
        red("mvnproxy.yml", bold=True),
        red("nor"),
        red(_config_file_path, bold=True),
    )
    sys.exit(1)

# interpolate environment variables
with open(_config_file_path, "rt", encoding="utf-8") as f:
    config_content_template = f.read()

config_content = config_content_template.format(**os.environ)
data = addict.Dict(cast(Dict, yaml.safe_load(config_content)))

if not data.mirrors:
    # NOTE(review): this only warns and keeps running with an empty mirror
    # list — confirm whether it should be a hard exit instead.
    print(red("No mirrors defined in"), red(_config_file_path, bold=True))
def main() -> None:
    """
    Entry point of the version manager: parse the CLI arguments, load the
    tracked versions, and patch every file referenced by a version pattern.

    Exit codes: 0 on success/info commands, 2 when a file glob matches
    nothing, 3 when a pattern's match count differs from the expected one.
    """
    colorama.init()

    parser = argparse.ArgumentParser(description='Versions processor')
    parser.add_argument(
        '--display', '-d',
        metavar='NAME',
        nargs=1,
        help='Display the version of a single tracked version.')
    parser.add_argument(
        '--all', '-a', '--list',
        action='store_true',
        help='Display all the tracked versions and their values.')
    parser.add_argument('--set', '-s',
                        nargs='+',
                        metavar="NAME=VAL",
                        help='Set values overriding what\'s in the yml files.')
    parser.add_argument('--load', '-l',
                        metavar="FILE",
                        help='Override versions from the given yml file.')
    parser.add_argument(
        '-t', '--tag-name', '--tag',
        action='store_true',
        help="Get the current name to use in general tags. If the "
             "branch name can't be detected from the git repo, the "
             "$BRANCH_NAME environment variable will be used.")
    parser.add_argument(
        '--ignore-missing-parents',
        action='store_true',
        help="Ignore missing parents, and simply don't patch the "
             "values. Upstream values are still being patched if existing.")
    parser.add_argument(
        '--version',
        action='store_true',
        help='Show the currently installed program version (master)')

    argv: ProgramArguments = cast(ProgramArguments, parser.parse_args(sys.argv[1:]))

    if argv.version:
        print(cyan("version-manager: master"))
        sys.exit(0)

    if argv.tag_name:
        print_current_tag_version()
        sys.exit(0)

    default_settings_file = path.realpath(
        path.join(os.getcwd(), 'versions.json'))

    override_parameters = get_parameters_from_file(argv.load)
    override_parameters = get_parameter_values(override_parameters, argv.set)

    versions_to_process = read_settings_file(default_settings_file,
                                             override_parameters,
                                             argv.ignore_missing_parents)

    # Display a single tracked version
    if argv.display:
        print_single_tracked_version(argv.display[0], versions_to_process)
        sys.exit(0)

    # Display all tracked versions.
    if argv.all:
        print_all_tracked_versions(versions_to_process)
        sys.exit(0)

    eprint(cyan("Running on %s" % sys.version))

    # Resolve every tracked file glob, collecting the patterns that apply
    # to each concrete file.
    files_to_process: Dict[str, List[Pattern]] = dict()

    for tracked_version in versions_to_process:
        for file_name, version_pattern in tracked_version.files.items():
            resolved_names = glob.glob(file_name)

            if not resolved_names:
                print(red('Unable to find any files for glob %s.' % file_name))
                sys.exit(2)

            for resolved_name in resolved_names:
                # FIX: setdefault() instead of the manual contains-check
                files_to_process.setdefault(resolved_name, []).append(version_pattern)

    for resolved_name, version_patterns in files_to_process.items():
        with open(resolved_name, 'r', encoding='utf-8') as resolved_file:
            content = resolved_file.read()

        new_content = content
        print(cyan("Patching %s:" % resolved_name))

        for version_pattern in version_patterns:
            tracked_version = version_pattern.tracked_version
            print(
                green('* %s@%s' % (tracked_version.name, tracked_version.version)))

            new_content = version_pattern.apply_pattern(new_content)

            if version_pattern.match_count != version_pattern.expected_count:
                print(
                    red('Got %d matches instead of %d.' % (
                        version_pattern.match_count,
                        version_pattern.expected_count)))
                sys.exit(3)

        if content == new_content:
            print(
                cyan("Content for %s is not changed. Won't patch it." % resolved_name))
            continue

        with open(resolved_name, 'w', encoding='utf-8') as output:
            output.write(new_content)

        print(yellow('Updated %s' % resolved_name))

    colorama.deinit()
    sys.exit(0)
def move(old_path: str, new_path: str) -> None:
    """
    git mv old/path new/path

    Moves a subtree-managed folder inside the monorepo, committing with a
    message that carries the git-subtree markers for the new location so
    later subtree operations can still locate the remote split commit.
    """
    monorepo = read_monorepo_config()

    # normalize both paths to be relative to the monorepo root
    old_path = _resolve_in_repo(monorepo, old_path)
    new_path = _resolve_in_repo(monorepo, new_path)

    # only folders registered as subtree mappings can be moved
    if old_path not in monorepo.repos:
        print(
            red(old_path, bold=True),
            red("not defined in"),
            red(MONOREPO_CONFIG_FILE, bold=True),
        )
        sys.exit(1)

    print(cyan("moving"), cyan(old_path, bold=True),
          cyan("->"), cyan(new_path, bold=True))

    current_commit = get_current_commit(project_folder=monorepo.project_folder)
    remote_commit = get_remote_commit(monorepo=monorepo, old_path=old_path)

    # squashed subtrees only record the split commit; non-squashed ones also
    # record the mainline commit
    if monorepo.squash:
        message = textwrap.dedent(f"""\
            git-monorepo: move {old_path} -> {new_path}

            git-subtree-dir: {new_path}
            git-subtree-split: {remote_commit}
            """)
    else:
        # FIXME: I'm not sure about the mainline thing, it is supposed
        # to be the commit in the current tree, presumably for the
        # subtree to have an easier time to decide what commits
        # get in.
        message = textwrap.dedent(f"""\
            git-monorepo: move {old_path} -> {new_path}

            git-subtree-dir: {new_path}
            git-subtree-mainline: {current_commit}
            git-subtree-split: {remote_commit}
            """)

    # we ensure the path exists
    os.makedirs(os.path.dirname(new_path), exist_ok=True)

    subprocess.check_call(["git", "mv", old_path, new_path],
                          cwd=monorepo.project_folder)
    subprocess.check_call(
        ["git", "commit", "-m", message],
        cwd=monorepo.project_folder,
    )

    # keep the in-memory mapping in sync with the on-disk move
    monorepo.repos[new_path] = monorepo.repos[old_path]
    del monorepo.repos[old_path]

    # FIXME: probably wrong location, and wrong commit
    write_synchronized_commits(monorepo, repo=new_path)

    print(
        "⚠️ ⚠️ ⚠️ ",
        yellow("WARNING", bold=True),
        "⚠️ ⚠️ ⚠️ ",
        yellow("don't forget to patch"),
        yellow(MONOREPO_CONFIG_FILE, bold=True),
        yellow("with the new location, and remove the old entry"),
    )
def report_missing_settings_file(settings_file: str) -> None:
    """
    Report that the given settings file could not be found.
    """
    message = "%s configuration file is missing." % settings_file
    print(red(message))