def load_project_parameters() -> Optional[Dict[str, Union[str, List[str]]]]:
    """Read previously saved project settings from a local `.ars` file.

    Returns the parsed YAML mapping from `.ars` in the current working
    directory, or None when no such file exists.
    """
    if not os.path.isfile(".ars"):
        return None

    with open(".ars", "r", encoding="utf8") as ars_file:
        parameters: Optional[Dict[str, Union[str, List[str]]]] = yaml.safe_load(ars_file)

    print(
        cyan("Using already existing"),
        cyan("'.ars'", bold=True),
        cyan("file settings:"),
        cyan(str(parameters), bold=True),
    )

    return parameters
def list_projects(args, env, *, custom_zone: Optional[str] = None):
    """List the available zones and projects found in the project folder.

    :param args: parsed CLI arguments; only `internalRunMode` is read here
    :param env: logging environment; output goes through `env.log`
    :param custom_zone: when given, overrides the zone derived from `args`
    """
    folder = project_folder(args, custom_zone=custom_zone)
    zone = current_zone(args.internalRunMode) if custom_zone is None else custom_zone

    if zone:
        env.log(red("= " + zone, bold=True))

    # sorted() already returns a list; no need to wrap it again.
    items = sorted(os.listdir(folder))

    # Every sub-folder except the reserved `layouts` one is a zone.
    available_zones = [
        filename
        for filename in items
        if os.path.isdir(os.path.join(folder, filename)) and filename != "layouts"
    ]

    if available_zones:
        env.log("Available zones:")
        for available_zone in available_zones:
            env.log("- " + red(available_zone))

    env.log("Available projects:")
    for filename in items:
        if not os.path.isfile(os.path.join(folder, filename)):
            continue

        # FIX: the previous code opened the file and never closed it,
        # leaking a file handle per listed project.
        with open(os.path.join(folder, filename)) as file_data:
            project_data = read_project_yml(file_data)

        env.log(
            "- "
            + cyan(os.path.splitext(filename)[0], bold=True)
            + ": "
            + project_data["name"]
        )
def main() -> None:
    """Compile each `.ui` file in the `ui/` folder whose target is out of date."""
    for entry in os.listdir("ui"):
        if not entry.endswith(".ui"):
            continue

        ui_file = os.path.join("ui", entry)
        target_file = find_target_file(ui_file)

        # The generated file is already newer than its source: nothing to do.
        if is_newer(ui_file, target_file):
            print(
                cyan("IGNORED", bold=True),
                cyan(target_file, bold=True),
                cyan("is newer than"),
                cyan(ui_file, bold=True),
            )
            continue

        ui_compile(ui_file, target_file)
def main() -> None:
    """Regenerate the protobuf/gRPC outputs for every stale source in FOLDER."""
    files_changed = False

    for entry in os.listdir(FOLDER):
        if not entry.endswith(EXTENSION):
            continue

        grpc_file = os.path.join(FOLDER, entry)
        generated_proto_file = find_target_file(grpc_file, suffix="_pb2.py")
        generated_proto_pyi = find_target_file(grpc_file, suffix="_pb2.pyi")
        generated_grpc_file = find_target_file(grpc_file, suffix="_pb2_grpc.py")

        # Both generated files are already newer than the source: skip it.
        if is_newer(grpc_file, generated_proto_file) and is_newer(
                grpc_file, generated_grpc_file):
            print(
                cyan("IGNORED", bold=True),
                cyan(grpc_file, bold=True),
                cyan("is older than both"),
                cyan(generated_proto_file, bold=True),
                cyan("and"),
                cyan(generated_grpc_file, bold=True),
            )
            continue

        files_changed = True
        grpc_compile(grpc_file, generated_grpc_file, generated_proto_file,
                     generated_proto_pyi)
def diff_file_from_project(
    projects_folder: str,
    project_names: List[str],
    file_to_edit: str,
    loaded_project_parameters: Optional[Dict[str, Union[str, List[str]]]],
) -> None:
    """Open `vimdiff` between a local file and its template counterpart.

    Searches each project template in turn for a mapping of `file_to_edit`;
    diffs against the first project that provides it, then returns. Exits
    with status 2 when no project maps the file.

    :param projects_folder: folder holding the project template definitions
    :param project_names: project templates to search, in order
    :param file_to_edit: local path to diff against its template source
    :param loaded_project_parameters: parameters used to resolve file names
    """
    assert loaded_project_parameters

    for project_name in project_names:
        project_definition: ProjectDefinition = read_project_definition(
            projects_folder, project_name)

        # Filled in by process_folder with local-path -> template-path entries.
        path_mappings: Dict[str, str] = {}

        process_folder(
            ".",
            project_definition.file_resolver(),
            loaded_project_parameters,
            path_mappings,
        )

        # FIX: idiomatic membership test (`not in` instead of `not x in y`).
        if file_to_edit not in path_mappings:
            continue

        print(
            cyan("Diffing"),
            cyan(file_to_edit, bold=True),
            cyan("against project"),
            cyan(project_name, bold=True),
        )
        subprocess.call(["vimdiff", file_to_edit, path_mappings[file_to_edit]])
        return

    print(
        red("Unable to find file"),
        red(file_to_edit, bold=True),
        red("in projects"),
        red(str(project_names), bold=True),
    )
    sys.exit(2)
def version():
    """
    Print the current application version banner and exit with status 0.
    """
    # FIX: the previous code used dedent(r"""\ ...). In a *raw* string the
    # backslash after the opening quotes is a literal character, not a line
    # continuation, so the banner printed a stray `\` on its first line and
    # dedent() could not strip the indentation (that line had no leading
    # whitespace). Starting the raw string with a plain newline and stripping
    # it after dedent() keeps the ASCII-art backslashes intact.
    banner = dedent(
        r"""
          __ _ _ __ ___  ___  _ __  (_)___| |_
         / _` | '__/ __|/ _ \| '_ \ | / __| __|
        | (_| | |  \__ \ (_) | | | || \__ \ |_
         \__,_|_|  |___/\___/|_| |_||_|___/\__|

        version: 0.1.master
        """
    ).lstrip("\n")

    print(cyan(banner, bold=True))
    sys.exit(0)
from termcolor_util import yellow, green, blue, red, gray, cyan, magenta, white

if __name__ == '__main__':
    print(cyan("termcolor_util"), cyan("v1.0.2", bold=True))

    # Show each color's own name rendered in every supported style combination.
    for paint in (yellow, green, blue, red, gray, cyan, magenta, white):
        label = paint.__name__
        print(
            paint(label),
            paint(label, underline=True),
            paint(label, bold=True),
            paint(label, bold=True, underline=True),
        )
def print_all_tracked_versions(versions_to_process: TrackedVersionSet) -> None:
    """Print each tracked version as `name => version`, one per line."""
    for tracked in versions_to_process:
        print(cyan(tracked.name, bold=True), '=>', cyan(tracked.version))
def main() -> None:
    """Entry point of the version manager.

    Parses the CLI arguments, loads the tracked versions (optionally
    overridden from a file and/or `--set` pairs), then either displays
    version information or patches every file matched by the tracked
    version globs. Exits non-zero on missing files (2) or pattern match
    count mismatches (3); exits 0 on success and for all display modes.
    """
    colorama.init()

    parser = argparse.ArgumentParser(description='Versions processor')
    parser.add_argument(
        '--display', '-d',
        metavar='NAME',
        nargs=1,
        help='Display the version of a single tracked version.')
    parser.add_argument(
        '--all', '-a', '--list',
        action='store_true',
        help='Display all the tracked versions and their values.')
    parser.add_argument(
        '--set', '-s',
        nargs='+',
        metavar="NAME=VAL",
        help='Set values overriding what\'s in the yml files.')
    parser.add_argument(
        '--load', '-l',
        metavar="FILE",
        help='Override versions from the given yml file.')
    parser.add_argument(
        '-t', '--tag-name', '--tag',
        action='store_true',
        help="Get the current name to use in general tags. If the "
             "branch name can't be detected from the git repo, the "
             "$BRANCH_NAME environment variable will be used.")
    parser.add_argument(
        '--ignore-missing-parents',
        action='store_true',
        help="Ignore missing parents, and simply don't patch the "
             "values. Upstream values are still being patched if existing.")
    parser.add_argument(
        '--version',
        action='store_true',
        help='Show the currently installed program version (master)')

    argv: ProgramArguments = cast(ProgramArguments, parser.parse_args(sys.argv[1:]))

    # Informational modes exit immediately, before any settings are loaded.
    if argv.version:
        print(cyan("version-manager: master"))
        sys.exit(0)

    if argv.tag_name:
        print_current_tag_version()
        sys.exit(0)

    # The settings file is always resolved relative to the current directory.
    default_settings_file = path.realpath(
        path.join(os.getcwd(), 'versions.json'))

    # --load overrides come first, then --set pairs are applied on top.
    override_parameters = get_parameters_from_file(argv.load)
    override_parameters = get_parameter_values(override_parameters, argv.set)

    versions_to_process = read_settings_file(default_settings_file,
                                             override_parameters,
                                             argv.ignore_missing_parents)

    # Display a single tracked version
    if argv.display:
        print_single_tracked_version(argv.display[0], versions_to_process)
        sys.exit(0)

    # Display all tracked versions.
    if argv.all:
        print_all_tracked_versions(versions_to_process)
        sys.exit(0)

    eprint(cyan("Running on %s" % sys.version))

    # Invert the mapping: collect, per resolved file path, every version
    # pattern that should be applied to it, so each file is read and
    # written exactly once.
    files_to_process: Dict[str, List[Pattern]] = dict()

    for tracked_version in versions_to_process:
        for file_name, version_pattern in tracked_version.files.items():
            resolved_names = glob.glob(file_name)

            # A glob matching nothing is a hard error.
            if not resolved_names:
                print(red('Unable to find any files for glob %s.' % file_name))
                sys.exit(2)

            for resolved_name in resolved_names:
                if resolved_name in files_to_process:
                    file_patterns = files_to_process[resolved_name]
                else:
                    file_patterns = []
                    files_to_process[resolved_name] = file_patterns

                file_patterns.append(version_pattern)

    # Apply all patterns to each file, validating the expected match counts.
    for resolved_name, version_patterns in files_to_process.items():
        with open(resolved_name, 'r', encoding='utf-8') as resolved_file:
            content = resolved_file.read()

        new_content = content
        print(cyan("Patching %s:" % resolved_name))

        for version_pattern in version_patterns:
            tracked_version = version_pattern.tracked_version
            print(
                green('* %s@%s' % (tracked_version.name,
                                   tracked_version.version)))
            new_content = version_pattern.apply_pattern(new_content)

            # A wrong match count means the pattern drifted from the file;
            # abort rather than silently half-patching.
            if version_pattern.match_count != version_pattern.expected_count:
                print(
                    red('Got %d matches instead of %d.' %
                        (version_pattern.match_count,
                         version_pattern.expected_count)))
                sys.exit(3)

        # Only rewrite files whose content actually changed.
        if content == new_content:
            print(
                cyan("Content for %s is not changed. Won't patch it." %
                     resolved_name))
            continue

        with open(resolved_name, 'w', encoding='utf-8') as output:
            output.write(new_content)

        print(yellow('Updated %s' % resolved_name))

    colorama.deinit()
    sys.exit(0)
def _print_process_task_mappings(self, *, process: Process, indent=0) -> None:
    """Recursively print the task -> implementation mapping of a process.

    Prints one header line for `process` itself, then one line per task:
    script tasks map to the embedded BPMN script, unmapped tasks to NONE,
    and implemented tasks to their Python function and source file.
    Events (except message events) and plain gateways are skipped.

    :param process: the (sub-)process whose tasks should be listed
    :param indent: current indentation level (number of indent units)
    """
    print(
        "{indent}{type} {name} ({id})".format(
            indent=" " * indent,
            # The root process is labeled "process"; nested ones "sub-process".
            type=green("process")
            if process is self.adhesive_process.process
            else green("sub-process"),
            name=yellow(process.name, bold=True),
            id=white(process.id),
        )
    )

    indent += 1

    for task_id, task in process.tasks.items():
        # May be None for tasks with no registered implementation.
        task_impl = self.tasks_impl.get(task_id, None)

        if isinstance(task, Process):
            # NOTE(review): recursion passes indent + 1 even though indent
            # was already incremented above — nested sub-processes indent by
            # two units. Looks intentional but worth confirming.
            self._print_process_task_mappings(process=task, indent=indent + 1)
            continue

        # Skip non-message events and plain gateways entirely.
        if isinstance(task, Event) and not isinstance(task, MessageEvent):
            continue

        if isinstance(task, Gateway):
            continue

        if isinstance(task, ScriptTask):
            print(
                "{indent}{type} {name} ({id}) -> {none}".format(
                    indent=" " * indent,
                    type=green("script"),
                    name=yellow(task.name, bold=True),
                    id=white(task.id),
                    none=cyan("<bpmn embedded script>", bold=True),
                )
            )
            continue

        # No implementation registered for this task.
        if not task_impl:
            print(
                "{indent}{type} {name} ({id}) -> {none}".format(
                    indent=" " * indent,
                    type=green("task"),
                    name=yellow(task.name, bold=True),
                    id=white(task.id),
                    none=red("NONE", bold=False),
                )
            )
            continue

        # NOTE(review): ComplexGateway reaching this point implies it is not
        # caught by the isinstance(task, Gateway) check above — presumably it
        # is not a Gateway subclass here; verify against the type hierarchy.
        task_type = "task"
        if isinstance(task, ComplexGateway):
            task_type = "gateway"
        elif isinstance(task, UserTask):
            task_type = "user task"

        print(
            "{indent}{type} {name} ({id}) -> {fn} ({file})".format(
                indent=" " * indent,
                type=green(task_type),
                name=yellow(task.name, bold=True),
                id=white(task.id),
                file=inspect.getfile(task_impl.code),
                fn=cyan(task_impl.code.__name__, bold=True),
            )
        )
def process_folder(
    current_path: str,
    file_resolver: FileResolver,
    project_parameters: Dict[str, Union[str, List[str]]],
    auto_resolve_conflicts: bool,
    keep_current_files_on_conflict: bool,
) -> None:
    """
    Recursively process the handlebars templates for the given project.

    Walks the template entries from `file_resolver`, mirroring folders into
    `current_path`, copying/linking plain files, and rendering `.hbs`
    templates with `project_parameters`. When a local file differs from the
    (newer) template output, the conflict is either auto-resolved, kept, or
    handed to an interactive diff.

    :param current_path: local folder currently being generated
    :param file_resolver: yields the template entries for this folder
    :param project_parameters: values rendered into the handlebars templates
    :param auto_resolve_conflicts: on conflict, overwrite the local file
    :param keep_current_files_on_conflict: on conflict, keep the local file
    """
    for file_entry in file_resolver.listdir():
        file: ParsedFile = parse_file_name(file_entry.name, project_parameters)
        full_local_path = os.path.join(current_path, file.name)
        full_file_path = file_entry.absolute_path

        # Template bookkeeping files are never copied into the project.
        if file_entry.name == "HELP.md" or file_entry.name == ".ars":
            print(cyan("Ignoring file :"), cyan(file_entry.name, bold=True))
            continue

        if file_entry.is_dir:
            if os.path.isdir(full_local_path):
                print(cyan("Already exists folder:"), cyan(full_local_path, bold=True))
            else:
                print(
                    yellow("Creating folder :"), yellow(full_local_path, bold=True)
                )
                os.makedirs(full_local_path)

            process_folder(
                full_local_path,
                file_resolver.subentry(file_entry),
                project_parameters,
                auto_resolve_conflicts,
                keep_current_files_on_conflict,
            )
            continue

        # Files flagged keep-existing are never overwritten once present.
        if file.keep_existing and os.path.isfile(full_local_path):
            print(cyan("Keeping regular file :"), cyan(full_local_path, bold=True))
            continue

        if not file.hbs_template:
            # Plain (non-template) file: copy or link it across.
            if not os.path.isfile(full_local_path):
                if os.path.islink(full_file_path):
                    print(
                        yellow("Linking regular file :"),
                        yellow(full_local_path, bold=True),
                    )
                else:
                    print(
                        yellow("Copying regular file :"),
                        yellow(full_local_path, bold=True),
                    )
                copy_or_link(full_file_path, full_local_path)
                continue

            if filecmp.cmp(full_file_path, full_local_path):
                print(cyan("No update needed :"), cyan(full_local_path, bold=True))
                continue

            # Local file is newer than the template: leave it alone.
            if is_first_file_newer(full_local_path, full_file_path):
                print(
                    cyan("No update needed ") + cyan("date", bold=True) + cyan(":"),
                    cyan(full_local_path, bold=True),
                )
                continue

            # we have a conflict.
            if auto_resolve_conflicts:
                print(
                    red("Conflict"),
                    red("auto", bold=True),
                    red(" :"),
                    red(full_local_path, bold=True),
                )
                copy_or_link(full_file_path, full_local_path)
                continue

            if keep_current_files_on_conflict:
                print(
                    red("Conflict"),
                    red("keep", bold=True),
                    red(" :"),
                    red(full_local_path, bold=True),
                )
                # Touch the file so it counts as newer on the next run.
                os.utime(full_local_path, (now(), now()))
                continue

            # Interactive resolution: save a .orig copy and open a diff.
            full_local_path_orig = full_local_path + ".orig"
            shutil.copy(full_local_path, full_local_path_orig, follow_symlinks=True)
            copy_or_link(full_file_path, full_local_path)
            execute_diff(full_local_path, full_local_path_orig)

            print(red("Conflict resolved :"), red(full_local_path, bold=True))
            continue

        # Handlebars template handling from here on. Symlinked templates
        # cannot be rendered, so they are a fatal configuration error.
        if os.path.islink(full_file_path):
            print(red("FATAL ERROR", bold=True))
            print(red("Template link found :"), red(full_file_path, bold=True))
            sys.exit(1)

        with open(full_file_path, "r", encoding="utf8") as template_file:
            template_content = template_file.read()

        template = pybars.Compiler().compile(template_content)
        content = template(project_parameters)

        if not os.path.isfile(full_local_path):
            print(yellow("Parsing HBS template :"), yellow(full_local_path, bold=True))
            with open(full_local_path, "w", encoding="utf8") as content_file:
                content_file.write(content)
            shutil.copystat(full_file_path, full_local_path)
            continue

        # FIX: read the existing file via a context manager; the previous
        # code leaked the file handle from a bare open(...).read().
        with open(full_local_path, "r", encoding="utf8") as existing_file:
            existing_content = existing_file.read()

        if content == existing_content:
            print(cyan("No update needed :"), cyan(full_local_path, bold=True))
            continue

        if is_first_file_newer(full_local_path, full_file_path):
            print(
                cyan("No update needed ") + cyan("date", bold=True) + cyan(":"),
                cyan(full_local_path, bold=True),
            )
            continue

        # we have a conflict.
        if auto_resolve_conflicts:
            print(
                red("Conflict"),
                red("auto", bold=True),
                red("HBS :"),
                red(full_local_path, bold=True),
            )
            with open(full_local_path, "w", encoding="utf8") as content_file:
                content_file.write(content)
            continue

        if keep_current_files_on_conflict:
            # FIX: this branch keeps the local file but was labeled "auto"
            # (copy-paste from the branch above); label it "keep" to match
            # the non-template conflict handling.
            print(
                red("Conflict"),
                red("keep", bold=True),
                red("HBS :"),
                red(full_local_path, bold=True),
            )
            os.utime(full_local_path, (now(), now()))
            continue

        # Interactive resolution: save a .orig copy and open a diff.
        full_local_path_orig = full_local_path + ".orig"
        shutil.copy(full_local_path, full_local_path_orig, follow_symlinks=True)

        with open(full_local_path, "w", encoding="utf8") as content_file:
            content_file.write(content)

        execute_diff(full_local_path, full_local_path_orig)

        print(red("Conflict resolved HBS:"), red(full_local_path, bold=True))
def generate(ars, auto, keep, template, parameters):
    """
    Generate or update the project sources.

    :param ars: when true, persist the resolved parameters into `.ars`
    :param auto: auto-resolve file conflicts by overwriting local files
    :param keep: keep local files on conflict
    :param template: project template name to generate (may be empty when
        a `.ars` file already records the templates to use)
    :param parameters: extra `NAME=value` parameter strings
    """
    loaded_project_parameters = load_project_parameters()

    if not template and not loaded_project_parameters:
        print(red("You need to pass a project name to generate."))

        if os.path.isdir(ARS_PROJECTS_FOLDER):
            print("Available projects (%s):" % cyan(ARS_PROJECTS_FOLDER))
            list_project_folder(ARS_PROJECTS_FOLDER, None)
        else:
            print(f"{ARS_PROJECTS_FOLDER} folder doesn't exist.")

        sys.exit(1)

    # if we have arguments, we need to either create, or augument the
    # projectParameters with the new settings.
    project_parameters = (
        loaded_project_parameters if loaded_project_parameters else dict()
    )

    # we convert the old projects into the new format.
    if "NAME" in project_parameters:
        project_parameters["templates"] = [project_parameters["NAME"]]
        del project_parameters["NAME"]

    # FIX: the previous code indexed project_parameters["templates"] whenever
    # the loaded parameters were non-empty, raising KeyError for `.ars` files
    # that carried parameters but no "templates"/"NAME" entry. setdefault
    # covers both "append to existing list" and "create the list" in one path.
    if template:
        templates = project_parameters.setdefault("templates", [])
        if template not in templates:
            templates.append(template)

    # we iterate the rest of the parameters, and augument the projectParameters
    for i, parameter in enumerate(parameters):
        m = PARAM_RE.match(parameter)
        param_name = m.group(1)
        # A bare NAME (no =value) becomes a boolean True flag.
        param_value = m.group(3) if m.group(3) else True

        project_parameters[param_name] = param_value
        project_parameters[f"arg{i}"] = parameter

    for project_name in project_parameters["templates"]:
        project_definition: ProjectDefinition = read_project_definition(
            ARS_PROJECTS_FOLDER, project_name
        )

        # Generate the actual project.
        print(
            cyan("Generating"),
            cyan(project_name, bold=True),
            cyan("with"),
            cyan(str(project_parameters), bold=True),
        )

        if project_definition.generate_ars and ars:
            with open(".ars", "w", encoding="utf8") as json_file:
                yaml.safe_dump(project_parameters, json_file)

        process_folder(
            ".",
            project_definition.file_resolver(),
            project_parameters,
            auto_resolve_conflicts=auto,
            keep_current_files_on_conflict=keep,
        )

        # Shell commands are themselves handlebars templates, rendered with
        # the same parameters before execution.
        for command in project_definition.shell_commands:
            print(cyan("Running"), cyan(command, bold=True))
            # Renamed from `template` to avoid shadowing the parameter.
            command_template = pybars.Compiler().compile(command)
            rendered_command = command_template(project_parameters)
            os.system(rendered_command)
def move(old_path: str, new_path: str) -> None:
    """
    git mv old/path new/path

    Moves a monorepo sub-folder to a new location: performs the `git mv`,
    commits with git-subtree trailer metadata so future subtree operations
    keep working, and updates the in-memory repo mapping. Exits with
    status 1 when `old_path` is not a configured repo.

    :param old_path: current sub-repo path (resolved against the monorepo)
    :param new_path: destination path (resolved against the monorepo)
    """
    monorepo = read_monorepo_config()

    # Normalize both paths relative to the monorepo root.
    old_path = _resolve_in_repo(monorepo, old_path)
    new_path = _resolve_in_repo(monorepo, new_path)

    if old_path not in monorepo.repos:
        print(
            red(old_path, bold=True),
            red("not defined in"),
            red(MONOREPO_CONFIG_FILE, bold=True),
        )
        sys.exit(1)

    print(cyan("moving"), cyan(old_path, bold=True), cyan("->"),
          cyan(new_path, bold=True))

    current_commit = get_current_commit(project_folder=monorepo.project_folder)
    remote_commit = get_remote_commit(monorepo=monorepo, old_path=old_path)

    # The commit message carries git-subtree trailers describing the move.
    if monorepo.squash:
        message = textwrap.dedent(f"""\
            git-monorepo: move {old_path} -> {new_path}

            git-subtree-dir: {new_path}
            git-subtree-split: {remote_commit}
            """)
    else:
        # FIXME: I'm not sure about the mainline thing, it is supposed
        # to be the commit in the current tree, presumably for the
        # subtree to have an easier time to decide what commits
        # get in.
        message = textwrap.dedent(f"""\
            git-monorepo: move {old_path} -> {new_path}

            git-subtree-dir: {new_path}
            git-subtree-mainline: {current_commit}
            git-subtree-split: {remote_commit}
            """)

    # we ensure the path exists
    os.makedirs(os.path.dirname(new_path), exist_ok=True)

    subprocess.check_call(["git", "mv", old_path, new_path],
                          cwd=monorepo.project_folder)
    subprocess.check_call(
        ["git", "commit", "-m", message],
        cwd=monorepo.project_folder,
    )

    # Move the repo entry to its new key.
    monorepo.repos[new_path] = monorepo.repos[old_path]
    del monorepo.repos[old_path]

    # FIXME: probably wrong location, and wrong commit
    write_synchronized_commits(monorepo, repo=new_path)

    print(
        "⚠️ ⚠️ ⚠️ ",
        yellow("WARNING", bold=True),
        "⚠️ ⚠️ ⚠️ ",
        yellow("don't forget to patch"),
        yellow(MONOREPO_CONFIG_FILE, bold=True),
        yellow("with the new location, and remove the old entry"),
    )