Example #1
0
def recursively_push_file(projects_folder: str, project_name: str,
                          file_name: str) -> None:
    """
    Push ``file_name`` into the given project's folder, descending into
    directories and recreating their structure on the target side.
    """
    print(
        yellow("Pushing"),
        yellow(file_name, bold=True),
        yellow("to"),
        yellow(project_name, bold=True),
    )

    destination = os.path.join(projects_folder, project_name, file_name)

    if not os.path.isdir(file_name):
        # Plain file: make sure the parent folder exists, then copy.
        pathlib.Path(destination).parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(file_name, destination)
        return

    # Directory: mirror it on the target side and recurse into entries.
    pathlib.Path(destination).mkdir(parents=True, exist_ok=True)
    for entry in os.listdir(file_name):
        recursively_push_file(
            projects_folder=projects_folder,
            project_name=project_name,
            file_name=os.path.join(file_name, entry),
        )
Example #2
0
def download_artifact(path) -> None:
    """
    Download ``path`` from the first configured mirror that serves it
    and store it under the local cache.

    Mirrors are tried in configuration order; the first successful
    response wins. A failing mirror only logs a message and the next
    one is attempted.
    """
    artifact_folder = cache_path(os.path.dirname(path))
    os.makedirs(artifact_folder, exist_ok=True)

    for mirror in config.data.mirrors:
        print(f"Trying to fetch {mirror.url}{path}")

        # Only authenticate when the mirror declares credentials.
        auth = None
        if mirror.auth:
            auth = (mirror.auth["user"], mirror.auth["pass"])

        r = requests.get(f"{mirror.url}{path}", auth=auth)

        if not r.ok:
            # FIX: these were two independent `if`s, so a 401 response
            # also fell through into the generic `else` branch and was
            # reported twice. A single if/elif/else chain reports each
            # failure exactly once.
            if r.status_code == 401:
                print(
                    termcolor_util.red(
                        f"401 UNAUTHORIZED: {mirror.url} failed to resolve {path}: {r}"
                    )
                )
            elif r.status_code == 403:
                print(
                    termcolor_util.red(
                        f"403 FORBIDDEN: {mirror.url} failed to resolve {path}: {r}"
                    )
                )
            else:
                print(
                    termcolor_util.yellow(f"{mirror.url} failed to resolve {path}: {r}")
                )

            continue

        with open(cache_path(path), "wb") as f:
            f.write(r.content)

        return
    def __init__(self, tracked_version: TrackedVersion,
                 expression: str) -> None:
        """
        Parse ``expression`` into a regular-expression pattern bound to
        the given tracked version.
        """
        match = StringPattern.RE.match(expression)

        if match is None:
            raise Exception('Unable to parse %s as a string pattern' %
                            expression)

        prefix = match.group(1)
        start_marker = match.group(2)
        end_marker = match.group(3)
        suffix = match.group(4)

        # The old '##' delimiters clash with YAML comments; warn, but
        # keep processing the expression.
        if start_marker == '##' or end_marker == '#':
            eprint(
                yellow(
                    "Version matched using expression '%s' "
                    "still uses the old '##' notation for delimiting the "
                    "version. This is not supported anymore since # denotes "
                    "a comment in YAML. Use '**' instead." % expression))

        # '^^' / '$$' anchor the version at the start / end of the line;
        # otherwise the literal prefix/suffix text is matched (escaped).
        head = '^()' if start_marker == '^^' else '(%s)' % re.escape(prefix)
        tail = '$' if end_marker == '$$' else '(%s)' % re.escape(suffix)
        regexp_value = head + '(.*?)' + tail

        self.tracked_version = tracked_version
        self.regex_pattern = RegExPattern(tracked_version,
                                          regexp_value,
                                          extra_flags=re.M)
Example #4
0
def yellow(text: str, bold=False, underline=False) -> str:
    """
    Wrap ``text`` in yellow terminal escapes; when colored output is
    disabled in the configuration, return the text untouched.
    """
    if config.current.boolean.color:
        return termcolor_util.yellow(text, bold=bold, underline=underline)

    return text
Example #5
0
def grpc_compile(
    grpc_file: str,
    generated_grpc_file: str,
    generated_proto_file: str,
    generated_proto_pyi: str,
) -> None:
    """
    Compile a gRPC ``.proto`` definition into its generated python
    sources (message code, service stubs and mypy type stubs) under
    ``oaas_registry/rpc``, patch the result for OaaS, then reformat it.

    FIX: the previous docstring was a copy/paste leftover talking about
    pyside2-uic and UI files; this function only drives
    ``grpc_tools.protoc``.
    """
    print(
        yellow("COMPILING"),
        yellow(grpc_file, bold=True),
        yellow("->"),
        yellow(generated_grpc_file, bold=True),
        yellow(","),
        yellow(generated_proto_file, bold=True),
    )

    # ####################################################################
    # Generate the actual sources
    # ####################################################################
    subprocess.check_call([
        "python",
        "-m",
        "grpc_tools.protoc",
        "-I",
        FOLDER,
        "--python_out=oaas_registry/rpc/",
        "--grpc_python_out=oaas_registry/rpc",
        "--mypy_out=oaas_registry/rpc",
        grpc_file,
    ])

    # ####################################################################
    # Patch the imports because the generator can't do it, and prepare
    # for OaaS.
    # ####################################################################
    with open(generated_grpc_file, "rt", encoding="utf-8") as f:
        content = f.read()

        new_content = IMPORT_RE.sub(r"\1 oaas_registry.rpc.\2", content)

        # In OaaS we register as clients directly the type. Unfortunately,
        # to add them into a grpc server we need to use the generated
        # add_to_server
        new_content = convert_to_static_method(new_content)

    with open(generated_grpc_file, "wt", encoding="utf-8") as f:
        f.write(new_content)

    # ####################################################################
    # Run black over the final sources
    # ####################################################################
    # FIX: was subprocess.call, which silently ignores a black failure;
    # use check_call as the sibling grpc_compile does so errors surface.
    subprocess.check_call([
        "python",
        "-m",
        "black",
        generated_grpc_file,
        generated_proto_file,
        generated_proto_pyi,
    ])
Example #6
0
def main() -> None:
    """
    Entry point for the versions processor.

    Parses the command line, loads the tracked versions (applying any
    --set/--load overrides), then either prints version information
    (--display/--all/--tag-name/--version) or patches every file
    referenced by the tracked versions.

    Exit codes: 0 on success, 2 when a glob matches no files, 3 when a
    pattern matches an unexpected number of times.
    """
    colorama.init()

    parser = argparse.ArgumentParser(description='Versions processor')

    parser.add_argument(
        '--display',
        '-d',
        metavar='NAME',
        nargs=1,
        help='Display the version of a single tracked version.')
    parser.add_argument(
        '--all',
        '-a',
        '--list',
        action='store_true',
        help='Display all the tracked versions and their values.')
    parser.add_argument('--set',
                        '-s',
                        nargs='+',
                        metavar="NAME=VAL",
                        help='Set values overriding what\'s in the yml files.')
    parser.add_argument('--load',
                        '-l',
                        metavar="FILE",
                        help='Override versions from the given yml file.')
    parser.add_argument(
        '-t',
        '--tag-name',
        '--tag',
        action='store_true',
        help="Get the current name to use in general tags. If the "
        "branch name can't be detected from the git repo, the "
        "$BRANCH_NAME environment variable will be used.")
    parser.add_argument(
        '--ignore-missing-parents',
        action='store_true',
        help="Ignore missing parents, and simply don't patch the "
        "values. Upstream values are still being patched if existing.")
    parser.add_argument(
        '--version',
        action='store_true',
        help='Show the currently installed program version (master)')

    argv: ProgramArguments = cast(ProgramArguments,
                                  parser.parse_args(sys.argv[1:]))

    if argv.version:
        print(cyan("version-manager: master"))
        sys.exit(0)

    if argv.tag_name:
        print_current_tag_version()
        sys.exit(0)

    default_settings_file = path.realpath(
        path.join(os.getcwd(), 'versions.json'))
    override_parameters = get_parameters_from_file(argv.load)
    override_parameters = get_parameter_values(override_parameters, argv.set)
    versions_to_process = read_settings_file(default_settings_file,
                                             override_parameters,
                                             argv.ignore_missing_parents)

    # Display a single tracked version
    if argv.display:
        print_single_tracked_version(argv.display[0], versions_to_process)
        sys.exit(0)

    # Display all tracked versions.
    if argv.all:
        print_all_tracked_versions(versions_to_process)
        sys.exit(0)

    eprint(cyan("Running on %s" % sys.version))

    # Map each resolved file name to every pattern that applies to it,
    # so each file is read and rewritten at most once.
    files_to_process: Dict[str, List[Pattern]] = {}

    for tracked_version in versions_to_process:
        for file_name, version_pattern in tracked_version.files.items():
            resolved_names = glob.glob(file_name)

            if not resolved_names:
                print(red('Unable to find any files for glob %s.' % file_name))
                sys.exit(2)

            for resolved_name in resolved_names:
                # setdefault replaces the manual get-or-create dance.
                files_to_process.setdefault(resolved_name,
                                            []).append(version_pattern)

    for resolved_name, version_patterns in files_to_process.items():
        with open(resolved_name, 'r', encoding='utf-8') as resolved_file:
            content = resolved_file.read()
            new_content = content

        print(cyan("Patching %s:" % resolved_name))

        for version_pattern in version_patterns:
            tracked_version = version_pattern.tracked_version
            print(
                green('* %s@%s' %
                      (tracked_version.name, tracked_version.version)))

            new_content = version_pattern.apply_pattern(new_content)

            # Every pattern declares how many times it should match; a
            # deviation means the file drifted from expectations.
            if version_pattern.match_count != version_pattern.expected_count:
                print(
                    red('Got %d matches instead of %d.' %
                        (version_pattern.match_count,
                         version_pattern.expected_count)))
                sys.exit(3)

        # Only touch the file on disk when something actually changed.
        if content == new_content:
            print(
                cyan("Content for %s is not changed. Won't patch it." %
                     resolved_name))
            continue

        with open(resolved_name, 'w', encoding='utf-8') as output:
            output.write(new_content)

        print(yellow('Updated %s' % resolved_name))

    colorama.deinit()
    sys.exit(0)
def grpc_compile(
    output_folder: str,
    module_name: str,
    grpc_file: str,
) -> None:
    """
    Compiles the files from the generated proto files
    """
    # Derive the three generated file names from the proto base name.
    stem = os.path.splitext(os.path.basename(grpc_file))[0]
    generated_grpc_file = os.path.join(output_folder, stem + "_pb2_grpc.py")
    generated_proto_file = os.path.join(output_folder, stem + "_pb2.py")
    generated_proto_pyi = os.path.join(output_folder, stem + "_pb2.pyi")

    print(
        yellow("COMPILING"),
        yellow(grpc_file, bold=True),
        yellow("->"),
        yellow(generated_grpc_file, bold=True),
        yellow(","),
        yellow(generated_proto_file, bold=True),
        yellow(","),
        yellow(generated_proto_pyi, bold=True),
    )

    # Generate the python sources (messages, stubs, mypy stubs).
    protoc_command = [
        "python",
        "-m",
        "grpc_tools.protoc",
        "-I",
        os.curdir,
        "-I",
        os.path.dirname(grpc_file),
        f"--python_out={output_folder}",
        f"--grpc_python_out={output_folder}",
        f"--mypy_out={output_folder}",
        grpc_file,
    ]
    subprocess.check_call(protoc_command)

    # Patch the generated service code for OaaS: in OaaS we register
    # as clients directly the type, but to add them into a grpc server
    # we need to use the generated add_to_server -- so expose it as a
    # static method. Also rewrite the imports, which the generator
    # cannot qualify, when a custom module name is given.
    with open(generated_grpc_file, "rt", encoding="utf-8") as f:
        patched = convert_to_static_method(f.read())

        if module_name:
            patched = IMPORT_RE.sub(f"\\1 {module_name}.\\2", patched)

    with open(generated_grpc_file, "wt", encoding="utf-8") as f:
        f.write(patched)

    # Reformat the generated sources with black.
    subprocess.check_call([
        "python",
        "-m",
        "black",
        generated_grpc_file,
        generated_proto_file,
        generated_proto_pyi,
    ])
Example #8
0
def process_folder(
    current_path: str,
    file_resolver: FileResolver,
    project_parameters: Dict[str, Union[str, List[str]]],
    auto_resolve_conflicts: bool,
    keep_current_files_on_conflict: bool,
) -> None:
    """
    Recursively process the handlebars templates for the given project.

    Folders recurse, plain files are copied or linked, and HBS templates
    are rendered with ``project_parameters``. A local file that differs
    from the template and is not newer is a conflict, resolved either
    automatically (``auto_resolve_conflicts``), by keeping the local file
    (``keep_current_files_on_conflict``), or by writing a ``.orig``
    backup and launching a diff.
    """
    for file_entry in file_resolver.listdir():
        file: ParsedFile = parse_file_name(file_entry.name, project_parameters)

        full_local_path = os.path.join(current_path, file.name)
        full_file_path = file_entry.absolute_path

        # Template metadata files never land in the generated project.
        if file_entry.name == "HELP.md" or file_entry.name == ".ars":
            print(cyan("Ignoring file        :"), cyan(file_entry.name, bold=True))
            continue

        if file_entry.is_dir:
            if os.path.isdir(full_local_path):
                print(cyan("Already exists folder:"), cyan(full_local_path, bold=True))
            else:
                print(
                    yellow("Creating folder      :"), yellow(full_local_path, bold=True)
                )
                os.makedirs(full_local_path)

            process_folder(
                full_local_path,
                file_resolver.subentry(file_entry),
                project_parameters,
                auto_resolve_conflicts,
                keep_current_files_on_conflict,
            )
            continue

        if file.keep_existing and os.path.isfile(full_local_path):
            print(cyan("Keeping regular file :"), cyan(full_local_path, bold=True))
            continue

        if not file.hbs_template:
            # Non-template file: copy/link it over unless the local copy
            # is already up to date.
            if not os.path.isfile(full_local_path):
                if os.path.islink(full_file_path):
                    print(
                        yellow("Linking regular file :"),
                        yellow(full_local_path, bold=True),
                    )
                else:
                    print(
                        yellow("Copying regular file :"),
                        yellow(full_local_path, bold=True),
                    )

                copy_or_link(full_file_path, full_local_path)
                continue

            if filecmp.cmp(full_file_path, full_local_path):
                print(cyan("No update needed     :"), cyan(full_local_path, bold=True))
                continue

            if is_first_file_newer(full_local_path, full_file_path):
                print(
                    cyan("No update needed ") + cyan("date", bold=True) + cyan(":"),
                    cyan(full_local_path, bold=True),
                )
                continue

            # we  have  a conflict.
            if auto_resolve_conflicts:
                print(
                    red("Conflict"),
                    red("auto", bold=True),
                    red("       :"),
                    red(full_local_path, bold=True),
                )

                copy_or_link(full_file_path, full_local_path)

                continue

            if keep_current_files_on_conflict:
                print(
                    red("Conflict"),
                    red("keep", bold=True),
                    red("       :"),
                    red(full_local_path, bold=True),
                )

                # Bump the local mtime so the file now counts as newer.
                os.utime(full_local_path, (now(), now()))

                continue

            # Manual resolution: back up the local file and diff.
            full_local_path_orig = full_local_path + ".orig"
            shutil.copy(full_local_path, full_local_path_orig, follow_symlinks=True)
            copy_or_link(full_file_path, full_local_path)

            # if 'linux' in sys.platform:
            execute_diff(full_local_path, full_local_path_orig)

            print(red("Conflict resolved    :"), red(full_local_path, bold=True))
            continue

        # HBS templates must be real files; a symlinked template is a
        # template-authoring error.
        if os.path.islink(full_file_path):
            print(red("FATAL ERROR", bold=True))
            print(red("Template link found  :"), red(full_file_path, bold=True))
            sys.exit(1)

        with open(full_file_path, "r", encoding="utf8") as template_file:
            template_content = template_file.read()

        template = pybars.Compiler().compile(template_content)
        content = template(project_parameters)

        if not os.path.isfile(full_local_path):
            print(yellow("Parsing HBS template :"), yellow(full_local_path, bold=True))

            with open(full_local_path, "w", encoding="utf8") as content_file:
                content_file.write(content)

            shutil.copystat(full_file_path, full_local_path)

            continue

        # FIX: read via a context manager; the previous inline
        # open(...).read() leaked the file handle.
        with open(full_local_path, "r", encoding="utf8") as local_file:
            local_content = local_file.read()

        if content == local_content:
            print(cyan("No update needed     :"), cyan(full_local_path, bold=True))
            continue

        if is_first_file_newer(full_local_path, full_file_path):
            print(
                cyan("No update needed ") + cyan("date", bold=True) + cyan(":"),
                cyan(full_local_path, bold=True),
            )
            continue

        # we  have  a conflict.
        if auto_resolve_conflicts:
            print(
                red("Conflict"),
                red("auto", bold=True),
                red("HBS    :"),
                red(full_local_path, bold=True),
            )

            with open(full_local_path, "w", encoding="utf8") as content_file:
                content_file.write(content)

            continue

        if keep_current_files_on_conflict:
            # FIX: this branch keeps the local file, but its label was a
            # copy/paste of the "auto" branch above; report "keep" as the
            # non-HBS path does.
            print(
                red("Conflict"),
                red("keep", bold=True),
                red("HBS    :"),
                red(full_local_path, bold=True),
            )

            os.utime(full_local_path, (now(), now()))

            continue

        # we have a conflict
        full_local_path_orig = full_local_path + ".orig"
        shutil.copy(full_local_path, full_local_path_orig, follow_symlinks=True)
        with open(full_local_path, "w", encoding="utf8") as content_file:
            content_file.write(content)

        # if 'linux' in sys.platform:
        execute_diff(full_local_path, full_local_path_orig)

        print(red("Conflict resolved HBS:"), red(full_local_path, bold=True))
Example #9
0
def move(old_path: str, new_path: str) -> None:
    """
    git mv old/path new/path

    Moves a subtree-backed folder inside the monorepo: runs `git mv`,
    commits with a message carrying git-subtree markers (so later syncs
    can locate the subtree at its new path), and re-points the repo
    mapping. Exits with code 1 when ``old_path`` is not a registered
    repo in the monorepo configuration.
    """
    monorepo = read_monorepo_config()

    # Normalize both paths relative to the monorepo root.
    old_path = _resolve_in_repo(monorepo, old_path)
    new_path = _resolve_in_repo(monorepo, new_path)

    if old_path not in monorepo.repos:
        print(
            red(old_path, bold=True),
            red("not defined in"),
            red(MONOREPO_CONFIG_FILE, bold=True),
        )
        sys.exit(1)

    print(cyan("moving"), cyan(old_path, bold=True), cyan("->"),
          cyan(new_path, bold=True))

    current_commit = get_current_commit(project_folder=monorepo.project_folder)
    remote_commit = get_remote_commit(monorepo=monorepo, old_path=old_path)

    # Build a commit message in the same shape `git subtree` writes, so
    # tooling that scans for the git-subtree-* markers keeps working.
    # Squash mode records only the split commit; otherwise the mainline
    # commit is recorded too.
    if monorepo.squash:
        message = textwrap.dedent(f"""\
            git-monorepo: move {old_path} -> {new_path}
            
            git-subtree-dir: {new_path}
            git-subtree-split: {remote_commit}
        """)
    else:
        # FIXME: I'm not sure about the mainline thing, it is supposed
        #        to be the commit in the current tree, presumably for the
        #        subtree to have an easier time to decide what commits
        #        get in.
        message = textwrap.dedent(f"""\
            git-monorepo: move {old_path} -> {new_path}

            git-subtree-dir: {new_path}
            git-subtree-mainline: {current_commit}
            git-subtree-split: {remote_commit}
        """)

    # we ensure the path exists
    os.makedirs(os.path.dirname(new_path), exist_ok=True)

    subprocess.check_call(["git", "mv", old_path, new_path],
                          cwd=monorepo.project_folder)

    subprocess.check_call(
        ["git", "commit", "-m", message],
        cwd=monorepo.project_folder,
    )

    # Re-point the in-memory repo mapping at the new location.
    monorepo.repos[new_path] = monorepo.repos[old_path]
    del monorepo.repos[old_path]

    # FIXME: probably wrong location, and wrong commit
    write_synchronized_commits(monorepo, repo=new_path)

    # The config file itself is NOT rewritten here; remind the user.
    print(
        "⚠️ ⚠️ ⚠️ ",
        yellow("WARNING", bold=True),
        "⚠️ ⚠️ ⚠️ ",
        yellow("don't forget to patch"),
        yellow(MONOREPO_CONFIG_FILE, bold=True),
        yellow("with the new location, and remove the old entry"),
    )