Example #1
0
    def load(path: Path, schema_pointer):
        """Load and validate a .yaml file.

        Parameters
        ----------
        path:
            Location of the YAML file to load.
        schema_pointer:
            Base schema mapping (key -> validator); deep-copied so the
            caller's schema is never mutated by the adjustments below.

        Raises
        ------
        YAMLError
            When the document does not validate against the final schema.
        """
        # Work on a copy so per-file tweaks never leak back to the caller.
        schema = copy.deepcopy(schema_pointer)
        with path.open() as f:
            raw = f.read()
            # First pass: parse schema-less so the data can be inspected.
            data = yaml_load(raw, Any())
            is_template = path.name == "template.yaml"

            # Replace real Country and Timezone values with fakes
            if is_template:
                schema["woo/woocommerce_default_country"] = Enum(["LL"])
                schema["wp/timezone_string"] = Enum(["Region/Country"])
                schema["wp/DEFAULT_WPLANG"] = Enum(["ll_LL"])
                schema["woo/woocommerce_currency"] = Enum(["LLL"])

            if "woo/woocommerce_tax_classes" in data:
                # Inspect that tax classes and taxes match

                # create enum for taxes from defined tax_classes
                tax_classes = [
                    str(tax).lower().replace(" ", "-")
                    for tax in data["woo/woocommerce_tax_classes"]
                ]
                # One extra slot for the standard class, which is never
                # listed in woocommerce_tax_classes; numbering starts at 1.
                for number in range(1, len(tax_classes) + 2):
                    schema[f"wootax/{number}"] = Map({
                        "country":
                        Enum(["LL"]) if is_template else Enum(COUNTRIES),
                        "state":
                        Str(),
                        "rate":
                        Decimal(),
                        "name":
                        Str(),
                        "priority":
                        Int(),
                        "compound":
                        Int(),
                        "shipping":
                        Int(),
                        "order":
                        Int(),
                        # wootax/1 is the standard class (empty class name).
                        "class":
                        Enum([""]) if number == 1 else Enum(tax_classes),
                        "locations":
                        Map({}),
                    })
            # Second pass: validate against the fully adjusted schema.
            # yaml_load raises YAMLError on failure; let it propagate.
            # (The former ``except YAMLError: raise`` was a no-op and the
            # trailing ``return as_document(schema)`` was unreachable.)
            return yaml_load(raw, Map(schema), path)
Example #2
0
    def load_string(data: bytes, schema, path: str):
        """Load and validate YAML data.

        Parameters
        ----------
        data:
            Raw YAML document to parse.
        schema:
            Mapping of key -> validator, wrapped in ``Map`` for validation.
        path:
            Passed through to ``yaml_load`` (presumably used as the error
            label — confirm against yaml_load's signature).

        Raises
        ------
        YAMLError
            When the data does not validate against the schema.
        """
        # yaml_load raises YAMLError itself; the former
        # ``except YAMLError: raise`` added nothing, and the trailing
        # ``return as_document(schema)`` was unreachable.
        return yaml_load(data, Map(schema), path)
Example #3
0
def load_info_yaml() -> InfoYAMLDict:
    """
    Read and schema-validate the repository's root info.yaml file.

    Raises
    ------
    YAMLValidationError
        If the contents of info.yaml do not conform to the schema.

    Returns
    -------
    YAML
        Parsed data of the info.yaml file.
    """
    info_path = ROOT_PATH / "info.yaml"
    with open(info_path, encoding="utf-8") as info_file:
        contents = info_file.read()
    parsed = yaml_load(contents, SCHEMA, label="info.yaml")
    # StrictYAML already guarantees a proper dictionary here, so the
    # cast is safe.
    return cast(InfoYAMLDict, parsed.data)
Example #4
0
def main() -> int:
    """Regenerate repo metadata from info.yaml.

    Rewrites the repo-level info.json, each cog's info.json, the per-Python
    CI requirement/file lists, and the cogs table in README.md, then runs a
    set of lint-style checks over the cogs.

    Returns the exit code of the section-order check, or 1 when the cogs
    section cannot be located in README.md.
    """
    print("Loading info.yaml...")
    with open(ROOT_PATH / "info.yaml", encoding="utf-8") as fp:
        data = yaml_load(fp.read(), SCHEMA).data

    print("Checking order in sections...")
    exit_code = check_order(data)

    print("Preparing repo's info.json...")
    repo_info = data["repo"]
    # Expand the {repo_name} placeholder in the repo-level install message.
    repo_info["install_msg"] = repo_info["install_msg"].format_map(
        {"repo_name": repo_info["name"]})
    with open(ROOT_PATH / "info.json", "w", encoding="utf-8") as fp:
        json.dump(repo_info, fp, indent=4)

    # Accumulators keyed by minimum Python version tuple; (3, 8) is the
    # baseline entry each dict starts with.
    all_requirements: typing.Set[str] = set()
    requirements: typing.Dict[typing.Tuple[int, int], typing.Set[str]] = {
        (3, 8): set(),
    }
    black_file_list: typing.Dict[typing.Tuple[int, int], typing.List[str]] = {
        (3, 8): [".ci"],
    }
    compileall_file_list: typing.Dict[typing.Tuple[int, int],
                                      typing.List[str]] = {
                                          (3, 8): ["."],
                                      }
    print("Preparing info.json files for cogs...")
    shared_fields = data["shared_fields"]
    global_min_bot_version = shared_fields.get("min_bot_version")
    global_min_python_version = shared_fields.get("min_python_version")
    cogs = data["cogs"]
    for pkg_name, cog_info in cogs.items():
        all_requirements.update(cog_info["requirements"])
        min_bot_version = cog_info.get("min_bot_version",
                                       global_min_bot_version)
        min_python_version = (3, 8)
        if min_bot_version is not None:
            # Walk MAX_RED_VERSIONS and take the first Python version whose
            # associated Red-version cap (None = uncapped) is above the
            # cog's required Red version.
            red_version_info = VersionInfo.from_str(min_bot_version)
            for python_version, max_red_version in MAX_RED_VERSIONS.items():
                if max_red_version is None:
                    min_python_version = python_version
                    break
                if red_version_info >= max_red_version:
                    continue
                min_python_version = python_version
                break
        # An explicit min_python_version (cog-level or shared) can only
        # raise the bar, never lower it.
        python_version = cog_info.get("min_python_version",
                                      global_min_python_version)
        if python_version is not None:
            if min_python_version < python_version:
                min_python_version = python_version
        # Register the cog under every Python version that satisfies its
        # minimum.
        for python_version, reqs in requirements.items():
            if python_version >= min_python_version:
                reqs.update(cog_info["requirements"])
        for python_version, file_list in compileall_file_list.items():
            # NOTE(review): `is` relies on object identity with
            # MAX_PYTHON_VERSION — `==` looks intended; confirm.
            if python_version is MAX_PYTHON_VERSION:
                continue
            if python_version >= min_python_version:
                file_list.append(pkg_name)
        black_file_list[min_python_version].append(pkg_name)

        print(f"Preparing info.json for {pkg_name} cog...")
        output = {}
        # Emit keys in canonical order; fall back to shared_fields, skip
        # keys defined in neither place.
        for key in AUTOLINT_COG_KEYS_ORDER:
            if key in SKIP_COG_KEYS_INFO_JSON:
                continue
            value = cog_info.get(key)
            if value is None:
                value = shared_fields.get(key)
                if value is None:
                    continue
            output[key] = value
        replacements = {
            "repo_name": repo_info["name"],
            "cog_name": output["name"],
        }
        shared_fields_namespace = SimpleNamespace(**shared_fields)
        # A <pkg>/data directory means the cog ships bundled data.
        maybe_bundled_data = ROOT_PATH / pkg_name / "data"
        if maybe_bundled_data.is_dir():
            new_msg = f"{output['install_msg']}\n\nThis cog comes with bundled data."
            output["install_msg"] = new_msg
        for to_replace in ("short", "description", "install_msg"):
            # First expand {shared_fields.*} references, then the plain
            # placeholders.
            output[to_replace] = safe_format_alt(
                output[to_replace], {"shared_fields": shared_fields_namespace})
            if to_replace == "description":
                # description may additionally reference {short}.
                output[to_replace] = output[to_replace].format_map({
                    **replacements, "short":
                    output["short"]
                })
            else:
                output[to_replace] = output[to_replace].format_map(
                    replacements)

        with open(ROOT_PATH / pkg_name / "info.json", "w",
                  encoding="utf-8") as fp:
            json.dump(output, fp, indent=4)

    print("Preparing requirements file for CI...")
    with open(ROOT_PATH / ".ci/requirements/all_cogs.txt",
              "w",
              encoding="utf-8") as fp:
        fp.write("Red-DiscordBot\n")
        for requirement in sorted(all_requirements):
            fp.write(f"{requirement}\n")
    # Per-Python-version requirement and file lists for the CI matrix.
    for python_version, reqs in requirements.items():
        folder_name = f"py{''.join(map(str, python_version))}"
        with open(
                ROOT_PATH / f".ci/{folder_name}/requirements/all_cogs.txt",
                "w",
                encoding="utf-8",
        ) as fp:
            fp.write("Red-DiscordBot\n")
            for req in sorted(reqs):
                fp.write(f"{req}\n")
        with open(ROOT_PATH / f".ci/{folder_name}/black_file_list.txt",
                  "w",
                  encoding="utf-8") as fp:
            fp.write(" ".join(sorted(black_file_list[python_version])))
        with open(
                ROOT_PATH / f".ci/{folder_name}/compileall_file_list.txt",
                "w",
                encoding="utf-8",
        ) as fp:
            fp.write(" ".join(sorted(compileall_file_list[python_version])))

    print("Preparing all cogs list in README.md...")
    with open(ROOT_PATH / "README.md", "r+", encoding="utf-8") as fp:
        text = fp.read()
        # The generated table lives between the "Cog Menu" and
        # "Contributing" headers.
        match = re.search(r"## Cog Menu\n{2}(.+)\n{2}## Contributing",
                          text,
                          flags=re.DOTALL)
        if match is None:
            print(
                "\033[91m\033[1mERROR:\033[0m Couldn't find cogs sections in README.md!"
            )
            return 1
        start, end = match.span(1)
        lines = []
        lines.append("---\n| Name | Description |\n| --- | --- |")
        for pkg_name, cog_info in cogs.items():
            replacements = {
                "repo_name": repo_info["name"],
                "cog_name": cog_info["name"],
            }
            desc = cog_info["short"].format_map(replacements)
            lines.append(f"| {pkg_name} | {desc} |")
        cogs_section = "\n".join(lines)
        # Rewrite the README in place with the regenerated table spliced in.
        fp.seek(0)
        fp.truncate()
        fp.write(f"{text[:start]}{cogs_section}{text[end:]}")

    print("Updating class docstrings...")
    update_class_docstrings(cogs, repo_info)
    print("Checking for cog_data_path usage...")
    check_cog_data_path_use(cogs)
    print("Checking for missing help docstrings...")
    check_command_docstrings(cogs)
    print("Checking for missing end user data statements...")
    check_for_end_user_data_statement(cogs)

    print("Done!")
    return exit_code
Example #5
0
def load(path: str = CONFIG_FILE) -> Dict[str, Any]:
    """Read the YAML config at *path* and return its validated data."""
    with open(path, 'rt') as config_file:
        raw = config_file.read()
    parsed = yaml_load(raw, schema=__CONFIG_SCHEMA)
    return cast(Dict[str, Any], parsed.data)