Example #1
def _apply_saved_variables(config_data, group_data, apply_data, dry_run,
                           task_type, source_file_dir, wtf_attr):
    wow_dir_path = config_data[constant.Syntax.WOW_DIR_PATH]
    tasks = apply_data[task_type]
    target_file_duplicate_filter = set()  # type: typing.Set[str]
    final_content_cache = dict()  # type: typing.Dict[str, str]

    all_wtf_char_list = list()  # type: typing.List[wtf.WtfCharacter]
    for member in group_data["_all"]:
        account, realm, char = member.split(".")
        wtf_char = wtf.WtfCharacter(wow_dir_path=wow_dir_path,
                                    account=account,
                                    realm=realm,
                                    char=char)
        all_wtf_char_list.append(wtf_char)

    for task_data in tasks:
        members = evaluate_members(task_data=task_data, group_data=group_data)
        allow_addons = task_data[constant.Syntax.ALLOW_ADDONS]

        wtf_char_list = list()  # type: typing.List[wtf.WtfCharacter]
        for member in members:
            account, realm, char = member.split(".")
            wtf_char = wtf.WtfCharacter(wow_dir_path=wow_dir_path,
                                        account=account,
                                        realm=realm,
                                        char=char)
            wtf_char_list.append(wtf_char)

        for wtf_char in wtf_char_list:
            for addon_sv_file in allow_addons:
                source_file = Path(source_file_dir, addon_sv_file)
                validate_exists(source_file)
                target_file = Path(getattr(wtf_char, wtf_attr), addon_sv_file)

                if source_file.abspath not in final_content_cache:
                    tpl = jinja2.Template(
                        source_file.read_text(encoding="utf-8"))
                    final_content = tpl.render(
                        characters=wtf_char_list,
                        all_characters=all_wtf_char_list)
                    final_content_cache[source_file.abspath] = final_content
                else:
                    final_content = final_content_cache[source_file.abspath]

                if target_file.abspath not in target_file_duplicate_filter:
                    print(
                        f"render '{source_file}' -- write to -> '{target_file.abspath}'"
                    )
                    if not dry_run:
                        target_file.atomic_write_text(final_content,
                                                      overwrite=True)
                    target_file_duplicate_filter.add(target_file.abspath)
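The core of the function above is a render-once cache: each addon SavedVariables template is rendered with jinja2 a single time and the result is reused for every target file. A minimal standalone sketch of that pattern (all names below are illustrative, not part of the original module):

import jinja2

_content_cache = {}  # rendered text, keyed by source file path


def render_cached(source_path, characters):
    # render each template file only once, then reuse the cached result
    if source_path not in _content_cache:
        with open(source_path, encoding="utf-8") as f:
            _content_cache[source_path] = jinja2.Template(f.read()).render(
                characters=characters)
    return _content_cache[source_path]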
Example #2
    def get_title(self):
        """
        Get title line from .rst file.

        **中文文档**

        从一个 ``_filename`` 所指定的 .rst 文件中, 找到顶级标题.
        也就是第一个 ``====`` 或 ``----`` 或 ``~~~~`` 上面一行.
        """
        header_bar_char_list = "=-~+*#^"

        lines = list()
        for cursor_line in textfile.readlines(self.rst_path,
                                              strip="both",
                                              encoding="utf-8"):
            if cursor_line.startswith(".. include::"):
                relative_path = cursor_line.split("::")[-1].strip()
                included_path = Path(
                    Path(self.rst_path).parent.abspath, relative_path)
                if included_path.exists():
                    cursor_line = included_path.read_text(encoding="utf-8")
            lines.append(cursor_line)
        rst_content = "\n".join(lines)

        cursor_previous_line = None
        for cursor_line in rst_content.split("\n"):
            for header_bar_char in header_bar_char_list:
                if cursor_line.startswith(header_bar_char):
                    flag_full_bar_char = (
                        cursor_line == header_bar_char * len(cursor_line))
                    flag_line_length_at_least_1 = len(cursor_line) >= 1
                    flag_previous_line_not_empty = bool(cursor_previous_line)
                    if flag_full_bar_char \
                            and flag_line_length_at_least_1 \
                            and flag_previous_line_not_empty:
                        return cursor_previous_line.strip()
            cursor_previous_line = cursor_line

        msg = "Warning, this document doesn't have any %s header!" % header_bar_char_list
        return None
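The detection rule above can be illustrated with a standalone sketch (simplified, not part of the original class): a line made entirely of one bar character marks the stripped line right above it as the title.

def find_title(rst_content, bar_chars="=-~+*#^"):
    # return the line sitting on top of the first full-width bar line
    previous = None
    for line in rst_content.split("\n"):
        if previous and line and line[0] in bar_chars \
                and line == line[0] * len(line):
            return previous.strip()
        previous = line
    return None


assert find_title("Hello World\n===========\n\nbody text") == "Hello World"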
Example #3
def _apply(config_data, group_data, apply_data, dry_run, task_type,
           source_file_dir, wtf_attr):
    wow_dir_path = config_data[constant.Syntax.WOW_DIR_PATH]
    tasks = apply_data[task_type]
    target_file_duplicate_filter = set()
    for task_data in tasks:
        members = evaluate_members(task_data=task_data, group_data=group_data)
        source_file = Path(source_file_dir, task_data[constant.Syntax.FILE])
        source_file_content = source_file.read_text(encoding="utf-8")
        for member in members:
            account, realm, char = member.split(".")
            wtf_char = wtf.WtfCharacter(wow_dir_path=wow_dir_path,
                                        account=account,
                                        realm=realm,
                                        char=char)
            target_file = getattr(wtf_char, wtf_attr)
            if target_file.abspath not in target_file_duplicate_filter:
                print(f"copy '{source_file}' ---> '{target_file.abspath}'")
                if not dry_run:
                    target_file.atomic_write_text(source_file_content,
                                                  overwrite=True)
                target_file_duplicate_filter.add(target_file.abspath)


def generate_terraform_script(tf_dir):
    """
    This function looking for ``main.tf.tpl``, ``variables.tf.tpl`` files in
    ``tf_dir``. And use jinja2 template engine to generate the real tf files.
    It pass in the config object to dynamically inject values.

    :param tf_dir: terraform workspace directory.
    """

    tf_dir = Path(tf_dir)
    if not tf_dir.is_dir():
        raise TypeError(f"{tf_dir} is not a directory!")

    tf_files = ["main", "variables", "backend", "output"]

    for file in tf_files:
        tpl_file = Path(tf_dir, file + ".tf.tpl")
        tf_file = Path(tf_dir, file + ".tf")
        if tpl_file.exists():
            tpl = jinja2.Template(tpl_file.read_text(encoding="utf-8"))
            content = tpl.render(config=config)
            tf_file.write_text(content, encoding="utf-8")
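A minimal usage sketch for the function above, assuming a hypothetical workspace layout (the path and the template line are illustrative only):

# /my/tf/workspace/main.tf.tpl might contain a line such as
#     environment = "{{ config.ENVIRONMENT_NAME.get_value() }}"
# and the call below renders it into /my/tf/workspace/main.tf:
generate_terraform_script("/my/tf/workspace")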
                    tag_property_name = _get_tags_attr(aws_object_class)
                    if tag_property_name is not None:
                        try:
                            tag_property_name_mapper[
                                aws_object_class.resource_type
                            ] = tag_property_name
                        except Exception:
                            pass
    return tag_property_name_mapper


tag_property_name_mapper_cache_file = Path(__file__).change(
    new_basename="tag_property_name_mapper.json")
if tag_property_name_mapper_cache_file.exists():
    tag_property_name_mapper = json.loads(
        tag_property_name_mapper_cache_file.read_text())
else:  # pragma: no cover
    tag_property_name_mapper = get_tag_property_name_mapper()
    tag_property_name_mapper_cache_file.write_text(
        json.dumps(tag_property_name_mapper))


def get_tags_attr(resource):
    """
    Quickly find the tags property name for a resource, using the cached mapper.

    :type resource: AWSObject
    :rtype: str
    """
    return tag_property_name_mapper.get(resource.resource_type)
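A short usage sketch, assuming a troposphere resource; the returned name depends on the generated mapper, but for most taggable resources it is expected to be "Tags":

from troposphere import s3

bucket = s3.Bucket("MyBucket")
tags_attr = get_tags_attr(bucket)  # e.g. "Tags" for AWS::S3::Bucket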
This module enumerates all account credentials, so that sensitive information
never has to be written into the code, and Hotkeynet scripts can reference it
safely.
"""

import json
import typing

import attr
from pathlib_mate import PathCls as Path
from enum import Enum

# put credentials.json file in the git repo root folder
credentials_file = Path(__file__).parent.parent.parent.parent.change(
    new_basename="credentials.json")
assert credentials_file.parent.basename == "hotkeynet-project"
credentials_data = json.loads(credentials_file.read_text(encoding="utf-8"))


@attr.s
class Credential:
    username = attr.ib()
    password = attr.ib()


# Enumerate all username / password data objects so they can be referenced later
class Credentials(Enum):
    cred_fatmulti1 = Credential(username="******",
                                password=credentials_data["fatmulti1"])
    cred_fatmulti2 = Credential(username="******",
                                password=credentials_data["fatmulti2"])
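A minimal sketch of the expected credentials.json layout and of how a member is read back; the password values are placeholders:

# credentials.json maps an account alias to its password, e.g.
#     {"fatmulti1": "********", "fatmulti2": "********"}
# Enum members wrap Credential instances, so access goes through .value:
password = Credentials.cred_fatmulti1.value.password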

# --- ECR Repository
cft_dir = Path(__file__).parent
repos_dir = cft_dir.change(new_basename="repos")

DEFAULT_UNTAGGED_IMAGE_EXPIRE_DAYS = 30

repo_names = list()
for subfolder in repos_dir.select_dir(recursive=False):
    repo_config_file = Path(subfolder, "config.json")
    repo_basename = subfolder.basename
    if repo_config_file.exists():
        repo_config_data = json.loads(
            strip_comments(repo_config_file.read_text("utf-8")))
        untagged_image_expire_days = repo_config_data.get(
            "untagged_image_expire_days", DEFAULT_UNTAGGED_IMAGE_EXPIRE_DAYS)
    else:
        untagged_image_expire_days = DEFAULT_UNTAGGED_IMAGE_EXPIRE_DAYS

    repo_logic_id = f"EcrRepo{camelcase(repo_basename)}"
    repo_name = f"{config.ENVIRONMENT_NAME.get_value()}-{repo_basename}"
    repo_names.append(repo_name)

    ecr_lifecycle_policy = {
        "rules": [
            {
                "rulePriority": 1,