Example #1
def process(
    path: Path,
    locale: str,
    re_download_link: re.Pattern,
    re_old_versions: re.Pattern,
    re_change_log: re.Pattern,
    change_log: str,
):
    print(f"Processing {path}")

    with open(path, "r") as fi:
        text = fi.read()

    mt = re_download_link.search(text)
    if mt is None:
        print(f"Download link not found in: {path}")
        return

    plugin_name = mt.groups()[0]
    major_version = mt.groups()[1]
    minor_version = mt.groups()[2]
    patch_version = mt.groups()[3]
    download_url = mt.groups()[4]

    # Add old download link to Old Versions section.
    old_version = f"{major_version}.{minor_version}.{patch_version}"
    old_version_link = f"- [{plugin_name} {old_version} - VST 3 (github.com)]({download_url})"
    text = re_old_versions.sub(
        lambda exp: f"{exp.group()}\n{old_version_link}", text, count=1)

    # Update download link.
    new_version = f"{major_version}.{minor_version}.{int(patch_version) + 1}"
    # release_name and compose_download_link come from elsewhere in the original script (not shown in this snippet).
    new_download_url = f"https://github.com/ryukau/VSTPlugins/releases/download/{release_name}/{plugin_name}{new_version}.zip"

    new_link = compose_download_link(locale, plugin_name, new_version,
                                     new_download_url)
    if new_link is None:
        return

    text = re_download_link.sub(new_link, text, count=1)

    # Add change log.
    text = re_change_log.sub(
        lambda exp: f"{exp.group()}\n- {new_version}{change_log}",
        text,
        count=1)

    out_dir = Path("out") / Path(path.parts[-2])
    out_dir.mkdir(parents=True, exist_ok=True)
    with open(out_dir / Path(path.name), "w") as fi:
        fi.write(text)
Example #2
def delete_length_to_flags(plugin: up.utauplugin.UtauPlugin,
                           pattern: re.Pattern):
    """フラグ内のLengthを消す

    '【480】g-2H40' -> 'g-2H40'
    'g-2【480】H40' -> 'g-2H40'
    """
    for note in plugin.notes:
        note.flags = pattern.sub('', note.flags)
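The compiled pattern is supplied by the caller; a minimal sketch of what it might look like for the docstring examples (the pattern below is an assumption, not taken from the source):

import re

# Hypothetical pattern for the bracketed Length markers, e.g. '【480】'.
length_marker = re.compile(r'【\d+】')

assert length_marker.sub('', '【480】g-2H40') == 'g-2H40'
assert length_marker.sub('', 'g-2【480】H40') == 'g-2H40'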
Example #3
def update_version(pattern: re.Pattern, v: str, file_path: str):
    print(f"Replacing {pattern} to {version} in {file_path}")
    with open(file_path, "r+") as f:
        file_content = f.read()
        if not pattern.search(file_content):
            raise Exception(
                f"Pattern {pattern!r} not found in file {file_path!r}")
        new_content = pattern.sub(fr'\g<1>{v}\g<2>', file_content)
        if file_content == new_content:
            return
        f.seek(0)
        f.truncate()
        f.write(new_content)
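The replacement template \g<1>{v}\g<2> expects the pattern to capture the text before and after the version. A minimal usage sketch with a hypothetical pattern and a temporary file:

import re
import tempfile

# Hypothetical pattern: group 1 is the text before the version, group 2 the
# text after it, so only the version number itself gets replaced.
version_pattern = re.compile(r'(version = ")[^"]+(")')

with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as tmp:
    tmp.write('version = "1.2.2"\n')

update_version(version_pattern, "1.2.3", tmp.name)
with open(tmp.name) as f:
    print(f.read())  # version = "1.2.3"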
Example #4
def get_drug_names_by_suffix(drug_name: str, suffixes: List[str],
                             split_chars: re.Pattern,
                             remove_chars: re.Pattern):
    drug_name_token_list = []
    drug_name = drug_name.lower()
    drug_token = split_chars.split(drug_name)
    drug_token = [remove_chars.sub("", token) for token in drug_token]
    for token in drug_token:
        for suffix in suffixes:
            if token.endswith(suffix):
                drug_name_token_list.append(token)
                break

    return drug_name_token_list
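A usage sketch with illustrative patterns and suffixes; the real values come from the caller:

import re

split_chars = re.compile(r"[\s,/+]")     # token boundaries
remove_chars = re.compile(r"[^a-z0-9]")  # strip stray punctuation per token

print(get_drug_names_by_suffix(
    "Amoxicillin / Clavulanate", ["cillin"], split_chars, remove_chars))
# ['amoxicillin']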
Example #5
File: utils.py Project: sthagen/gcovr
def presentable_filename(filename: str, root_filter: re.Pattern) -> str:
    """mangle a filename so that it is suitable for a report"""

    normalized = root_filter.sub("", filename)
    if filename.endswith(normalized):
        # remove any slashes between the removed prefix and the normalized name
        if filename != normalized:
            while normalized.startswith(os.path.sep):
                normalized = normalized[len(os.path.sep) :]
    else:
        # Do no truncation if the filter does not start matching
        # at the beginning of the string
        normalized = filename

    return normalized.replace("\\", "/")
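A usage sketch; the root filter here is an assumption about the caller's filter (a pattern anchored at the project root):

import re

root_filter = re.compile(r"^/home/user/project/")

print(presentable_filename("/home/user/project/src/main.c", root_filter))
# src/main.c
print(presentable_filename("/other/place/main.c", root_filter))
# /other/place/main.c  (filter does not match at the start, so no truncation)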
Example #6
def search_and_replace(
    site: mwclient.Site,
    search: str,
    regex: re.Pattern,
    replace: str,
    namespace: int = 0,
) -> None:
    for result in site.search(search, namespace=namespace, what='text'):
        page = site.pages[result['title']]
        text = page.text()
        newtext = regex.sub(replace, text)
        if newtext != text:
            page.edit(newtext,
                      summary=f'replace {regex.pattern} with {replace} (auto)')
            logger.info('page %s updated.', page.page_title)
        else:
            logger.warning('page %s not updated.', page.page_title)
Example #7
    def replace(self, search_value: re.Pattern, replace_value: str, replace_replaced_words: bool = False):
        if not replace_replaced_words and self.search_value_contains_replaced_words(search_value, replace_value):
            return self
        replacing_word = self.word
        if search_value.search(self.word) is not None:
            replacing_word = search_value.sub(replace_value, self.word)
        collection = search_value.findall(self.word)
        replaced_words: List[str]
        if len(collection) > 1:
            replaced_words = list(map(lambda s: s.replace(s, replace_value), collection))
        else:
            replaced_words = []

        if replacing_word != self.word:
            for word in replaced_words:
                self.replaced_words.add(word)
            self.word = replacing_word
        return self
Example #8
def add_symbols(
    text: str,
    symbol: str,
    searcher: re.Pattern,  # type: ignore
    ignore_tags: Iterable = frozenset([
        'html', '[document]', 'title', 'style', 'head', 'script', 'noscript',
        'pre', 'code', 'meta', 'img'
    ]),
    ignore_tag_types: Iterable = (element.Doctype, element.Comment,
                                  element.Declaration),
) -> str:
    soup = BeautifulSoup(text, "html.parser")

    for tag in soup.findAll(text=True):
        if tag.parent.name in ignore_tags or type(tag) in ignore_tag_types:
            continue

        edited_text = searcher.sub(lambda matched: matched.group(0) + symbol,
                                   tag.string)
        tag.replace_with(edited_text)
    return soup.prettify()
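A usage sketch; BeautifulSoup and element come from bs4 (also needed by the function above), and the searcher below, which appends the symbol after every word, is just an illustration:

import re
from bs4 import BeautifulSoup, element

word = re.compile(r"\w+")
html = "<p>hello world</p><code>keep_this</code>"
print(add_symbols(html, "*", word))
# 'hello' and 'world' gain the marker; text inside <code> is left alone.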
Example #9
def build_referral_map_helper(
    mtgjson_object: Union[MtgjsonCardObject, MtgjsonSealedProductObject],
    string_regex: re.Pattern,
) -> List[Tuple[str, str]]:
    """
    Helps construct the referral map contents
    :param mtgjson_object: MTGJSON Set or Card object
    :param string_regex: compiled scryfall regex data
    :return: tuple to append
    """
    return_list = []
    for service, url in mtgjson_object.purchase_urls.to_json().items():
        if service not in mtgjson_object.raw_purchase_urls:
            LOGGER.info(f"Service {service} not found for {mtgjson_object.name}")
            continue

        return_list.append(
            (
                url.split("/")[-1],
                string_regex.sub("mtgjson", mtgjson_object.raw_purchase_urls[service]),
            )
        )
    return return_list
Example #10
def replace_from_dict_and_pattern(string: str, replacements: Dict,
                                  pattern: re.Pattern) -> str:
    """ Given a pattern which matches keys in replacements, replace keys found in string with their values"""
    return pattern.sub(lambda match: replacements[match.group(0)], string)
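A usage sketch; building the pattern from the escaped dictionary keys keeps the pattern and the lookup table in sync:

import re

replacements = {"&": "and", "%": "percent"}
pattern = re.compile("|".join(re.escape(k) for k in replacements))

print(replace_from_dict_and_pattern("5% & rising", replacements, pattern))
# 5percent and rising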
Example #11
def process_line(line: Line, re_pat: re.Pattern,
                 re_map: Dict[str, str]) -> Iterable[str]:
    """Replace links according to map and pattern."""
    yield re_pat.sub(lambda m: re_map[re.escape(m.group(0))], line.text)
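A usage sketch; Line only needs a .text attribute here, so a stand-in object is used, and the map keys are the escaped match texts, mirroring the re.escape(m.group(0)) lookup:

import re
from types import SimpleNamespace

re_map = {re.escape("http://old.example"): "https://new.example"}
re_pat = re.compile("|".join(re_map))

line = SimpleNamespace(text="see http://old.example for details")
print(next(process_line(line, re_pat, re_map)))
# see https://new.example for details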
Example #12
async def worker(repo_owner: str, submodule_regex: re.Pattern, q: asyncio.Queue):
    """
    Clones repository locally, if necessary updates submodules urls to new
    remote, creates new remote repository and pushes to it, when done deletes
    local repository.
    """
    (repo_name, repo_url) = await q.get()
    print(f"Started processing {repo_name}")

    try:
        # Check if repository with same name already exists on GitHub
        proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
            f"hub api repos/{repo_owner}/{repo_name}",
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        await proc.wait()
        if proc.returncode == 0:
            raise Exception(
                f"Stopped processing {repo_name}: repo with same name already exists for this owner on GitHub"
            )

        # If we don't need to update submodules we mirror the repository so it's faster
        mirror: str = "--mirror" if not submodule_regex else ""

        # Clones repo
        proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
            f"git clone {mirror} {repo_url}",
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL,
        )
        await proc.wait()

        repo_folder: str = f"{repo_name}.git" if mirror else repo_name

        # No need to update submodules so skip the whole step
        if not mirror:
            proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
                "git branch -r",
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.DEVNULL,
                cwd=repo_folder,
            )
            (stdout, _) = await proc.communicate()

            # Get all remote branches of current repo
            branches = set()
            for line in stdout.decode().splitlines():
                if "origin/HEAD" in line:
                    line: str = line.split("->")[1]
                branch = line.replace("origin/", "").strip()
                branches.add(branch)

            for branch in branches:
                # Checkout branch
                proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
                    f"git checkout {branch}",
                    stdout=asyncio.subprocess.DEVNULL,
                    stderr=asyncio.subprocess.DEVNULL,
                    cwd=repo_folder,
                )
                await proc.wait()

                # No submodules in this branch
                if not os.path.exists(f"{repo_folder}/.gitmodules"):
                    continue

                # Replaces submodules URLs
                with open(f"{repo_folder}/.gitmodules", "r+") as f:
                    text = ""
                    for line in f.readlines():
                        text += submodule_regex.sub(
                            fr"github.com:{repo_owner}/\2.git", line
                        )
                    f.seek(0)
                    f.write(text)
                    f.truncate()

                # Commit change
                proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
                    "git commit -a -m '[Migra] Updated submodules'",
                    stdout=asyncio.subprocess.DEVNULL,
                    stderr=asyncio.subprocess.DEVNULL,
                    cwd=repo_folder,
                )
                await proc.wait()

        # Removes existing remote
        proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
            "git remote remove origin",
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL,
            cwd=repo_folder,
        )
        await proc.wait()

        # Creates repository on GitHub
        proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
            f"hub create -p {repo_owner}/{repo_name}",
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL,
            cwd=repo_folder,
        )
        await proc.wait()

        proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
            "git push --mirror",
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL,
            cwd=repo_folder,
        )
        await proc.wait()

        # Removes local repository
        proc: asyncio.subprocess.Process = await asyncio.create_subprocess_shell(
            f"rm -rf {repo_folder}",
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.DEVNULL,
        )
        await proc.wait()

        print(f"{repo_name} migrated")

    except Exception as e:
        print(e)
    finally:
        q.task_done()
Example #13
def delete_tag_from_label(plugin: up.utauplugin.UtauPlugin, pattern: re.Pattern):
    """
    Remove the note number from the label.
    """
    for note in plugin.notes:
        note.label = pattern.sub('', note.label)
Example #14
def _to_snake_case(camel_case_key: str, pattern: Pattern) -> str:
    return pattern.sub('_', camel_case_key).lower()
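A plausible pattern for this helper is a zero-width match before each uppercase letter (except at the start), so sub('_') inserts an underscore at that position and lower() finishes the conversion; the pattern is an assumption, not from the source:

import re

camel_boundary = re.compile(r"(?<!^)(?=[A-Z])")

print(_to_snake_case("camelCaseKey", camel_boundary))  # camel_case_key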
Example #15
def sub_or_none(pattern: Pattern,
                repl: Union[AnyStr, Callable[[Match[AnyStr]], AnyStr]],
                string: AnyStr,
                count: int = 0) -> AnyStr:
    return pattern.sub(repl, string, count) if string is not None else None
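A usage sketch; the point of the wrapper is that a None input comes back as None instead of raising:

import re

digits = re.compile(r"\d+")

print(sub_or_none(digits, "#", "room 42"))  # room #
print(sub_or_none(digits, "#", None))       # None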
Example #16
def process(
    path: Path,
    locale: str,
    re_download_link: re.Pattern,
    re_old_versions: re.Pattern,
    re_no_old_versions: re.Pattern,
    re_change_log: re.Pattern,
    change_log: str,
):
    # print(f"Processing {path}") # debug

    with open(path, "r", encoding="utf-8") as fi:
        text = fi.read()

    text = re_no_old_versions.sub("", text)

    matches = list(re_download_link.finditer(text))

    if len(matches) == 0:
        print(f"Download link not found in: {path}")
        return

    hasSinglePlugin = len(matches) == 1
    for mt in matches:
        plugin_name = mt.groups()[0]
        major_version = mt.groups()[1]
        minor_version = mt.groups()[2]
        patch_version = mt.groups()[3]
        download_url = mt.groups()[4]

        source_version = get_source_version(plugin_name)
        if (major_version != source_version[0]
                or minor_version != source_version[1]
                or int(patch_version) + 1 != int(source_version[2])):
            src_ver = ".".join(source_version)
            man_ver = ".".join([major_version, minor_version, patch_version])
            print(
                f"Warning: {plugin_name} version mismatch. source {src_ver} manual {man_ver}"
            )

        # Update download link.
        new_version = f"{major_version}.{minor_version}.{int(patch_version) + 1}"
        new_download_url = f"https://github.com/ryukau/VSTPlugins/releases/download/{release_name}/{plugin_name}_{new_version}.zip"
        new_link = compose_download_link(locale, plugin_name, new_version,
                                         new_download_url)
        if new_link is None:
            continue

        pos = mt.start()
        text = text[:pos] + re_download_link.sub(new_link, text[pos:], count=1)

        # Add change log.
        if hasSinglePlugin:
            text = re_change_log.sub(
                lambda exp: f"{exp.group()}\n- {new_version}{change_log}",
                text,
                count=1)
        else:
            pos = re_change_log.search(text).end()
            text = text[:pos] + re.sub(
                f"### {plugin_name}",
                f"### {plugin_name}\n- {new_version}{change_log}",
                text[pos:],
                count=1)

        # Add old download link to Old Versions section.
        old_version = f"{major_version}.{minor_version}.{patch_version}"
        old_version_link = f"- [{plugin_name} {old_version} - VST 3 (github.com)]({download_url})"

        if hasSinglePlugin:
            text = re_old_versions.sub(
                lambda exp: f"{exp.group()}\n{old_version_link}",
                text,
                count=1)
        else:
            pos = re_old_versions.search(text).end()
            text = text[:pos] + re.sub(
                f"### {plugin_name}",
                f"### {plugin_name}\n{old_version_link}",
                text[pos:],
                count=1)

    out_dir = Path("out") / Path(path.parts[-2])
    out_dir.mkdir(parents=True, exist_ok=True)
    with open(out_dir / Path(path.name), "w", encoding="utf-8") as fi:
        fi.write(text)