Example #1
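# Walk up the file's parent directories and return the first one containing an entry
# matching the root pattern (exact name or glob); otherwise apply the configured fallback.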
def _find_root(nvim: Nvim, _pattern: Any, filename: str,
               bufnr: int) -> Optional[str]:
    pattern: Optional[RootPattern] = _DECODER(_pattern)
    path = Path(filename)

    if not pattern:
        return str(get_cwd(nvim))
    else:
        for parent in path.parents:
            for member in parent.iterdir():
                name = member.name
                if name in pattern.exact:
                    return str(parent)
                else:
                    for glob in pattern.glob:
                        if fnmatch(name, glob):
                            return str(parent)
        else:
            if pattern.fallback is RPFallback.none:
                return None
            elif pattern.fallback is RPFallback.cwd:
                return str(get_cwd(nvim))
            elif pattern.fallback is RPFallback.home:
                return str(Path.home())
            elif pattern.fallback is RPFallback.parent:
                return str(path.parent)
            else:
                never(pattern)
Example #2
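# Decorate a completion's kind with an icon glyph according to the configured icon mode.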
def iconify(icons: Icons, completion: Completion) -> Completion:
    if not completion.icon_match:
        return completion
    else:
        alias = (
            icons.aliases.get(completion.icon_match) or completion.icon_match
        )
        kind = icons.mappings.get(alias)
        if not kind:
            return completion
        else:
            if icons.mode is IconMode.none:
                return completion

            elif icons.mode is IconMode.short:
                return replace(completion,
                               kind=kind + (icons.spacing - 1) * " ")

            elif icons.mode is IconMode.long:
                spc = max(1, icons.spacing) * " "
                new_kind = (f"{kind}{spc}{completion.kind}" if completion.kind
                            else kind + (icons.spacing - 1) * " ")
                return replace(completion, kind=new_kind)

            else:
                never(icons.mode)
Example #3
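# Map a help topic to its markdown document and URI.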
def _directory(topic: _Topics) -> Tuple[Path, str]:
    if topic is _Topics.index:
        return MD_README, URI_README
    elif topic is _Topics.config:
        return MD_CONF, URI_CONF
    elif topic is _Topics.keybind:
        return MD_KEYBIND, URI_KEYBIND
    elif topic is _Topics.snips:
        return MD_SNIPS, URI_SNIPS
    elif topic is _Topics.fuzzy:
        return MD_FUZZY, URI_FUZZY
    elif topic is _Topics.comp:
        return MD_COMPLETION, URI_COMPLETION
    elif topic is _Topics.display:
        return MD_DISPLAY, URI_DISPLAY
    elif topic is _Topics.sources:
        return MD_SOURCES, URI_SOURCES
    elif topic is _Topics.misc:
        return MD_MISC, URI_MISC
    elif topic is _Topics.stats:
        return MD_STATS, URI_STATISTICS
    elif topic is _Topics.perf:
        return MD_PREF, URI_PREF
    elif topic is _Topics.custom_sources:
        return MD_C_SOURCES, URI_C_SOURCES
    else:
        never(topic)
Example #4
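# Pick the tokenizer for the given snippet grammar.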
def _parser(grammar: SnippetGrammar) -> Callable[[Context, ParseInfo, str], Parsed]:
    if grammar is SnippetGrammar.lsp:
        return lsp_tokenizer
    elif grammar is SnippetGrammar.snu:
        return snu_tokenizer
    else:
        never(grammar)
Example #5
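    # Yield one sort-key component per requested sort criterion for the node.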
    def cont() -> Iterator[Any]:
        for sb in sortby:
            if sb is Sortby.is_folder:
                yield _CompVals.FOLDER if is_dir(node) else _CompVals.FILE
            elif sb is Sortby.ext:
                yield strxfrm(node.ext or "")
            elif sb is Sortby.file_name:
                yield strxfrm(node.name)
            else:
                never(sb)
Example #6
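# Fold a token stream into parsed text, a cursor position, and regions;
# unbalanced tokens are collected and reported via ParseError.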
def token_parser(context: ParserCtx, stream: TokenStream) -> Parsed:
    idx = 0
    raw_regions: MutableMapping[int, MutableSequence[Region]] = {}
    slices: MutableSequence[str] = []
    begins: MutableSequence[Tuple[int, Union[Begin, DummyBegin]]] = []
    bad_tokens: MutableSequence[Tuple[int, Token]] = []

    for token in stream:
        if isinstance(token, Unparsed):
            bad_tokens.append((idx, token))
        elif isinstance(token, str):
            idx += len(encode(token))
            slices.append(token)
        elif isinstance(token, Begin):
            begins.append((idx, token))
        elif isinstance(token, DummyBegin):
            begins.append((idx, token))
        elif isinstance(token, End):
            if begins:
                pos, begin = begins.pop()
                if isinstance(begin, Begin):
                    acc = raw_regions.setdefault(begin.idx, [])
                    acc.append(Region(begin=pos, end=idx, text=""))
            else:
                bad_tokens.append((idx, token))
        else:
            never(token)

    bad_tokens.extend(begins)
    text = "".join(slices)
    min_key = (
        min(raw_regions.keys(), key=lambda i: (i == 0, i))
        if raw_regions
        else -1
    )
    cursor = next(
        iter(raw_regions.get(min_key, ())),
        Region(begin=len(encode(text)), end=0, text=""),
    ).begin

    if bad_tokens:
        tpl = """
        Bad tokens :: Most likely unbalanced `{…}` - ${bad_tokens}
        Parsed: |-
        ${text}
        Original: |-
        ${ctx}
        """
        msg = Template(dedent(tpl)).substitute(bad_tokens=bad_tokens,
                                               text=text,
                                               ctx=context.text)
        raise ParseError(msg)

    regions = tuple(_consolidate(text, regions=raw_regions))
    parsed = Parsed(text=text, cursor=cursor, regions=regions)
    return parsed
Example #7
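# Resolve a documentation topic to its markdown file and URI.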
def _directory(topic: _Topics) -> Tuple[Path, str]:
    if topic is _Topics.index:
        return README_MD, README_URI
    elif topic is _Topics.features:
        return FEATURES_MD, FEATURES_URI
    elif topic is _Topics.keybind:
        return KEYBIND_MD, KEYBIND_URI
    elif topic is _Topics.config:
        return CONFIGURATION_MD, CONFIGURATION_URI
    elif topic is _Topics.theme:
        return THEME_MD, THEME_URI
    elif topic is _Topics.migration:
        return MIGRATION_MD, MIGRATION_URI
    else:
        never(topic)
Example #8
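    # Yield the low and high rows touched by each edit, depending on its type.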
    def cont() -> Iterator[int]:
        for e in chain((edit, ), edits):
            if isinstance(e, ContextualEdit):
                lo = row - (len(e.old_prefix.split(ctx.linefeed)) - 1)
                hi = row + (len(e.old_suffix.split(ctx.linefeed)) - 1)
                yield from (lo, hi)

            elif isinstance(e, RangeEdit):
                (lo, _), (hi, _) = e.begin, e.end
                yield from (lo, hi)

            elif isinstance(e, Edit):
                yield row

            else:
                never(e)
Example #9
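# fzf driver: preview a commit's diff for the file, print the selection, or list the file's commit log.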
async def main() -> int:
    mode, lines, args = _parse_args()

    if mode is Mode.preview:
        sha, *_ = _parse_lines(lines)
        await _fzf_rhs(args.unified, sha=sha, path=args.path)

    elif mode is Mode.execute:
        stdout.write(join(chain(_parse_lines(lines), (normcase(args.path), ))))

    elif mode is Mode.normal:
        commits = await _git_file_log(args.path)
        await run_fzf(commits)

    else:
        never(mode)

    return 0
Example #10
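# fzf driver: preview a (sha, path) entry, git-show the selected entries, or list dead files from git history.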
async def main() -> int:
    mode, lines, _ = _parse_args()

    if mode is Mode.preview:
        (sha, path), *_ = _parse_lines(lines)
        await _fzf_rhs(sha, path=PurePath(path))

    elif mode is Mode.execute:
        await _git_show_many(_parse_lines(lines))

    elif mode is Mode.normal:
        paths = [path async for path in _git_dead_files()]
        await _fzf_lhs(paths)

    else:
        never(mode)

    return 0
Example #11
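# fzf driver: pretty-print a commit for preview, print the selected lines, or list commits to pick from.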
async def main() -> int:
    mode, lines, args = _parse_args()

    if mode is Mode.preview:
        sha, *_ = _parse_lines(lines)
        await pretty_commit(args.unified, sha=sha)

    elif mode is Mode.execute:
        stdout.write(join(_parse_lines(lines)))

    elif mode is Mode.normal:
        commits = [el async for el in _git_ls_commits()]
        await _fzf_lhs(commits)

    else:
        never(mode)

    return 0
Example #12
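# fzf driver: pretty-print a commit for preview, print selections, or list commits matching the search terms.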
async def main() -> int:
    mode, lines, args = _parse_args()

    if mode is Mode.preview:
        sha, *_ = _parse_lines(lines)
        await pretty_commit(args.unified, sha=sha)

    elif mode is Mode.execute:
        stdout.write(join(_parse_lines(lines)))

    elif mode is Mode.normal:
        commits = await _ls_commits(args.regex, *args.search)
        await run_fzf(commits)

    else:
        never(mode)

    return 0
Example #13
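# fzf driver: preview a file's git blame, print the selected lines, or list tracked files.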
async def main() -> int:
    mode, lines, _ = _parse_args()

    if mode is Mode.preview:
        preview_path, *_ = lines
        await _git_show_blame(PurePath(preview_path))

    elif mode is Mode.execute:
        stdout.write(join(lines))

    elif mode is Mode.normal:
        paths = [el async for el in _git_ls_files()]
        await _fzf_lhs(paths)

    else:
        never(mode)

    return 0
Example #14
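# Translate the primary edit (dispatching on its type) and the secondary range edits into EditInstructions.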
def _instructions(
    ctx: Context,
    unifying_chars: AbstractSet[str],
    smart: bool,
    lines: _Lines,
    primary: Edit,
    secondary: Sequence[RangeEdit],
) -> Iterator[EditInstruction]:
    if isinstance(primary, RangeEdit):
        inst = _range_edit_trans(
            unifying_chars,
            smart=smart,
            ctx=ctx,
            primary=True,
            lines=lines,
            edit=primary,
        )
        yield inst

    elif isinstance(primary, ContextualEdit):
        inst = _contextual_edit_trans(ctx, lines=lines, edit=primary)
        yield inst

    elif isinstance(primary, Edit):
        inst = _edit_trans(unifying_chars,
                           smart=smart,
                           ctx=ctx,
                           lines=lines,
                           edit=primary)
        yield inst

    else:
        never(primary)

    for edit in secondary:
        yield _range_edit_trans(
            unifying_chars,
            smart=smart,
            ctx=ctx,
            primary=False,
            lines=lines,
            edit=edit,
        )
Example #15
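# fzf driver: preview a file's diff between two revisions, print selections, or list files that differ.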
async def main() -> int:
    mode, lines, args = _parse_args()
    older, newer = args.older, args.newer

    if mode is Mode.preview:
        path, *_ = _parse_lines(lines)
        await _fzf_rhs(args.unified,
                       older=older,
                       newer=newer,
                       path=PurePath(path))

    elif mode is Mode.execute:
        stdout.write(join(_parse_lines(lines)))

    elif mode is Mode.normal:
        files = await _git_file_diff(older=older, newer=newer)
        await run_fzf(files)

    else:
        never(mode)

    return 0
Example #16
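# Render the response either as sorted JSON or as a terminal-width pretty report of matches.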
def pprn(fmt: PrintFmt, text: str, resp: Resp, l_pad: int) -> Iterator[str]:
    if fmt is PrintFmt.json:
        yield dumps(
            recur_sort(encode(resp)),
            check_circular=False,
            ensure_ascii=False,
        )
    elif fmt is PrintFmt.pretty:
        cols, _ = get_terminal_size()
        yield "#" * cols
        yield linesep

        yield resp.language.name
        yield linesep * 2

        for match in _parse_matches(text, resp.matches):
            yield cols * "*"
            yield linesep
            yield from _pprn_match(match, l_pad=l_pad)

        yield "#" * cols
    else:
        never(fmt)
Example #17
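# Select icon glyphs and text/icon colour mappings, then build the highlight groups and HLcontext.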
def load_theme(
    nvim: Nvim,
    artifact: Artifact,
    particular_mappings: HLGroups,
    discrete_colours: Mapping[str, str],
    icon_set: IconGlyphSetEnum,
    icon_colour_set: IconColourSetEnum,
    text_colour_set: Union[LSColoursEnum, TextColourSetEnum],
) -> Tuple[IconGlyphs, HLcontext]:

    if icon_set is IconGlyphSetEnum.ascii:
        icons = artifact.icons.ascii
    elif icon_set is IconGlyphSetEnum.devicons:
        icons = artifact.icons.devicons
    elif icon_set is IconGlyphSetEnum.emoji:
        icons = artifact.icons.emoji
    else:
        never(icon_set)

    if text_colour_set is LSColoursEnum.env and "LS_COLORS" not in environ:
        text_colour_set = LSColoursEnum.solarized_dark_256

    if isinstance(text_colour_set, LSColoursEnum):
        if text_colour_set is LSColoursEnum.env:
            _lsc = environ.get("LS_COLORS", "")
        elif text_colour_set is LSColoursEnum.solarized_dark_256:
            _lsc = artifact.ls_colours.solarized_dark_256
        elif text_colour_set is LSColoursEnum.solarized_light:
            _lsc = artifact.ls_colours.solarized_light
        elif text_colour_set is LSColoursEnum.solarized_dark:
            _lsc = artifact.ls_colours.solarized_dark
        elif text_colour_set is LSColoursEnum.solarized_universal:
            _lsc = artifact.ls_colours.solarized_universal
        elif text_colour_set is LSColoursEnum.nord:
            _lsc = artifact.ls_colours.nord
        elif text_colour_set is LSColoursEnum.trapdoor:
            _lsc = artifact.ls_colours.trapdoor
        else:
            never(text_colour_set)

        lsc = parse_lsc(_lsc, discrete_colours=discrete_colours)
        mode_pre = lsc.mode_pre
        mode_post = lsc.mode_post
        ext_exact = lsc.exts
        name_exact: Mapping[str, HLgroup] = {}
        name_glob = lsc.name_glob
    else:
        if text_colour_set is TextColourSetEnum.nerdtree_syntax_light:
            text_colour = artifact.text_colours.nerdtree_syntax_light
        elif text_colour_set is TextColourSetEnum.nerdtree_syntax_dark:
            text_colour = artifact.text_colours.nerdtree_syntax_dark
        else:
            never(text_colour_set)

        mode_pre = {}
        mode_post = {}
        ext_exact = gen_hl(FM_HL_PREFIX, mapping=text_colour.ext_exact)
        name_exact = gen_hl(FM_HL_PREFIX, mapping=text_colour.name_exact)
        name_glob = gen_hl(FM_HL_PREFIX, mapping=text_colour.name_glob)

    icon_exts = gen_hl(FM_HL_PREFIX, mapping=artifact.icon_colours.github)

    groups = tuple(
        chain(
            icon_exts.values(),
            mode_pre.values(),
            mode_post.values(),
            ext_exact.values(),
            name_exact.values(),
            name_glob.values(),
        )
    )

    context = HLcontext(
        groups=groups,
        icon_exts=_trans(icon_exts),
        mode_pre=_trans(mode_pre),
        mode_post=_trans(mode_post),
        ext_exact=_trans(ext_exact),
        name_exact=_trans(name_exact),
        name_glob=_trans(name_glob),
        particular_mappings=particular_mappings,
    )

    return icons, context