Example #1
def collect_ast():
    type_spec: dict = collections.defaultdict(set)

    def is_python(path: str):
        return path.endswith('.py') or Path(path).is_dir()

    all_python_files = linq.Flow(
        Path(Redy.__file__).parent().collect(is_python)).concat(
            Path(flask.__file__).parent().collect(is_python))._

    for each in all_python_files:
        with each.open('r', encoding='utf8') as file:
            try:
                ast_of_src_code = ast.parse(file.read())
                service = CollectASTTypeStub(type_spec)
                feature(service).just_apply_ast_transformation(ast_of_src_code)
            except SyntaxError:
                # source written for a different Python version; skip it
                pass

    def snd(tp):
        return tp[1]

    stub_code = \
    (linq.Flow(type_spec)
         .map(lambda class_name, fields:
                linq.Flow(fields)
                    .group_by(lambda fst, snd: fst)
                    .map(lambda field_name, pairs:
                            '{}: {}'.format(
                            field_name,
                            compose(
                                str,
                                curry(reduce)(lambda a, b: a.union(b)),
                                curry(map)(snd))(pairs)))
                    .then(
                        compose(
                            'class {}(AST):\n'.format(class_name).__add__,
                            lambda _: textwrap.indent(_, " " * 4),
                            lambda any_code: any_code if any_code else 'pass',
                            '\n'.join))
                    ._)
         .then(
            compose(
                'import typing, abc\nNoneType = None\n'.__add__,
                'class AST(abc.ABC):\n    def __init__(self, *args, lineno: int=None, colno: int=None, **kwargs): pass\n'.__add__,
                '\n'.join
                )))._

    with Path('./').into('ast.pyi').open('w', encoding='utf8') as stub:
        stub.write(stub_code)

    with Path(ast.__file__).parent().into('ast.pyi').open(
            'w', encoding='utf8') as stub:
        stub.write(stub_code)
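
A dependency-free sketch of what the linq.Flow pipeline above computes, assuming type_spec maps each class name to a set of (field_name, type_set) pairs as CollectASTTypeStub records them; this is only an illustration of the grouping and union logic, not the original implementation:

def render_stub_sketch(type_spec: dict) -> str:
    import textwrap
    from functools import reduce

    class_blocks = []
    for class_name, fields in type_spec.items():
        # group the observed type sets by field name
        grouped = {}
        for field_name, type_set in fields:
            grouped.setdefault(field_name, []).append(type_set)
        # one "field: union-of-types" line per field, or `pass` for an empty class
        body = '\n'.join(
            '{}: {}'.format(name, reduce(lambda a, b: a.union(b), sets))
            for name, sets in grouped.items()) or 'pass'
        class_blocks.append('class {}(AST):\n'.format(class_name) +
                            textwrap.indent(body, ' ' * 4))
    return ('import typing, abc\nNoneType = None\n'
            'class AST(abc.ABC):\n'
            '    def __init__(self, *args, lineno: int=None, colno: int=None, **kwargs): pass\n' +
            '\n'.join(class_blocks))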
Example #2
def gen(i: 'input filename', o: 'output filename'):
    """
    generate python source code for dbg-lang
    """
    with Path(i).open('r') as fr:
        code = fr.read()
    res = parse(code)
    check_parsing_complete(code, res.tokens, res.state)

    with Path(o).open('w') as fw:
        fw.write(code_gen(res.result))
Example #3
def init_trinity_service() -> Module:
    module_dir = Path(__file__).parent()

    cs_proj_file = module_dir.into("Dependencies.csproj")

    print(cs_proj_file)
    # restore
    call(["dotnet", "restore", f'"{cs_proj_file}"', '--packages', f'"{Env.nuget_root}"'])

    # search dlls
    with open(str(cs_proj_file)) as file:
        deps = [Dependency(package_name=ref.attrs['include'],
                           version=ref.attrs["version"]).all()
                for ref in
                BeautifulSoup(file, "lxml").select('packagereference')
                ] | Collect | FilterDLL

    libs = [
               Library('GraphEngine.{}'.format(module), version='2.0.9328', where='runtimes/win-x64/native').all()
               for module in ['Core', 'FFI', 'Jit']
           ] | Collect | FilterDLL

    sys.path.append(str(module_dir.parent()))

    for each_lib in libs:
        ctypes.cdll.LoadLibrary(each_lib)

    for each_dep in deps:
        clr.AddReference(each_dep)

    graph_engine_config_path = Env.graph_engine_config_path

    __Trinity = __import__('Trinity')

    __Trinity.TrinityConfig.StorageRoot = str(graph_engine_config_path.into('storage'))

    # __Trinity.TrinityConfig.LoggingLevel = __Trinity.Diagnostics.LogLevel.Info

    __Trinity.TrinityConfig.LoadConfig(str(graph_engine_config_path.into("trinity.xml")))

    __Trinity.Global.Initialize()

    __ffi = __import__('ffi')

    __ffi.Init()

    __import__('Trinity.Storage')
    __import__('Trinity.Storage.Composite')
    __import__('Trinity.FFI')
    __import__('Trinity.FFI.Metagen')

    Env.Trinity = __Trinity
    return __Trinity
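
Collect and FilterDLL above come from elsewhere in the project; purely as an assumption about their shape, a pipe-style helper like the one below (using __ror__ so that `iterable | Helper` applies a function) would be consistent with how they are used:

class _Pipe:
    """Hypothetical: wraps a function so `xs | _Pipe(f)` evaluates f(xs)."""
    def __init__(self, fn):
        self.fn = fn

    def __ror__(self, xs):
        return self.fn(xs)

# e.g. Collect might flatten the nested lists returned by `.all()`,
# and FilterDLL might keep only paths ending in ".dll".
Collect = _Pipe(lambda xss: [x for xs in xss for x in xs])
FilterDLL = _Pipe(lambda xs: [x for x in xs if str(x).lower().endswith('.dll')])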
Example #4
def from_file(input: 'filename', to: 'filename'):
    """
    from python source to json file
    """
    path = Path(input)
    with path.open('r') as fr, Path(to).open('w') as fw:
        try:
            data = to_dict(ast.parse(fr.read()))
            data['name'] = path.relative()[:-3]  # remove `.py`
            json.dump([str(path), data], fw, indent=2)
        except SyntaxError as e:
            print(e)
Example #5
    def try_find(prospective_path):
        path_secs = (prospective_path, *names.split('.'))
        *init, end = path_secs
        directory = Path(*init)
        if not directory.is_dir():
            return
        for each in directory.list_dir():
            each_path_str = each.relative()
            # print(each_path_str, end)
            if each_path_str == end + '.py':
                module_path = directory.into(each_path_str)
                yield get_yapypy_module_spec_from_path(names, str(module_path))

            elif (each_path_str == end and each.is_dir()
                  and '__init__.py' in each):
                yield from try_find(str(each))
Example #6
def build():
    dir, _ = path.split(__file__)
    dir = path.join(dir, 'src')

    with open(path.join(dir, 'dynamic.json'), 'rb') as f:
        dy : list = json.load(f)

    for each in dy:
        each['release_date'] = parse_date(each['release_date'])
    dy = sorted(dy, key=lambda each: each['release_date'], reverse=True)
    with open(path.join(dir, 'index.rst'), 'w', encoding='utf8') as w, \
         open(path.join(dir, 'index.rst.template'), 'r', encoding='utf8') as r:
        write = w.write
        write(r.read())
        write('\n')
        for each in dy:
            title = each['title']
            where = each['where']
            where = "./" + "/".join(where.split('.')) + '.html'
            time = each['release_date']
            keywords = each['keywords']
            write('\n')
            write(card(
                title=title,
                link=where,
                time=time,
                keywords=keywords))

    os.system('sphinx-build -b html ./src ./')
    for each_static in Path("./src/BackupStatic").list_dir():
        each_static.move_to("./Backup/")
Example #7
def clean():
    for each in Path('.').list_dir():
        filename = each.relative()
        if filename.startswith('.') or filename in ('src', 'manage.py',
                                                    '_config.yml',
                                                    'favicon.ico'):
            continue
        each.delete()
Example #8
    def dump(self, file_repr: typing.Union[str, io.TextIOWrapper, Path]):
        if isinstance(file_repr, str):
            return self.dump(Path(file_repr))

        if isinstance(file_repr, Path):
            return self.dump(file_repr.open('w'))

        with file_repr:
            file_repr.write(self.dumps())
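
A minimal usage sketch of the dispatching dump above, showing that every call form ends up in the file-object branch:

# `cfg` is a hypothetical object exposing the `dump` method above and a
# `dumps()` returning str; Path is Redy's Path as in the other examples.
cfg.dump('settings.txt')              # str  -> wrapped in Path, reopened for writing
cfg.dump(Path('settings.txt'))        # Path -> opened for writing
cfg.dump(open('settings.txt', 'w'))   # file object -> written and closed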
Example #9
def compile_ex_python_from_filename(filename):
    with Path(filename).open('r') as fr:
        source_code = fr.read()
        result = parse_ext_py(source_code)
    result.state.filename = filename
    check_parsing_complete(source_code, result.tokens, result.state)
    ast = result.result
    code = compile(ast, filename, "exec")
    return code
Example #10
def compile_ex_python_from_filename(filename, is_entry_point=True):
    with Path(filename).open('r') as fr:
        source_code = fr.read()
        result = parse_ext_py(source_code)
    result.state.filename = filename
    # double check parsed result.
    check_parsing_complete(source_code, result.tokens, result.state)
    ast = result.result
    code = py_compile(ast, filename, is_entrypoint=is_entry_point)
    return code
Example #11
def from_code(input: 'text', to: 'filename'):
    """
    from python source code to json file
    """
    with Path(to).open('w') as fw:
        try:
            data = to_dict(ast.parse(input))
            data['name'] = 'Default'
            json.dump(['<stdin>', data], fw, indent=2)
        except SyntaxError:
            pass
Example #12
def find_reley_module_spec(names, reload=False):

    reley_paths = sys.path

    for reley_path in reley_paths:

        if not reload:
            spec = reley_module_specs.get(names)
            if spec:
                return spec

        path_secs = (reley_path, *names.split('.'))
        *init, end = path_secs
        directory = Path(*init)
        if not directory.is_dir():
            continue
        end = end + '.hs'
        for each in os.listdir(str(directory)):
            if each.lower() == end:
                module_path = directory.into(each)
                return get_reley_module_spec_from_path(names, str(module_path))
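
The spec returned above is a standard importlib ModuleSpec, so it can back a sys.meta_path finder; ReleyFinder below is not part of the snippet, just a hypothetical sketch of how find_reley_module_spec could be hooked into the import system:

import sys
from importlib.abc import MetaPathFinder

class ReleyFinder(MetaPathFinder):
    # hypothetical finder delegating to find_reley_module_spec above
    def find_spec(self, fullname, path=None, target=None):
        return find_reley_module_spec(fullname)

sys.meta_path.append(ReleyFinder())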
Example #13
def _compile(*filenames: str):
    for filename in filenames:
        code = compile_ex_python_from_filename(filename)
        timestamp = struct.pack('i', int(time.time()))
        marshalled_code_object = marshal.dumps(code)
        filename, ext = os.path.splitext(filename)
        filename = filename + '.pyc'
        with Path(filename).open('wb') as f:
            f.write(MAGIC_NUMBER)
            f.write(timestamp)
            f.write(b'A\x00\x00\x00')
            f.write(marshalled_code_object)
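
The three writes before the marshalled code form the pre-3.7 .pyc header: the interpreter magic, a 4-byte timestamp, and a 4-byte source-size field (the fixed b'A\x00\x00\x00' placeholder). A minimal sketch of reading such a file back, assuming that 12-byte layout:

import importlib.util
import marshal

def load_pyc_sketch(pyc_path):
    with open(pyc_path, 'rb') as f:
        assert f.read(4) == importlib.util.MAGIC_NUMBER  # magic
        f.read(4)                                        # timestamp
        f.read(4)                                        # source size field
        return marshal.loads(f.read())                   # the code object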
Example #14
def get_yapypy_module_spec_from_path(names, module_path):
    with Path(module_path).open('r') as fr:
        spec = ModuleSpec(names, YAPyPyLoader(names, module_path))
        __source__ = fr.read()
        result = parse(__source__, module_path)
        # pprint(result.result)
        check_parsing_complete(__source__, result.tokens, result.state)
        __bytecode__ = py_compile(result.result,
                                  filename=module_path,
                                  is_entrypoint=False)
        spec.__source__ = __source__
        spec.__bytecode__ = __bytecode__
        return spec
Example #15
def cc(f: 'input filename', o: 'output filename'):
    """
    compile reley source code into pyc files
    """
    spec = get_reley_module_spec_from_path('main', f)
    code = get_context_from_spec(spec).bc.to_code()
    timestamp = struct.pack('i', int(time.time()))
    marshalled_code_object = marshal.dumps(code)
    with Path(o).open('wb') as f:
        f.write(MAGIC_NUMBER)
        f.write(timestamp)
        f.write(b'A\x00\x00\x00')
        f.write(marshalled_code_object)
Example #16
class Env:
    nuget_root = Path('~/.nuget/packages')
    graph_engine_config_path = Path("~", ".graphengine")
    Trinity: Module

    @staticmethod
    def target_framework(name: str) -> bool:
        return name.startswith('netstandard')

    @property
    @cast(str)
    def meta_gen_include(self):
        return self.nuget_root.into(
            r'graphengine.ffi.metagen\2.0.9328\content\include')

    @property
    @cast(str)
    def meta_gen_lib(self):
        return self.nuget_root.into(
            r'graphengine.ffi.metagen\2.0.9328\content\win-x64')

    @property
    def current_offset(self):
        return self.Trinity.Storage.Composite.CompositeStorage.CurrentCellTypeOffset
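
The cast(str) decorator used on the properties above is not shown in this snippet; a hypothetical sketch consistent with its use here would simply pass the wrapped function's return value through the given converter:

def cast(converter):
    # hypothetical: wrap a function so its result is passed through `converter`
    def decorate(fn):
        def wrapped(*args, **kwargs):
            return converter(fn(*args, **kwargs))
        return wrapped
    return decorate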
Example #17
def get_reley_module_spec_from_path(names, module_path):
    with Path(module_path).open('r') as fr:
        spec = ModuleSpec(names, ReleyLoader(names, module_path))
        reley_module_specs[names] = spec
        code = fr.read()
        parse = get_parse_fn()

        result = parse(code, module_path)
        check_parsing_complete(code, result.tokens, result.state)
        ast = result.result
        ctx = Ctx({}, {}, Bytecode(), {}, False)
        ctx.visit(ast)
        ctx.bc.filename = module_path
        spec.source_code = code
        spec.context = ctx
        return spec
Example #18
def cc(
        filename: 'input source file',
        output: 'output file name; defaults to the input file with its suffix replaced by ".py"' = None,
        name: 'name of language' = 'unname'):
    """
    rbnf source code compiler.
    """

    lang = Language(name)

    with Path(filename).open('r') as fr:
        build_language(fr.read(), lang, filename)

    if not output:
        base, _ = os.path.splitext(filename)

        output = base + '.py'
    lang.dump(output)
Example #19
def py_compile(node, filename='<unknown>', is_entrypoint=False):
    if isinstance(node, Tag):
        ctx = _non_ctx.enter_new(node.tag)
        ctx.bc.filename = filename
        ctx.bc.name = '__main__' if is_entrypoint else splitext(
            Path(filename).relative())[0]
        try:
            py_emit(node.it, ctx)
        except SyntaxError as exc:
            exc.filename = filename
            raise exc
        try:
            return ctx.bc.to_code()
        except Exception as e:
            dump_bytecode(ctx.bc)
            raise e
    else:
        tag = to_tagged_ast(node)
        return py_compile(tag, filename, is_entrypoint=is_entrypoint)
Example #20
def py_compile(node, filename='<unknown>', is_entrypoint=False):
    if isinstance(node, Tag):
        ctx = _non_ctx.enter_new(node.tag)
        ctx.bc.filename = filename
        ctx.bc.name = '__main__' if is_entrypoint else splitext(
            Path(filename).relative())[0]

        ctx.bc.append(LOAD_GLOBAL('type'))
        ctx.bc.append(STORE_GLOBAL('.yapypy.type'))
        ctx.bc.append(LOAD_GLOBAL('locals'))
        ctx.bc.append(STORE_GLOBAL('.yapypy.locals'))

        try:
            py_emit(node.it, ctx)
        except SyntaxError as exc:
            exc.filename = filename
            raise exc
        return ctx.bc.to_code()
    else:
        tag = to_tagged_ast(node)
        return py_compile(tag, filename, is_entrypoint=is_entrypoint)
Example #21
def get_parse_fn():
    global parse_fn

    if not parse_fn:
        language = Language('reley')
        file_path = Path(__file__).parent().into('grammar.rbnf')
        build_language(RBNF, language, str(file_path))
        lexer, impl, namespace = language.lexer, language.implementation, language.namespace
        top_parser = language.named_parsers['module']

        def match(text, filename) -> ze.ResultDescription:
            state = State(impl, filename=filename)
            tokens = tuple(
                setattr(each, 'filename', filename) or each
                for each in lexer(text))
            result: Result = top_parser.match(tokens, state)

            return ze.ResultDescription(state, result.value, tokens)

        parse_fn = match

    return parse_fn
Example #22
def clean():
    for each in Path('.').list_dir():
        filename = each.relative()
        if filename in preserved:
            continue
        each.delete()
Example #23
from Redy.Tools.PathLib import Path

Path(".restrain/cython_rts").move_to("~/.restrain")
Example #24
                    last = last.last
                 
                ret = Cmd(instruction, args, kwargs, last)
                
                if and_then:                    
                    args = and_then.args or ()
                    ret = Cmd(and_then.inst, (ret, *args), and_then.kwargs, and_then.last)
                
                ret
                    
pattern := R'[^`\s\{\}\;]+'
Newline := '\n'

""")

    with Path(__file__).parent().into('_cmd_parser.py').open('w') as file_io:
        file_io.write(ze_exp._lang.dumps())

    from wisepy._cmd_parser import ulang

from rbnf.edsl.rbnf_analyze import check_parsing_complete
_command = ulang.named_parsers['command']
_impl = ulang.implementation


def parse(text: str, strict_match=False) -> ze.ResultDescription:
    tokens = tuple(ulang.lexer(text))

    state = State(_impl, filename='<rush>')

    result = _command.match(tokens, state)
Example #25
import rbnf.zero as ze
from Redy.Tools.PathLib import Path

with Path("./task.rbnf").open('r') as f:
    ze_exp = ze.compile(f.read(), use='Test')


with open(str(Path("./data.xml")), encoding='utf8') as f:
    text = f.read()
    result = ze_exp.match(text).result

for each in result:
    print(each)


Example #26
from setuptools import setup
from Redy.Tools.Version import Version
from Redy.Tools.PathLib import Path

with open('./README.md', encoding='utf-8') as f:
    readme = f.read()

version_filename = 'next_version'
with open(version_filename) as f:
    version = Version(f.read().strip())

with Path("./rbnf/__release_info__.py").open('w') as f:
    f.write('__VERSION__ = {}\n__AUTHOR__ = "thautwarm"'.format(
        repr(str(version))))

setup(name='rbnf',
      version=str(version),
      keywords='parser-generator, context-sensitive, ebnf',
      description="context sensitive grammar parser generator for CPython",
      long_description=readme,
      long_description_content_type='text/markdown',
      license='MIT',
      python_requires='>=3.6.0',
      url='https://github.com/thautwarm/Ruiko',
      author='thautwarm',
      author_email='*****@*****.**',
      packages=[
          'rbnf', 'rbnf.core', 'rbnf.core.parser_algo', 'rbnf.py_tools',
          'rbnf.bootstrap', 'rbnf.edsl', 'rbnf.auto_lexer', 'rbnf.std',
          'rbnf.zero'
      ],
Example #27
import rbnf.zero as ze
import sys, os
from rbnf.easy import build_parser
from Redy.Tools.PathLib import Path

pwd = str(Path(__file__).parent())
sys.path.append(pwd)
os.chdir(pwd)


def test_predicate():
    ze_exp = ze.compile("""
[python] import predicate_helpers.[*]
lexer_helper := R'.'
a ::= (_{is_ok})+
b ::= (_+){not_ok}
        """,
                        use='a')

    assert len(ze_exp.match("123234").result.item) == 2

    ze_exp = ze.compile("""
[python] import predicate_helpers.[*]
lexer_helper := R'.'
a ::= (_{is_ok})+
b ::= (_+){not_ok}
        """,
                        use='b')

    assert ze_exp.match("123234").result is None
    print(ze_exp.dumps())
Example #28
    def rewrite(state: MetaState):
        language: Tokenizer
        head: Tokenizer
        tail: typing.List[Tokenizer]
        import_items: typing.List[Tokenizer]
        python: Tokenizer
        path_secs = [head.value, *(each.value for each in tail or ())]
        if not import_items:
            requires = _Wild()
        else:
            requires = {each.value for each in import_items}

        if language or python:
            if python:
                warnings.warn(
                    "keyword `pyimport` is deprecated, "
                    "use [python] import instead.", DeprecationWarning)
            else:
                language = language.value
                if language != "python":
                    # TODO: c/c++, .net, java
                    raise NotImplementedError(language)

            lang: Language = state.data
            from_item = ".".join(path_secs)
            import_items = "*" if isinstance(
                requires, _Wild) else "({})".format(', '.join(requires))
            import_stmt = f"from {from_item} import {import_items}"
            lang._backend_imported.append(import_stmt)
            exec(import_stmt, lang.namespace)

        else:
            # TODO: this implementation is wrong, but the correct one requires separate ASTs and parsers.
            # See `rbnf.std.compiler`, which is correct although deprecated.

            possible_paths = [Path('./', *path_secs)]
            lang = state.data

            ruiko_home = os.environ.get('RBNF_HOME')

            if ruiko_home:
                possible_paths.append(Path(ruiko_home, *path_secs))

            for path in possible_paths:
                filename = str(path)
                if not filename.lower().endswith('.rbnf'):
                    filename = filename + '.rbnf'
                    path = Path(filename)
                if not path.exists():
                    continue

                with path.open('r') as file:
                    state = MetaState(rbnf.implementation,
                                      requires=requires,
                                      filename=str(path))
                    state.data = lang
                    _build_language(file.read(), state=state)
                if not requires:
                    break

            if requires and not isinstance(requires, _Wild):
                raise ImportError(requires)
Example #29
Keyword := 'test:' 'prepare:' '>>>' 'title:' 
NoSwitch ::= ~Keyword
Doctest ::= [(~'title:')* 'title:' name=(~NL)+]
            [(~'prepare:')* 'prepare:' (NoSwitch* '>>>' prepare_lines<<((~NL)+) NL+)*]
            (~'test:')* 'test:' (NoSwitch* '>>>' test_lines<<((~NL)+))* 
            ->
              prepare_lines = recover_codes(sum(prepare_lines, [])) if prepare_lines else ''
              test          = recover_codes(sum(test_lines, []))    if test_lines else ''
              return recover_codes(name) if name else None, prepare_lines, test
                
lexer   := R'.'
TestCase ::= [it=Doctest] _* -> it or None
""",
                    use='TestCase')

yapypy = Path('yapypy')


def dedent_all(text: str):
    while text.startswith(' ') or text.startswith('\t'):
        text = dedent(text)
    return text


class DocStringsCollector(ast.NodeVisitor):
    def __init__(self):
        self.docs = []

    def _visit_fn(self, node: ast.FunctionDef):
        head, *_ = node.body
Example #30
from Redy.Tools.PathLib import Path
import os
os.system('cd LLAST && dotnet.exe run')

for each in Path("ir-snippets").list_dir(lambda x: x.endswith('.ll')):
    print(f"run <{each}> :")
    os.system(f"llc-6.0 {str(each)!r} -o asm.s")
    os.system(f"gcc -C asm.s -o out")
    os.system(f"./out ; echo return: $?")

    Path("./out").delete()
Path("asm.s").delete()