Example #1
    def begin(self, interactive=False, **kwargs) -> (GenerationContext, list):
        from anoky.generation.default_special_forms_table import default_special_forms_table
        context_root_bindings = Record(
            default_generator=self,
            generator=self,
            domain=SDom,
            special_forms=default_special_forms_table(),
            macros=default_macro_table(),
            id_macros=default_id_macro_table(),
            interactive=interactive)
        context_root_bindings.update(kwargs)

        GC = GenerationContext(**context_root_bindings.__dict__)
        initialization_nodes = []

        # Prepend anoky unit initialization code
        # Something like:
        # import anoky.importer as __akyimp__
        # import anoky.module as __aky__
        # __macros__ = {}
        # __id_macros__ = {}
        # __special_forms__ = {}

        from anoky.generation.stubs import akyimport_init_code as aic
        from anoky.generation.stubs import macrostore_init_code as mic
        initialization_nodes.extend(aic)
        initialization_nodes.extend(mic)

        return GC, initialization_nodes
Example #2
    def begin(self, interactive=False, **kwargs) -> (GenerationContext, list):
        from anoky.generation.default_special_forms_table import default_special_forms_table
        context_root_bindings = Record(
            default_generator = self,
            generator = self,
            domain = SDom,
            special_forms = default_special_forms_table(),
            macros = default_macro_table(),
            id_macros = default_id_macro_table(),
            interactive=interactive
        )
        context_root_bindings.update(kwargs)

        GC = GenerationContext(**context_root_bindings.__dict__)
        initialization_nodes = []

        # Prepend anoky unit initialization code
        # Something like:
        # import anoky.importer as __akyimp__
        # import anoky.module as __aky__
        # __macros__ = {}
        # __id_macros__ = {}
        # __special_forms__ = {}

        from anoky.generation.stubs import akyimport_init_code as aic
        from anoky.generation.stubs import macrostore_init_code as mic
        initialization_nodes.extend(aic)
        initialization_nodes.extend(mic)


        return GC, initialization_nodes
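
Examples #1 and #2 are two copies of the generator's begin() method, differing only in formatting: it collects the root bindings (generator, domain, special-form and macro tables) into a Record, lets callers override them through **kwargs, wraps them in a GenerationContext, and returns that context together with the unit-initialization stub nodes to prepend. A minimal usage sketch follows, assuming the method lives on the DefaultGenerator class instantiated in Examples #4 and #5 (its import path is not shown in these excerpts).

# Hedged usage sketch for begin(); DefaultGenerator's import is omitted in
# the excerpts above and is assumed to be available here.
from anoky.expansion.default_macro_table import default_macro_table

code_generator = DefaultGenerator()

# begin() returns the root GenerationContext plus the initialization stub
# nodes (akyimport_init_code and macrostore_init_code) to prepend to a unit.
gc, init_nodes = code_generator.begin(interactive=True)

# Any extra keyword argument is folded into the root bindings; passing the
# default macro table explicitly here is purely for illustration.
gc2, _ = code_generator.begin(macros=default_macro_table())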
Example #3
    def expand_unit(self, unit: Node, **kwargs):

        assert isinstance(unit, Node)
        context_root_bindings = Record(
            default_expander=self,
            expander=self,
            macros=default_macro_table(),
            id_macros=default_id_macro_table(),
            special_forms=default_special_forms_table())
        context_root_bindings.update(kwargs)
        EC = ExpansionContext(**context_root_bindings.__dict__)
        #unit.cg_context = EC
        for element in unit:
            EC.expand(element)

        return EC
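
Example #3 is the matching expander entry point: expand_unit() asserts it was handed a Node, builds the root bindings (expander, macro tables, special forms) plus any keyword overrides, wraps them in an ExpansionContext, and expands every element of the unit in place before returning that context. A hedged usage sketch, assuming DefaultExpander (instantiated in Examples #4 and #5) is the class defining this method and that unit is a parsed Node produced elsewhere:

# Hedged usage sketch for expand_unit(); DefaultExpander's import is omitted
# in the excerpts and assumed here, and unit stands for an already-parsed
# anoky Node whose construction these snippets do not show.
code_expander = DefaultExpander()

# expand_unit() expands every element of the unit in place and returns the
# ExpansionContext carrying the macro and special-form tables it used.
ec = code_expander.expand_unit(unit)

# As with begin(), keyword arguments override individual root bindings, e.g.:
# ec = code_expander.expand_unit(unit, macros=default_macro_table())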
Example #4
from anoky.common.errors import CompilerError, TokenizingError
from anoky.streams.string_stream import StringStream
from anoky.generation.default_special_forms_table import default_special_forms_table
from anoky.expansion.default_macro_table import default_macro_table, default_id_macro_table
from anoky.syntax.token import is_token
from prompt_toolkit.history import InMemoryHistory
from prompt_toolkit import prompt
import argparse
import ast
import astpp
import sys
import traceback
import os
import anoky.syntax.tokens as Tokens
__parser__ = AnokyParser()
__macros__ = default_macro_table()
__id_macros__ = default_id_macro_table()
__special_forms__ = default_special_forms_table()
code_expander = DefaultExpander()
code_generator = DefaultGenerator()


def anoky_tokenize(stream, options):
    tokenized_node = __parser__.tokenize_into_node(stream,
                                                   emmit_restart_tokens=False)
    if options.print_tokens:
        print('\n——›–  Tokenized source  –‹——')
        for token in tokenized_node:
            print(str(token))
    errors = []
    for token in tokenized_node:
        if is_token(token, Tokens.ERROR):
            errors.append(token)
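
Example #4 is an excerpt from the interactive front end: it sets up the module-level parser, macro tables, expander, and generator (the imports for AnokyParser, DefaultExpander, and DefaultGenerator are omitted by the excerpt) and defines anoky_tokenize(), which tokenizes a stream, optionally prints the tokens, and collects error tokens. The sketch below shows one way the options object it consumes could be built with argparse, which the excerpt already imports; only the print_tokens attribute comes from the snippet, the rest of the wiring is an assumption.

# Hedged sketch: building an options object with a print_tokens attribute.
import argparse

arg_parser = argparse.ArgumentParser(description='anoky front end (sketch)')
# argparse maps --print-tokens to options.print_tokens automatically.
arg_parser.add_argument('--print-tokens', action='store_true',
                        help='dump the tokenized source before further processing')

options = arg_parser.parse_args(['--print-tokens'])
assert options.print_tokens is True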
Example #5
from anoky.common.errors import CompilerError, TokenizingError
from anoky.streams.string_stream import StringStream
from anoky.generation.default_special_forms_table import default_special_forms_table
from anoky.expansion.default_macro_table import default_macro_table, default_id_macro_table
from anoky.syntax.token import is_token
from prompt_toolkit.history import InMemoryHistory
from prompt_toolkit import prompt
import argparse
import ast
import astpp
import sys
import traceback
import os
import anoky.syntax.tokens as Tokens
__parser__ = AnokyParser()
__macros__ = default_macro_table()
__id_macros__ = default_id_macro_table()
__special_forms__ = default_special_forms_table()
code_expander = DefaultExpander()
code_generator = DefaultGenerator()


def anoky_tokenize(stream, options):
    tokenized_node = __parser__.tokenize_into_node(stream, emmit_restart_tokens=False)
    if options.print_tokens:
        print('\n——›–  Tokenized source  –‹——')
        for token in tokenized_node:
            print(str(token))
    errors = []
    for token in tokenized_node:
        if is_token(token, Tokens.ERROR):
            errors.append(token)
    if len(errors) > 0:
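
Example #5 repeats the same setup and continues past the token loop: error tokens are gathered with is_token(token, Tokens.ERROR), and the excerpt stops at the if len(errors) > 0: check, so the actual error reporting is not shown. Below is a hedged sketch of a minimal interactive read loop around anoky_tokenize(), reusing the prompt_toolkit imports the example already has; the prompt text, the StringStream constructor arguments, and the shape of options are assumptions rather than anoky's actual REPL code.

# Hedged REPL sketch around anoky_tokenize(); see the assumptions above.
import argparse

options = argparse.Namespace(print_tokens=True)  # stand-in for parsed CLI options
history = InMemoryHistory()                      # prompt_toolkit history, imported above

while True:
    try:
        source = prompt('anoky> ', history=history)
    except (EOFError, KeyboardInterrupt):
        break
    # Wrap the typed line so the parser can tokenize it; the StringStream
    # constructor call shown here is an assumption about its signature.
    stream = StringStream(source)
    anoky_tokenize(stream, options)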