Example #1
0
 def schedule_files(self):
   """Parse each file named in the flags and schedule the resulting module
   for compilation and output."""
   for filename in self.flags.file:
     # Use a context manager so the file handle is closed deterministically
     # instead of leaking until garbage collection.
     with open(filename, "rt") as source_file:
       source = source_file.read()
     tokens = token.tokenize(source)
     module = ast.Module(filename)
     parser.Parser(tokens, module).parse_program()
     self.schedule_for_compile(module)
     self.schedule_for_output(module)
Example #2
0
 def schedule_files(self):
   """Parse each file named in the flags (defaulting to no files) and
   schedule the resulting module for compilation and output."""
   files = self.flags.files or []
   for filename in files:
     # Use a context manager so the file handle is closed deterministically
     # instead of leaking until garbage collection.
     with open(filename, "rt") as source_file:
       source = source_file.read()
     tokens = token.tokenize(source)
     module = ast.Module(filename)
     nparser.Parser(tokens, module).parse_program()
     self.schedule_for_compile(module)
     self.schedule_for_output(module)
Example #3
0
 def run_parse_input(self, inputs, parse_thunk):
   """Tokenize each input expression, parse it with parse_thunk, and
   schedule the resulting unit for compilation and output."""
   for expr in inputs:
     tokens = token.tokenize(expr)
     unit = parse_thunk(tokens)
     # Implicitly import the core module into the oldest stage. There needs
     # to be a better model for this, but for now it helps make builtin
     # methods slightly less magic.
     unit.get_oldest_stage().add_import(data.Path(['core']))
     self.schedule_for_compile(unit)
     self.schedule_for_output(unit)
Example #4
0
 def run_parse_input(self, inputs, parse_thunk):
   """Tokenize each input expression, parse it with parse_thunk, and
   schedule the resulting unit for compilation and output."""
   for source_expr in inputs:
     unit = parse_thunk(token.tokenize(source_expr))
     # Pull the core module into the oldest stage implicitly; until a
     # better model for implicit imports exists, this keeps builtin
     # methods from being completely magic.
     unit.get_oldest_stage().add_import(data.Path(['core']))
     self.schedule_for_compile(unit)
     self.schedule_for_output(unit)
Example #5
0
 def answer_query(self, query):
     """Answer a query by dispatching on its token pattern.

     Patterns handled, in order:
     - "Is ... ?"                      -> property lookup on the middle tokens
     - 7 tokens with "in" third-last   -> unit conversion
     - 13 tokens, "=" third-last, "+"  -> simple additive solve for x
     - 13 tokens, "=" third-last, "*"  -> multiplicative solve for x
     - anything else                   -> infix expression evaluation
     """
     tokens = token.tokenize(query)
     if tokens[0][1] == 'Is' and tokens[-1][1] == '?':
         return property(tokens[2:-1])
     if len(tokens) == 7 and tokens[-3][1] == 'in':
         return convert(tokens)
     if len(tokens) == 13 and tokens[-3][1] == '=' and tokens[6][1] == '+':
         return "x = " + str(solveSimple(tokens))
     if len(tokens) == 13 and tokens[-3][1] == '=' and tokens[6][1] == '*':
         return "x = " + str(solveMult(tokens))
     try:
         return infix_parser.infix_eval(query)
     except Exception:
         # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
         # still propagate. 85 is the pre-existing fallback answer for
         # queries that fail to evaluate — preserved for compatibility.
         return 85
Example #6
0
if __name__ == '__main__':
    # Built-in smoke-test inputs for the tokenizer.
    default_examples = [
        '뿡',
        '뿍뿍',
        '뽁뽁',
        '~',
        '뿌직',
        '쀼직',
        '뽀옹',
        '뽀뽀옹',
        '북',
        '부북',
        '부부북',
        '부부부북',
        '=3',
        '==3',
        '빵',
        '빠아앙',
    ]

    # BUG FIX: the loop previously iterated an undefined name `examples`,
    # raising NameError; it now iterates the list defined above.
    for idx, example in enumerate(default_examples):
        print(f'##### Test [{idx}] - {example}')
        result = tokenize(example)
        print(f'-> {result}')

    # One combined input exercising every token shape in sequence.
    mix_example = '뿡뿍뿍뽁뽁~뿌직쀼직뽀옹뽀뽀옹북부북부부북부부부북=3==3빵빠아앙'

    print(f'###### Final Test - {mix_example}')
    result = tokenize(mix_example)
    print(f'-> {result}')
Example #7
0
 def parse_source_file(self, name):
   """Read and parse the named source file into this object's module."""
   # Use a context manager so the file handle is closed deterministically
   # instead of leaking until garbage collection.
   with open(name, "rt") as source_file:
     source = source_file.read()
   tokens = token.tokenize(source)
   parser.Parser(tokens, self.module).parse_program()
Example #8
0
 def parse_manifest(self):
   """Read this object's manifest file and return the parsed module manifest."""
   # Use a context manager so the file handle is closed deterministically
   # instead of leaking until garbage collection.
   with open(self.manifest_file, "rt") as manifest:
     source = manifest.read()
   tokens = token.tokenize(source)
   return parser.ModuleParser(tokens).parse_module_manifest()
Example #9
0
def main(input_file: str, debug: bool = False):
    """Read the program at input_file and run each tokenized op on a fresh
    Interpreter (memory size 20), optionally in debug mode."""
    with open(input_file, 'r') as src:
        program_text = src.read()
        interp = Interpreter(20, debug)
        for op in tokenize(program_text):
            interp.run(op)
Example #10
0
 def parse_source_file(self, name):
   """Read and parse the named source file into this object's module."""
   # Use a context manager so the file handle is closed deterministically
   # instead of leaking until garbage collection.
   with open(name, "rt") as source_file:
     source = source_file.read()
   tokens = token.tokenize(source)
   nparser.Parser(tokens, self.module, name).parse_program()
Example #11
0
 def parse_manifest(self):
   """Read this object's manifest file and return the parsed module manifest."""
   # Use a context manager so the file handle is closed deterministically
   # instead of leaking until garbage collection.
   with open(self.manifest_file, "rt") as manifest:
     source = manifest.read()
   tokens = token.tokenize(source)
   return nparser.ModuleParser(tokens).parse_module_manifest()