Beispiel #1
0
 def get_result(self, test):
     """Parse *test* and return its AST rendered as a string, or the
     error message if parsing fails."""
     parser = Parser(Tokenizer(text=test))
     try:
         syntax_tree = parser.parse()
         return AstVizGen(syntax_tree).return_str_tree()
     except Error as err:
         return err.message
Beispiel #2
0
 def parse_file(self, filename):
     """Parse a Haskell source file.

     :param filename: path to the file; must end with ``.hs``
     :return: tuple ``(data_types, constructors, functions)`` collected by
              the parser, or ``None`` when *filename* is missing or does
              not end with ``.hs``
     """
     print("parse")
     # Guard clause replaces the original nested if/else pyramid;
     # `is None` replaces the non-idiomatic `== None` comparison.
     if filename is None or not filename.endswith(".hs"):
         return None
     p = Parser(filename)
     p.start_parse()
     return (p.data_types, p.constructors, p.functions)
Beispiel #3
0
def run(input_f, output_f):
    """Drive the pipeline: read source lines from *input_f*, parse them,
    and let the generator emit its result for *output_f*.

    :param input_f: path of the source file to read
    :param output_f: output target handed to ``Generator.generate``
    """
    # Fix: use a context manager so the input file is always closed
    # (the original never closed the handle).
    code = []
    with open(input_f, "r") as source:
        for raw in source:
            line = raw.replace('\n', '')
            # Drop blank lines; keep all other lines verbatim.
            if len(line) > 0:
                code.append(line)

    parser = Parser()
    gen = Generator()
    parser.set_code_lines(code)
    gen.generate(output_f, parser.parse())
Beispiel #4
0
def start():
    """Server entry point: load the field XML, build its Mapping, bring up
    the communication manager's listening socket, and send the mapping
    message to the default localhost client port.
    """
    print ("# ------ Server Part1: Server sync_navigator ------ #")
    # 1) First actions : Parser and Mapping for the field
    print("[Main] Loading XML File")

    parser = Parser('../Common/Parser/files/field.xml')

    mapping = Mapping(parser.obj)
    # NOTE(review): `headers` is currently unused — the web-server send
    # below is commented out.
    headers = {'content-type': 'field initialisation'}

    # ------ Web server not working atm ------- #
    # Server_CommunicationManager.send_data_to_webserver(mapping, headers)

    # 2) Initiate components : Robot_CommunicationManager, websocket
    # 3)Launch WebSocket and send the field Mapping
    # Through WebSocket, User can decide to add more machines or not. If so, we increment local port, and open a new socket_server
    com_manager = Server_CommunicationManager.getInstance()


    print ("# ------ End of initialisation ------ #")
    print ("# ------ Server Part2: sync_navigator information exchange  ------ #")
    # Create new thread for s server listening
    # 6) Connect with robot
    com_manager.new_listening_socket_server()
    print ("# ------ Robot connection and setup done ------ #")
    # 8) Send mapping to Robot
    message = MappingMsg(mapping.pointDict)
    # Let's imagine we want to communicate with robot "x248482"
    # This information will be provided by the websocket
    # NOTE(review): `robot1` is unused while the targeted send is commented out.
    robot1 = "x248482"
    # com_manager.send_message_to_robot(message, com_manager.get_robot_info(robot1))
    CommunicationManager.send_message_to_localhost(message, const.CLIENT_DEFAULT_PORT)
    '''
Beispiel #5
0
 def __init__(self, parser=None, converter=None):
     """Initialise interpreter state.

     :param parser: parser to use; a fresh ``Parser()`` is created when
                    omitted
     :param converter: type converter; defaults to a fresh ``TypeConverter()``
     """
     # Fix: the original signature used ``parser=Parser()`` /
     # ``converter=TypeConverter()`` — evaluated once at definition time,
     # so every instance silently shared the same parser/converter objects.
     # Build them per-instance instead.
     self.parser = parser if parser is not None else Parser()
     self.converter = converter if converter is not None else TypeConverter()
     self.program = None
     self.symbol_table = [dict()]    # one scope dict per nesting level
     self.tree = None
     self.functions = None
     self.scope = 0                  # index of the current scope
     self.robot = None
     self.exit = False
     self.correct = True
     self.error = Error_Handler()
     # Maps error names to the numeric codes used by the error handler.
     self.error_types = {
         'UnexpectedError': 0,
         'RedeclarationError': 1,
         'UndeclaredError': 2,
         'IndexError': 3,
         'InitSizeError': 4,
         'ConvertationError': 5,
         'ParametrError': 6,
         'SumSizeError': 7,
         'IndexNumError': 8,
         'ReturnRepeatError': 9,
         'RecursionError': 10,
         'ReturnError': 11,
         'CommandError': 12,
         'RobotError': 13,
         'SumTypesError': 14,
         'FuncStatementsError': 15,
         'FuncDescriptionError': 16
     }
Beispiel #6
0
def calculate(filename):
    """
    Calculate an expression from a file
    :param filename: name of file containing the expression
    :return:         result of evaluating the expression in the file. None, if the file was empty
    """
    return evaluate_tree(Parser().parse(filename))
Beispiel #7
0
 def get_result(self, test):
     """Run semantic analysis on *test*; return the error message on
     failure (implicitly None when analysis succeeds)."""
     analyzer = SemanticAnalyzer(Parser(Tokenizer(text=test)), False)
     try:
         analyzer.analyze()
     except Error as err:
         return err.message
Beispiel #8
0
 def get_result(self, test):
     """Interpret *test* and return the computed value as a string, or
     the error message if interpretation fails."""
     interpreter = Interpreter(Parser(Tokenizer(text=test)))
     try:
         return str(interpreter.calc())
     except Error as err:
         return err.message
Beispiel #9
0
 def enrich_session(self, sessions=None):
     """Annotate each session dict with exported/announced route counts.

     :param sessions: iterable of session dicts with 'protocol' and 'name'
                      keys, or None
     :return: the same *sessions* object, entries enriched in place
     """
     def route_count(session, query):
         # Run the bird CLI for this session's protocol and parse the
         # route count from its output. (Extracted from two copy-pasted
         # subprocess invocations in the original.)
         output = subprocess.Popen(
           [
             self.app['bird_instance_'+session['protocol']],
             query.format(session['name'])
             ],
           stdout=subprocess.PIPE).communicate()[0]
         return Parser.parse_output_to_routes_count(output)

     if sessions:
         for session in sessions:
             session['exported_routes'] = route_count(
                 session, "sh route export {} count")
             session['announced_routes'] = route_count(
                 session, "sh route protocol {} count")
     return sessions
Beispiel #10
0
def test_ant():
    """Train and score a boosted classifier on the i.antipiev bot/human
    logs for a range of time-window sizes, writing scores and confusion
    matrices to ../logs_ant/result_boosted.txt.
    """
    # test function on i.antipiev logs
    with open('../logs_ant/result_boosted.txt', 'w') as f:
        for time_window in range(5000, 30001, 5000):
            print('Working with time window equals to {} ms...'.format(time_window), file=f)
            # One parser per class; `step` is the aggregation window
            # (presumably milliseconds, per the message above — TODO confirm).
            bot_parser = Parser('../logs_ant/log_bot_Game.csv', '../logs_ant/log_bot_Touch.csv', step=time_window)
            human_parser = Parser('../logs_ant/log_human_Game.csv', '../logs_ant/log_human_Touch.csv', step=time_window)
            bot_parser.set_label(1)
            human_parser.set_label(0)
            # Stack both labelled frames into one training set.
            data = pd.concat([bot_parser.df, human_parser.df], ignore_index=True)
            features, targets = prepare_data(data)
            clf = Classifier(boosted=True).get_data(features, targets, test_size=0.25).fit()
            print('Score:', clf.score(), file=f)
            print('Confusion matrix:\n{}\n'.format(clf.confusion_matrix()), file=f)
def main():
    """Prompt for an expression file, optionally pretty-print its AST,
    and print the evaluated result."""
    filename = input("Input name of the file with the expression:\t")
    tree = Parser().parse(filename)
    if not tree:
        print("File is empty")
        exit(0)

    try:
        # Optional dependency: nicer AST rendering when available.
        import pptree
        pptree.print_tree(tree)
    except ImportError:
        print(
            "You can use module \'pptree\' to visualize ast tree.\nUse pip to install it\n\n"
        )

    print("Result:", calculator.evaluate_tree(tree))
Beispiel #12
0
 def all_bgp_session(self, protocol=None):
     """Collect bird "show protocols all" output and parse it into sessions.

     In debug mode, reads canned output from summary.txt next to this
     module; otherwise queries the bird CLI for *protocol*, or both the
     IPv4 and IPv6 instances when *protocol* is None.

     :param protocol: bird instance suffix (e.g. 'ipv4'/'ipv6') or None
     :return: result of Parser.parse_output_to_sessions(output)
     """
     if self.app['debug']:
         fake_data = os.path.join(os.path.dirname(__file__), 'summary.txt')
         # Fix: context manager closes the handle — the original opened
         # the file and never closed it.
         with open(fake_data, 'r') as f:
             output = f.read()
     # Flattened the original else/if nesting into elif/else.
     elif protocol:
         output = subprocess.Popen(
           [self.app['bird_instance_'+protocol], "show protocols all"],
           stdout=subprocess.PIPE).communicate()[0]
     else:
         output = subprocess.Popen(
           [self.app['bird_instance_ipv4'], "show protocols all"],
           stdout=subprocess.PIPE).communicate()[0]
         output += subprocess.Popen(
           [self.app['bird_instance_ipv6'], "show protocols all"],
           stdout=subprocess.PIPE).communicate()[0]
     return Parser.parse_output_to_sessions(output)
Beispiel #13
0
def human_vs_human_2():
    """Classify between two human players across several time-window
    sizes, writing scores and confusion matrices to human_vs_human_2.txt
    and saving plots for the 20000 window.
    """
    # Human vs human classification
    with open('human_vs_human_2.txt', 'w') as f:
        for time_window in range(5000, 30001, 5000):
            print('Working with {} second window...'.format(time_window))
            # One parser per player's game/touch log pair; `step` is the
            # aggregation window size.
            h1_logs = Parser('../logs/human_game_logs_1.csv', '../logs/human_touch_logs_1.csv', step=time_window)
            h2_logs = Parser('../logs_new/human/session_2019-01-27_15-01-22/gamelog_Game_2019-01-27_15-01-22.csv',
                             '../logs_new/human/session_2019-01-27_15-01-22/gamelog_Touch_2019-01-27_15-01-22.csv',
                             step=time_window)

            # Labels 1 and 2 distinguish the two human players.
            h1_logs.set_label(1)
            h2_logs.set_label(2)

            data = pd.concat([h1_logs.df, h2_logs.df], ignore_index=True)
            features, targets = prepare_data(data)
            # Only dump visualisations for the 20-second window.
            if time_window == 20000:
                scatter(features, targets, save='h_2')
                simple_plot(features, targets, save='h_2')
                tsne_plot(features, targets, perp=10, save='h_2')
            clf = Classifier(boosted=False).get_data(features, targets, test_size=0.25).fit()
            print('Score:', clf.score(), file=f)
            print('Confusion matrix:\n{}\n'.format(clf.confusion_matrix()), file=f)
Beispiel #14
0
def main(input_file, output_file):
    """Compile *input_file* to MIPS assembly written to *output_file*.

    Pipeline: lex -> parse -> type collection -> type building ->
    variable collection -> type checking -> CIL -> MIPS. Errors from
    each stage are reported through print_error.
    """
    try:
        # Fix: context manager closes the handle; the original leaked it.
        with open(input_file, 'r') as source:
            text = source.read()
    except FileNotFoundError:
        print(CompilerError(UNKNOWN_FILE % input_file, 0, 0))
        # Fix: the original fell through with `text` undefined and then
        # crashed with a NameError; stop the pipeline here instead.
        return

    lexer = Lexer()
    tokens = lexer.tokenize(text)   # populates lexer.errors as a side effect
    print_error(lexer.errors)

    parser = Parser(lexer)
    ast = parser(text)
    print_error(parser.errors)

    collector = TypeCollector()
    collector.visit(ast)
    context = collector.context
    print_error(collector.errors)

    builder = TypeBuilder(context)
    builder.visit(ast)
    print_error(builder.errors)

    variable = VariableCollector(context)
    scope = variable.visit(ast)
    print_error(variable.errors)

    checker = TypeChecker(context)
    checker.visit(ast, scope)
    print_error(checker.errors)

    cil = CIL(context)
    cil_ast = cil.visit(ast, scope)

    mips = MIPS(context.build_inheritance_graph())
    data_code, text_code = mips.visit(cil_ast)
    # Fix: write through a context manager instead of a leaked handle.
    with open(output_file, 'w') as out:
        out.write(get_code(text_code, data_code))

    exit(0)
Beispiel #15
0
 def check_semantic(self):
     """Run the semantic analyzer over self.file."""
     analyzer = SemanticAnalyzer(Parser(Tokenizer(self.file)), True)
     analyzer.analyze()
Beispiel #16
0
 def print_ast(self):
     """Parse self.file and render its AST via AstVizGen."""
     syntax_tree = Parser(Tokenizer(self.file)).parse()
     AstVizGen(syntax_tree).generate()
Beispiel #17
0
 def calculate(self):
     """Interpret self.file and print the computed result."""
     engine = Interpreter(Parser(Tokenizer(self.file)))
     print(engine.calc())
Beispiel #18
0
import sys

from Parser.parser import Parser
from Algorithms.NaiveAlgorithm import NaiveAlgorithm

if __name__ == '__main__':
    # Entry point: run the naive cache-assignment algorithm for each input
    # file given on the command line and write a *_result file per input.
    parser = Parser()

    for filename in sys.argv[1:]:
        # NOTE(review): parses sys.argv[1] on every iteration instead of the
        # loop variable `filename` — looks like a bug; confirm intent.
        caches, videos, endpoints = parser.parseFile(sys.argv[1])

        # print caches
        # print [video.endpointsRequests for video in videos]
        # print [endpoint.videoRequests for endpoint in endpoints]

        algorithm = NaiveAlgorithm()
        algorithm.main(caches, videos, endpoints)

        with open(filename + '_result', 'w') as file:
            cachesWithVideo = filter(lambda cache: len(cache.videos) != 0,
                                     caches)

            # NOTE(review): on Python 3, filter() returns an iterator, so
            # len(cachesWithVideo) raises TypeError and iterating after len()
            # would yield nothing — this code only works on Python 2.
            file.write(str(len(cachesWithVideo)) + '\n')
            cacheIndex = 0
            for cache in cachesWithVideo:
                # Empty caches are skipped but still advance the index.
                if len(cache.videos) == 0:
                    cacheIndex += 1
                    continue

                file.write(str(cacheIndex))