Example #1
0
    def __init__(self,
                 cities,
                 gas_stations,
                 storage,
                 min_sleep_time=15,
                 max_sleep_time=60):
        """
        Initializes a crawler.

        Args:
          cities: a list of "city, state"
          gas_stations: a list of gas stations
          storage: storage medium used to persist crawled results
          min_sleep_time: minimum number of seconds to sleep after a web request
          max_sleep_time: maximum number of seconds to sleep after a web request
        """

        # list of cities
        self.cities = cities

        # list of gas stations
        self.gas_stations = gas_stations

        # sleep bounds (seconds) applied between web requests
        self.min_sleep_time = min_sleep_time
        self.max_sleep_time = max_sleep_time

        # parser for the gas station blocks
        self.parser = Parser()

        # parameters to extract
        self.params = [
            'address', 'brand', 'lat', 'lon', 'price_1', 'price_2', 'price_3'
        ]

        # storage medium
        self.storage = storage
Example #2
0
    def compile_file(self, filename):
        """
        Compile *filename* into a prototype and register it.

        Returns the index of an already-compiled prototype when one exists;
        otherwise parses the file, emits its code into a new ProtoType via
        ast.to_bin and returns the index assigned by add_proto.
        """
        print("compile file...<<", filename)

        # reuse a previously compiled prototype if this file was seen before
        idx = self.find_proto(filename)
        if idx >= 0:
            return idx

        with codecs.open(filename, encoding='utf-8') as f:
            print('=' * 100)

            ast = Parser(filename, f.read()).parse_file()

            print('=' * 100)

            # print('ast.execute result: >>', ast.execute())

            print('=' * 100)

            # new prototype named after the file; this object acts as its env
            proto = ProtoType(None, filename)
            proto.name = filename
            proto.env = self

            ast.to_bin(proto)

            print("proto-->>", proto)
            idx = self.add_proto(filename, proto)
            return idx
Example #3
0
    def process_node_create_html(self, nodeid):
        # Parse a node's src .txt file and export the result as HTML.
        # Returns the number of errors encountered.
        if self.options.verbose:
            print 'html', nodeid
        errors = 0
        infile = os.path.join('src', self.book.name, self.stage_name,
                              nodeid + '.txt')
        success = True
        try:
            parser = Parser(self, self.options)
            if not parser.parse(infile, nodeid, todo=True):
                success = False
        except Exception as e:
            # Report the exception with a full traceback, then treat the
            # node as a parse failure.
            print 'Exception:', e
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback)
            success = False

        if not success:
            print '%s' % nodeid
            print 'Parse failure'
            errors += 1
            # NOTE(review): exits with status 0 on failure, so `errors` is
            # never actually returned from this branch — confirm intended.
            sys.exit(0)
        parser.export_html(
            os.path.join(self.book.name, self.stage_name, nodeid + '.html'))

        return errors
    def upload_files_from_storage_to_search(self):
        # Pull Video Indexer output blobs from the insights container and
        # upload each "Processed" one's intervals into the search index,
        # logging successes and JSON-decoding failures to status files.
        print("uploading files from storage account to search")
        vi_output_files = self.storage_client.list_files_in_container(
            self.insights_container)
        i = 0
        for file in vi_output_files:
            i += 1
            try:
                json_object = json.loads(
                    self.storage_client.get_blob_string(
                        self.insights_container, file.name).encode(
                        )  # Encode as UTF-8 in case UTF-8 BOM
                )
                if json_object["state"] == "Processed":
                    parser = Parser()
                    intervals = parser.parse_vi_json(json_object)
                    intervals = list(intervals.values())
                    # mark every interval document for upload into the index
                    for item in intervals:
                        item["@search.action"] = "upload"

                    documents = {"value": intervals}
                    print(
                        str(i) +
                        f": uploading {str(file.name)} to search index")
                    self.upload_to_search(documents)
                    self.write_status_file(str(file.name),
                                           self.ingest_log_filename)
            except ValueError:
                # blob content was not valid JSON
                print("could not process " + str(file))
                # NOTE(review): this logs `file` while the success path logs
                # `file.name` — confirm which form the status file expects.
                self.write_status_file(file, self.ingest_failure_log_filename)
Example #5
0
    def __init__(self):
        # Smoke-test snippet: feed a small hand-written program to the
        # parser, then block until the user presses Enter (Python 2
        # raw_input).
        code = ("    var myset = set() \n var oo = set() n = 653")

        p = Parser()
        p.parse(code)

        raw_input("Press Enter to continue...")
Example #6
0
    def process_html(self, infile, outfile, options):
        """
		Process simple (non-node) src file and generate an HTML file.
		"""
        errors = 0
        name = os.path.splitext(os.path.basename(infile))[0]
        make_dir(os.path.dirname(outfile))
        success = True
        if options.verbose:
            print '  %s -> html' % infile
        try:
            parser = Parser(None, options)
            if not parser.parse(infile, name):
                print 'Failure during parse_main'
                success = False
        except:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            traceback.print_exception(exc_type, exc_value, exc_traceback)
            success = False

        if not success:
            print '%s' % file
            print 'Parse failure'
            errors += 1
        else:
            parser.export_html(outfile)

        return errors
Example #7
0
 def test_parses_succeessfully(self):
     # NOTE(review): "succeessfully" is misspelled, but renaming would
     # change the test id, so the name is left untouched.
     # Parse 3.csv through the week formatter and compare each produced
     # row dict to its expected counterpart, pairwise.
     reader = CsvReader(os.path.join(TEST_DIR, '3.csv'))
     formatter = WeekFormatter()
     parser = Parser(reader, formatter)
     actual = parser.parse()
     expected = [{
         'square': 9,
         'day': 'mon',
         'value': 3,
         'description': 'third_desc 9'
     }, {
         'square': 9,
         'day': 'tue',
         'value': 3,
         'description': 'third_desc 9'
     }, {
         'square': 4,
         'day': 'wed',
         'value': 2,
         'description': 'third_desc 4'
     }, {
         'double': 4,
         'day': 'thu',
         'value': 2,
         'description': 'third_desc 4'
     }, {
         'double': 2,
         'day': 'fri',
         'value': 1,
         'description': 'third_desc 2'
     }]
     # zip truncates to the shorter sequence, so a length mismatch between
     # expected and actual would go unnoticed here.
     for dct1, dct2 in zip(expected, actual):
         self.assertDictEqual(dct1, dct2)
    def upload_local_files_to_search(self):
        # Same flow as the storage-account variant, but reads Video Indexer
        # JSON output from a local directory instead of blob storage.
        print("uploading local files to search")
        files = self.read_files_from_directory(self.vi_output_directory)
        i = 0
        for file in files:
            path = os.path.join(self.vi_output_directory, file)
            i += 1
            with open(path) as f:
                try:
                    json_object = json.load(f)
                    if json_object["state"] == "Processed":
                        parser = Parser()
                        intervals = parser.parse_vi_json(json_object)
                        intervals = list(intervals.values())
                        # mark every interval document for upload
                        for item in intervals:
                            item["@search.action"] = "upload"

                        documents = {"value": intervals}
                        print(
                            str(i) +
                            f": uploading {str(file)} to search index")
                        self.upload_to_search(documents)
                        self.write_status_file(file, self.ingest_log_filename)

                except ValueError:
                    # file content was not valid JSON
                    print("could not process " + str(file))
                    self.write_status_file(file,
                                           self.ingest_failure_log_filename)
Example #9
0
def test_gohugo_parser(title, desc, content, tags, date, clean):
    """
    Compare gohugo_parser output against a hand-built front-matter template.

    With `clean` true, the generated template must equal the expected one
    (or exceed it in length when `title` is empty); with `clean` false it
    must differ. `post_id` must always be set.
    """
    expected_template = '''
---
title: ''' + title + '''
date: ''' + date + '''
draft: true
summary: ''' + desc + ''' ''' + content + '''
tags: ''' + str(tags) + '''
isfeed: true
id: 1
---
        '''
    parser = Parser()
    template, post_id = parser.gohugo_parser(title,
                                             desc,
                                             content,
                                             tags,
                                             date,
                                             post_id=1)

    if clean:
        # exact match for a non-empty title; longer output otherwise
        assert_that(template).is_equal_to(expected_template) if title != '' \
            else assert_that(len(template)).is_greater_than(len(expected_template))
        assert_that(len(template)).is_equal_to(len(expected_template)) if title != '' \
            else assert_that(template).is_not_equal_to(expected_template)
    else:
        # unclean input must never reproduce the expected template
        assert_that(template).is_not_equal_to(expected_template)
        assert_that(len(template)).is_less_than(len(expected_template)) if title != '' \
            else assert_that(len(template)).is_not_equal_to(len(expected_template))

    assert_that(post_id).is_not_none()
    def test_parsing_infix_expressions(self):
        """Every infix operator parses into a single infix expression
        statement with the expected left operand, operator and right
        operand."""
        cases = (
            ("5 + 5;", 5, "+", 5),
            ("5 - 5;", 5, "-", 5),
            ("5 * 5;", 5, "*", 5),
            ("5 / 5;", 5, "/", 5),
            ("5 > 5;", 5, ">", 5),
            ("5 < 5;", 5, "<", 5),
            ("5 == 5;", 5, "==", 5),
            ("5 != 5;", 5, "!=", 5),
            ("foobar + barfoo;", "foobar", "+", "barfoo"),
            ("foobar - barfoo;", "foobar", "-", "barfoo"),
            ("foobar * barfoo;", "foobar", "*", "barfoo"),
            ("foobar / barfoo;", "foobar", "/", "barfoo"),
            ("foobar > barfoo;", "foobar", ">", "barfoo"),
            ("foobar < barfoo;", "foobar", "<", "barfoo"),
            ("foobar == barfoo;", "foobar", "==", "barfoo"),
            ("foobar != barfoo;", "foobar", "!=", "barfoo"),
            ("true == true", True, "==", True),
            ("true != false", True, "!=", False),
            ("false == false", False, "==", False),
        )

        for source, left, operator, right in cases:
            parser = Parser(Lexer(source))
            program = parser.parse_program()

            self.assert_parser_errors(parser)
            self.assertEqual(1, len(program.statements))

            expression = program.statements[0].expression
            self.assert_infix_expression(left, operator, right, expression)
Example #11
0
  def make_track(self):
    """Translate the parsed source tokens into a Track of MIDI-style events.

    Each function scope gets its own channel, key and sustained note;
    assignments end the current note and start a new one on the active
    channel. Finished channels are recycled via used_up_channels.
    """
    # deterministic output unless explicitly marked unstable
    if not self.unstable:
      random.seed(self.content)
    track = Track()
    track.add_event(TempoEvent(100)) #bpm
    parser = Parser(self.content)

    # saved frames for enclosing scopes, and channels available for reuse
    channel_stack = []
    used_up_channels = []

    key = self._generate_key()
    last_note = self.select_note(key, None)
    frame = StackFrame(track.new_channel(), key, last_note)

    frame.channel.add_event(ProgramChangeEvent, 0, random.choice(instruments.values()))
    frame.channel.add_event(SetVolumeEvent, 0, 0x32)
    frame.channel.add_event(NoteOnEvent, 0, last_note, 0x42)

    # for every function scope we enter, we want a new channel,
    # and we want a new key for it, and we want to immediately start
    # a note playing. In function scope, every assignment should end that note and start a new note
    # on that channel. Every other scope starts another note that ends when that scope ends, and behaves similarly
    # with assignments

    # when a scope ends we end its current note
    tok = parser.next_token()
    max_depth = 0
    while tok is not None:
      if tok == Tokens.FUNCTION_START:
        # create a new channel, a new key, and the last note put on this channel
        # start that note playing
        key = self._generate_key(depth=len(channel_stack))
        last_note = self.select_note(key, None)

        channel_stack.append(frame)
        if len(channel_stack) > max_depth:
          max_depth = len(channel_stack)
        # reuse a finished channel when possible, else allocate a new one
        if used_up_channels:
          frame = used_up_channels.pop()
        else:
          frame = StackFrame(track.new_channel(), key, last_note)
        frame.channel.add_event(ProgramChangeEvent, 0, random.choice(instruments.values()))
        frame.channel.add_event(SetVolumeEvent, 0, 0x32)
        frame.channel.add_event(NoteOnEvent, 0, frame.last_note, 0x42)
        self.maybe_update_stack(channel_stack)
      elif tok == Tokens.FUNCTION_END:
        # end the scope's sustained note and restore the enclosing frame
        frame.channel.add_event(NoteOffEvent, self.eighth_note, frame.last_note, 0x42)
        used_up_channels.append(frame)
        frame = channel_stack.pop()
      elif tok == Tokens.ASSIGNMENT:
        # assignment: end the current note, pick and start the next one
        frame.channel.add_event(NoteOffEvent, self.eighth_note, frame.last_note, 0x42)
        frame.last_note = self.select_note(frame.key, frame.last_note)
        frame.channel.add_event(NoteOnEvent, 0, frame.last_note, 0x42)
        self.maybe_update_stack(channel_stack)
      elif tok == Tokens.OTHER_KEYWORD:
        pass
      elif tok == Tokens.INVALID_SOURCE:
        print 'Error in %s on line %d' % (self.filename, parser.line_no)
      tok = parser.next_token()
    return track
def test_person():
    """End-to-end grammar check: a POSITION pipeline followed by a
    gender/number/case-agreed NAME rule extracts a full Person fact."""
    Name = fact(
        'Name',
        ['first', 'last'],
    )
    Person = fact(
        'Person',
        ['position', 'name']
    )

    # surname / first-name tokens, excluding abbreviations
    LAST = and_(
        gram('Surn'),
        not_(gram('Abbr')),
    )
    FIRST = and_(
        gram('Name'),
        not_(gram('Abbr')),
    )

    POSITION = morph_pipeline([
        'управляющий директор',
        'вице-мэр'
    ])

    # first and last name must agree in gender, number and case
    gnc = gnc_relation()
    NAME = rule(
        FIRST.interpretation(
            Name.first
        ).match(gnc),
        LAST.interpretation(
            Name.last
        ).match(gnc)
    ).interpretation(
        Name
    )

    PERSON = rule(
        POSITION.interpretation(
            Person.position
        ).match(gnc),
        NAME.interpretation(
            Person.name
        )
    ).interpretation(
        Person
    )

    parser = Parser(PERSON)

    match = parser.match('управляющий директор Иван Ульянов')
    assert match

    assert match.fact == Person(
        position='управляющий директор',
        name=Name(
            first='Иван',
            last='Ульянов'
        )
    )
Example #13
0
def test_const():
    # A rule interpreted as const(1) always yields the constant fact 1.
    parser = Parser(rule('a').interpretation(const(1)))
    match = parser.match('a')
    assert match.fact == 1
Example #14
0
 def setUpClass(cls):
     """Build the shared graph and candidate-path fetcher from scenario 2
     of the euro16 test data."""
     logging.basicConfig(level=logging.INFO)
     scenario = Parser('../test_data', 'euro16_k2.txt').get_scenario(2)
     topology = scenario.networkTopology
     cls.graph = graph_factory(topology)
     cls.candidate_path_fetcher = CandidatePathsFetcher(
         scenario.candidatePaths, topology.n_nodes, scenario.pathsLengths)
Example #15
0
def test_rule_custom():
    # custom(float) converts the matched text '3.14' into a float fact.
    RULE = rule(
        '3', '.', '14'
    ).interpretation(
        custom(float)
    )
    parser = Parser(RULE)
    match = parser.match('3.14')
    assert match.fact == 3.14
Example #16
0
def text_normalized():
    # NOTE(review): the name starts with "text_" instead of "test_", so
    # pytest will not collect this function — confirm and rename if it is
    # meant to run as a test.
    # normalized() reduces the matched word to its dictionary form.
    RULE = rule(
        'московским'
    ).interpretation(
        normalized()
    )
    parser = Parser(RULE)
    match = parser.match('московским')
    assert match.fact == 'московский'
Example #17
0
def test_inflected():
    # inflected() re-inflects the match into the requested grammemes
    # (nominative, feminine).
    RULE = rule(
        'московским'
    ).interpretation(
        inflected({'nomn', 'femn'})
    )
    parser = Parser(RULE)
    match = parser.match('московским')
    assert match.fact == 'московская'
Example #18
0
def test_attribute_const():
    # An attribute interpreted as const(1) yields the constant, not the
    # matched text.
    F = fact('F', 'a')
    RULE = rule(
        'январь'
    ).interpretation(
        F.a.const(1)
    )
    parser = Parser(RULE)
    match = parser.match('январь')
    assert match.fact == 1
Example #19
0
    def test_one(self):
        # Lex tests/sample.ps, check the exact token stream, then make sure
        # the parser accepts it without raising SyntaxError.
        l = Lexer('tests/sample.ps')
        l.lex()
        self.assertEqual([(t.name, t.value) for t in l.tokens], [('KEYWORD', 'program'), ('IDENTIFIER', 'hellowld'), ('SEMICOLON', ';'), ('KEYWORD', 'begin'), ('IDENTIFIER', 'writeln'), ('LP', '('), ('BASE10_NUM', '2'), ('RP', ')'), ('SEMICOLON', ';'), ('IDENTIFIER', 'readln'), ('SEMICOLON', ';'), ('KEYWORD', 'end')])

        p = Parser(l.tokens)
        try:
            p.parse()
        except SyntaxError:
            self.fail()
Example #20
0
def test_attribute():
    # Interpreting a rule as a bare attribute yields the matched text.
    F = fact('F', 'a')
    RULE = rule(
        'a'
    ).interpretation(
        F.a
    )
    parser = Parser(RULE)
    match = parser.match('a')
    assert match.fact == 'a'
Example #21
0
def main(args):
    # Parse the interface specification, then run every configured
    # generator and write each produced file under a subdirectory named
    # after its generator.
    parser = Parser()
    iface = parser.parse(args.specification.read())

    for generator in args.generators:
        for file_path, content in generator.generate(iface).items():
            write_generated_file(path.join(generator.name, file_path),
                                 content,
                                 args.out_directory,
                                 args.update)
Example #22
0
def test_rule_custom_custom():
    # Chained custom() steps: lowercase the match, then map it through
    # MAPPING ('A' -> 'a' -> 1).
    MAPPING = {'a': 1}
    RULE = rule(
        'A'
    ).interpretation(
        custom(str.lower).custom(MAPPING.get)
    )
    parser = Parser(RULE)
    match = parser.match('A')
    assert match.fact == 1
 def setUp(self):
     # Load scenario 0 and wire up the graph, candidate-path fetcher,
     # demand factory and simulated-annealing instance for each test.
     parser = Parser('./data', 'euro16_k2.txt')
     self.scenario = parser.get_scenario(0)
     self.graph = graph_factory(self.scenario.networkTopology)
     self.logger = logging.getLogger(str(self.__class__))
     candidate_path_fetcher = CandidatePathsFetcher(self.scenario.candidatePaths,
                                                    self.scenario.networkTopology.n_nodes,
                                                    self.scenario.pathsLengths)
     self.demand_factory = DemandsFactory(candidate_path_fetcher)
     self.sa = SimulatedAnnealing()
Example #24
0
    def test_two(self):
        # Lex tests/sample2.ps (a program with a function declaration),
        # check the exact token stream, then make sure the parser accepts
        # it without raising SyntaxError.
        l = Lexer('tests/sample2.ps')
        l.lex()
        self.assertEqual([(t.name, t.value) for t in l.tokens], [('KEYWORD', 'program'), ('IDENTIFIER', 'exFunction'), ('SEMICOLON', ';'), ('KEYWORD', 'var'), ('IDENTIFIER', 'a'), ('COMMA', ','), ('IDENTIFIER', 'b'), ('COMMA', ','), ('IDENTIFIER', 'ret'), ('COLON', ':'), ('KEYWORD', 'integer'), ('SEMICOLON', ';'), ('KEYWORD', 'function'), ('IDENTIFIER', 'max'), ('LP', '('), ('IDENTIFIER', 'num1'), ('COMMA', ','), ('IDENTIFIER', 'num2'), ('COLON', ':'), ('KEYWORD', 'integer'), ('RP', ')'), ('COLON', ':'), ('KEYWORD', 'integer'), ('SEMICOLON', ';'), ('KEYWORD', 'var'), ('IDENTIFIER', 'result'), ('COLON', ':'), ('KEYWORD', 'integer'), ('SEMICOLON', ';'), ('KEYWORD', 'begin'), ('KEYWORD', 'if'), ('LP', '('), ('IDENTIFIER', 'num1'), ('GT', '>'), ('IDENTIFIER', 'num2'), ('RP', ')'), ('KEYWORD', 'then'), ('IDENTIFIER', 'result'), ('ATTRIB', ':='), ('IDENTIFIER', 'num1'), ('KEYWORD', 'else'), ('IDENTIFIER', 'result'), ('ATTRIB', ':='), ('IDENTIFIER', 'num2'), ('SEMICOLON', ';'), ('IDENTIFIER', 'max'), ('ATTRIB', ':='), ('IDENTIFIER', 'result'), ('SEMICOLON', ';'), ('KEYWORD', 'end'), ('SEMICOLON', ';'), ('KEYWORD', 'begin'), ('IDENTIFIER', 'a'), ('ATTRIB', ':='), ('BASE10_NUM', '100'), ('SEMICOLON', ';'), ('IDENTIFIER', 'b'), ('ATTRIB', ':='), ('BASE10_NUM', '200'), ('SEMICOLON', ';'), ('IDENTIFIER', 'ret'), ('ATTRIB', ':='), ('IDENTIFIER', 'max'), ('LP', '('), ('IDENTIFIER', 'a'), ('COMMA', ','), ('IDENTIFIER', 'b'), ('RP', ')'), ('SEMICOLON', ';'), ('IDENTIFIER', 'writeln'), ('LP', '('), ('IDENTIFIER', 'ret'), ('RP', ')'), ('SEMICOLON', ';'), ('KEYWORD', 'end')])

        p = Parser(l.tokens)
        try:
            p.parse()
        except SyntaxError:
            self.fail()
 def setUpClass(cls):
     # Shared fixtures: graph, candidate-path fetcher and unicast demands
     # built once from scenario 2 of the euro16 test data.
     logging.basicConfig(level=logging.INFO)
     parser = Parser('../test_data', 'euro16_k2.txt')
     scenario = parser.get_scenario(2)
     networkTopology = scenario.networkTopology
     cls.graph = graph_factory(networkTopology)
     cls.candidate_path_fetcher = CandidatePathsFetcher(scenario.candidatePaths,
                                                        networkTopology.n_nodes,
                                                        scenario.pathsLengths)
     cls.demands = unicast_demands_factory(scenario.unicastDemands, cls.candidate_path_fetcher)
 def setUp(self):
     # Build a spectrum allocator over scenario 2's graph and pick one
     # concrete candidate path (0 -> 1) to allocate onto.
     parser = Parser("../test_data", "euro16_k2.txt")
     scenario = parser.get_scenario(2)
     graph = graph_factory(scenario.networkTopology)
     candidatePathFetcher = CandidatePathsFetcher(scenario.candidatePaths, len(graph.nodes), scenario.pathsLengths)
     # get path
     list_of_paths1 = candidatePathFetcher.fetch_candidates(0, 1)
     # 1, 6, 10
     self.path = list_of_paths1[1]
     self.edges = graph.edges
     self.sa = SpectrumAllocator(graph)
Example #27
0
def test_predicate_attribute():
    # A predicate interpreted as an attribute fills the fact, records the
    # matched span and serializes to JSON.
    F = fact('F', ['a'])
    RULE = rule(
        eq('a').interpretation(F.a)
    ).interpretation(F)
    parser = Parser(RULE)
    match = parser.match('a')
    record = match.fact
    assert record == F(a='a')
    assert record.spans == [(0, 1)]
    assert record.as_json == {'a': 'a'}
    def test_parsing_empty_hash_literal(self):
        # "{}" parses into a HashLiteral with no key/value pairs.
        lexer = Lexer("{}")
        parser = Parser(lexer)
        program = parser.parse_program()

        self.assert_parser_errors(parser)

        hash_literal = program.statements[0].expression

        self.assertIsInstance(hash_literal, HashLiteral)

        self.assertEqual(0, len(hash_literal.pairs))
    def test_identifier_expression(self):
        # A lone identifier statement parses into an identifier expression
        # whose value and token literal are both the source name.
        lexer = Lexer("foobar;")
        parser = Parser(lexer)
        program = parser.parse_program()

        self.assert_parser_errors(parser)
        self.assertEqual(1, len(program.statements))

        ident = program.statements[0].expression

        self.assertEqual("foobar", ident.value)
        self.assertEqual("foobar", ident.token_literal())
    def test_integer_literal_expression(self):
        # "5;" parses into a single integer-literal expression with the
        # numeric value 5 and the literal text "5".
        lexer = Lexer("5;")
        parser = Parser(lexer)
        program = parser.parse_program()

        self.assert_parser_errors(parser)
        self.assertEqual(1, len(program.statements))

        literal = program.statements[0].expression

        self.assertEqual(5, literal.value)
        self.assertEqual("5", literal.token_literal())
Example #31
0
def test_rule_attribute_custom():
    # Attribute interpretation followed by custom(int): the matched '1'
    # text is converted to the integer 1.
    F = fact('F', ['a'])
    RULE = rule(
        '1'
    ).interpretation(
        F.a
    ).interpretation(
        custom(int)
    )
    parser = Parser(RULE)
    match = parser.match('1')
    assert match.fact == 1
    def test_operator_precedence_parsing(self):
        """Parsed programs stringify with operator precedence made explicit
        through parenthesization."""
        cases = (
            ("-a * b", "((-a) * b)"),
            ("!-a", "(!(-a))"),
            ("a + b + c", "((a + b) + c)"),
            ("a + b - c", "((a + b) - c)"),
            ("a * b * c", "((a * b) * c)"),
            ("a * b / c", "((a * b) / c)"),
            ("a + b / c", "(a + (b / c))"),
            ("a + b * c + d / e - f", "(((a + (b * c)) + (d / e)) - f)"),
            ("3 + 4; -5 * 5", "(3 + 4)((-5) * 5)"),
            ("5 > 4 == 3 < 4", "((5 > 4) == (3 < 4))"),
            ("5 < 4 != 3 > 4", "((5 < 4) != (3 > 4))"),
            (
                "3 + 4 * 5 == 3 * 1 + 4 * 5",
                "((3 + (4 * 5)) == ((3 * 1) + (4 * 5)))",
            ),
            ("true", "true"),
            ("false", "false"),
            ("3 > 5 == false", "((3 > 5) == false)"),
            ("3 < 5 == true", "((3 < 5) == true)"),
            ("1 + (2 + 3) + 4", "((1 + (2 + 3)) + 4)"),
            ("(5 + 5) * 2", "((5 + 5) * 2)"),
            ("2 / (5 + 5)", "(2 / (5 + 5))"),
            ("(5 + 5) * 2 * (5 + 5)", "(((5 + 5) * 2) * (5 + 5))"),
            ("-(5 + 5)", "(-(5 + 5))"),
            ("!(true == true)", "(!(true == true))"),
            ("a + add(b * c) + d", "((a + add((b * c))) + d)"),
            (
                "add(a, b, 1, 2 * 3, 4 + 5, add(6, 7 * 8))",
                "add(a, b, 1, (2 * 3), (4 + 5), add(6, (7 * 8)))",
            ),
            (
                "add(a + b + c * d / f + g)",
                "add((((a + b) + ((c * d) / f)) + g))",
            ),
            (
                "a * [1, 2, 3, 4][b * c] * d",
                "((a * ([1, 2, 3, 4][(b * c)])) * d)",
            ),
            (
                "add(a * b[2], b[1], 2 * [1, 2][1])",
                "add((a * (b[2])), (b[1]), (2 * ([1, 2][1])))",
            ),
        )

        for source, expected in cases:
            parser = Parser(Lexer(source))
            program = parser.parse_program()

            self.assert_parser_errors(parser)
            self.assertEqual(expected, str(program))
Example #33
0
def test_inflected_custom():
    # Inflect the match to nominative singular, then map the resulting
    # dictionary form through MONTHS to its month number.
    MONTHS = {
        'январь': 1
    }
    RULE = rule(
        'январе'
    ).interpretation(
        inflected({'nomn', 'sing'}).custom(MONTHS.get)
    )
    parser = Parser(RULE)
    match = parser.match('январе')
    assert match.fact == 1
Example #34
0
def test_normalized_custom():
    # Normalize the match to its dictionary form, then map it through
    # MONTHS to its month number.
    MONTHS = {
        'январь': 1
    }
    RULE = rule(
        'январе'
    ).interpretation(
        normalized().custom(MONTHS.get)
    )
    parser = Parser(RULE)
    match = parser.match('январе')
    assert match.fact == 1
Example #35
0
def test_tagger():
    # A custom tagger drives which tokens the repeatable tag('I') rule can
    # match; the found spans are mapped back to substrings of the text.
    text = 'a b c d e f g'
    A = tag('I').repeatable()
    parser = Parser(A, tagger=MyTagger())

    matches = parser.findall(text)
    spans = [_.span for _ in matches]
    substrings = [
        text[start:stop]
        for start, stop in spans
    ]
    assert substrings == ['b c', 'e f']
    def setUpClass(cls):
        # Shared fixtures built from scenario 261 of the euro16 test data:
        # graph, candidate-path fetcher, and unicast/anycast demands.
        logging.basicConfig(level=logging.INFO)
        parser = Parser('../test_data', 'euro16_k2.txt')
        # 40.dem, 10.dea, 31.rep
        # NOTE(review): cls.parser actually holds the scenario returned by
        # get_scenario, not the Parser — the name is misleading.
        cls.parser = parser.get_scenario(261)
        networkTopology = cls.parser.networkTopology
        cls.graph = graph_factory(networkTopology)
        cls.candidate_path_fetcher = CandidatePathsFetcher(cls.parser.candidatePaths,
                                                           networkTopology.n_nodes,
                                                           cls.parser.pathsLengths)
        cls.demand_factory = DemandsFactory(cls.candidate_path_fetcher)

        cls.unicast_demands = cls.demand_factory.get_unicast_demands(cls.parser.unicastDemands)
        cls.anycast_demands = cls.demand_factory.get_anycast_demands(cls.parser.anycastDemands, cls.parser.dataCenters)
Example #37
0
def execute(program_container: ProgramContainer):
    """
    Executes the given program.
    :param program_container: the program container containing the program that shall be executed.
    """
    try:
        try:
            # Fast path: reuse previously generated intermediate code.
            parsed_program = ast_tree_of_intermediate_code(program_container)
        except IntermediateCodeError as b:
            # Fall back to a full parse and regenerate the intermediate code
            # for next time.
            logger.error(b.message())
            parsed_program = Parser.parse(program_container)
            generate_tree_based_intermediate_code(parsed_program, program_container)
        logger.debug('Parsed AST for %s: %s' % (program_container.origin, parsed_program))
        global_env = GlobalEnvironment(name='__main__', file_path=program_container.origin)
        parsed_program.execute(global_env, None)
        logger.debug('Environment after execution: %s' % global_env)
    except (ParserErrors, ExecutionError) as p:
        # user-facing errors go to stderr instead of raising
        print(p.message(), file=sys.stderr)
Example #38
0
def main():
    # Load all data files, build the recommendation engine, then run an
    # interactive customer dialog and show the best-rated articles.
    parser = Parser()
    cities = parser.parse_cities("data/city.json")
    flights = parser.parse_flights("data/flight.json")
    customers = parser.parse_customers("data/customer.json")
    articles = parser.parse_articles("data/article.json")
    tags = parser.parse_tags("data/tags.json")

    engine = Engine(customers, articles, tags, flights, cities)

    print_customers(customers, parser.customer_schema)
    cid = dialog_customer(customers, parser.customer_schema)

    ar = engine.articlerating_customer(cid)
    show_best_articles(ar, articles, parser.article_schema)

    return True
Example #39
0
class ParserTests(unittest.TestCase):
	"""Acceptance tests for the bracket-matching Parser.read() method."""

	def setUp(self):
		self.parser = Parser()

	def testCorrectSquareBrackets(self):
		self.assertTrue(self.parser.read("[]"))

	def testFalseSquareBrackets(self):
		self.assertFalse(self.parser.read("[]]"))

	def testCorrectMixedBrackets(self):
		self.assertTrue(self.parser.read("{<>}()[{}]"))

	def testFalseMixedBrackets(self):
		self.assertFalse(self.parser.read("{<}()[{}]"))

	def testTokenExistsButOrderIsWrong(self):
		self.assertFalse(self.parser.read("{[}]"))

	def testLargeString(self):
		eval_this = """
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
		"""
		self.assertTrue(self.parser.read(eval_this))

	def testFalseLargeString(self):
		# SyntaxError in line 41
		eval_this = """
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
			{}<>()[]{}<>()[]{}<()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
			{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]{}<>()[]
		"""
		self.assertFalse(self.parser.read(eval_this))
Example #40
0
 def run_interpreter(stream):
     # Full pipeline: lex the raw stream, parse the tokens, then simulate
     # the parsed statements and return the interpreter's result.
     token_stream = Lexer.start(stream)
     parsed_stream = Parser.start(token_stream)
     return Interpreter.simulate_statements(parsed_stream)
Example #41
0
 def setUpClass(cls):
     # Shared fixture: the graph built from scenario 0 of the euro16 data.
     logging.basicConfig(level=logging.INFO)
     parser = Parser('../test_data', 'euro16_k2.txt')
     # NOTE(review): `parser` is rebound to the scenario here; a separate
     # name (e.g. `scenario`) would be clearer.
     parser = parser.get_scenario(0)
     cls.networkTopology = parser.networkTopology
     cls.graph = graph_factory(cls.networkTopology)
Example #42
0
import socket
from subprocess import call
from plugins.cameraled import CameraLED
from parser.parser import Parser

# Accept requests from any IP address on port 50000
TCP_IP = '0.0.0.0'
TCP_PORT = 50000
BUFFER_SIZE = 4096

# Create socket and bind it to TCP address & port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))

led = CameraLED()
parser = Parser();

# Infinite loop
while 1:
    # Listen for a connection
    s.listen(1)
    # Connection found. Accept connection
    conn, addr = s.accept()

    # read one request, strip trailing whitespace, tokenize it
    data = conn.recv(BUFFER_SIZE).rstrip()
    tokens = parser.parse(data);
    #print tokens
    l = len(tokens)
    if l > 0 and tokens[0]  == "led":
        if l > 1 and tokens[1] == "on":
          # print "Turn LED on"
          # NOTE(review): this excerpt appears truncated — the inner `if`
          # has no executable body here.
Example #43
0
 def test_direct_input_stream(self):
     # An Integer followed by InputNumCell is folded into one
     # InputNumDirect token by the parser.
     tokenstream_oracle = TokenStream([InputNumDirect(Integer(0, 111), InputNumCell(3))])
     actual_tokenstream = Parser.start(TokenStream([Integer(0, 111), InputNumCell(3)]))
     self.assertSequenceEqual(actual_tokenstream, tokenstream_oracle)
Example #44
0
        # for each edge add its index to corresponding node
        for key, edge in edges.items():
            nodes[edge.source].add_edge(key)
        return nodes

    logger = logging.getLogger(graph_factory.__name__)
    assert isinstance(network_topology, parser.parser.NetworkTopology)

    graph = Graph()
    logger.debug("filling edges")
    graph.edges = generate_edges(network_topology.matrix_of_net_links)
    logger.debug("filling nodes")
    graph.nodes = generate_nodes(network_topology.matrix_of_net_links, graph.edges)
    logger.info("graph created")
    return graph


if __name__ == "__main__":
    # Ad-hoc smoke test (Python 2): build a graph from scenario 2 of the
    # euro16 test data and dump its edges and nodes.
    from parser.parser import Parser
    test_parser = Parser('../test/test_data', 'euro16_k2.txt')
    test_parser = test_parser.get_scenario(2)
    test_graph = graph_factory(test_parser.networkTopology)

    for edge in test_graph.edges:
        print test_graph.edges[edge]

    for node in test_graph.nodes:
        print test_graph.nodes[node]

Example #45
0
 def test_nested_condition_stream(self):
     # Condition/JumpBack pairs are folded into nested
     # ConditionalStatement tokens by the parser.
     tokentream_oracle = TokenStream([IncrementCell(0), ConditionalStatement(Condition(3), [PointerRight(6), ConditionalStatement(Condition(9), [PointerLeft(12)])]), OutputNumCell(21)])
     actual_tokenstream = Parser.start(TokenStream([IncrementCell(0), Condition(3), PointerRight(6), Condition(9), PointerLeft(12), JumpBack(15), JumpBack(18), OutputNumCell(21)]))
     self.assertSequenceEqual(actual_tokenstream, tokentream_oracle)
Example #46
0
 def test_direct_output_stream(self):
     # An Integer followed by OutputNumCell is folded into one
     # OutputNumDirect token by the parser.
     tokenstream_oracle = TokenStream([OutputNumDirect(Integer(0, 2223), OutputNumCell(4))])
     actual_tokenstream = Parser.start(TokenStream([Integer(0, 2223), OutputNumCell(4)]))
     self.assertSequenceEqual(actual_tokenstream, tokenstream_oracle)
Example #47
0
        return False

    def cool_temperature(self):
        """Apply one cooling step: scale temperature by (1 - cooling_rate)."""
        self.temperature = self.temperature * (1 - self.cooling_rate)

    def is_not_cold(self):
        """Return True while the annealer is still above its minimal temperature."""
        return self.minimal_temperature < self.temperature


if __name__ == "__main__":
    # Ad-hoc smoke test (Python 2): run simulated annealing over the
    # combined unicast and anycast demands of scenario 100 and print the
    # best energy found.
    from parser.parser import Parser
    from graph.graph import graph_factory
    from graph.path import CandidatePathsFetcher
    from demand.demand import DemandsFactory
    logging.basicConfig(level=logging.INFO)
    test_parser = Parser('../test/test_data', 'euro16_k2.txt')
    scenario = test_parser.get_scenario(100)
    graph = graph_factory(scenario.networkTopology)
    candidate_path_fetcher = CandidatePathsFetcher(scenario.candidatePaths,
                                                   scenario.networkTopology.n_nodes,
                                                   scenario.pathsLengths)
    demand_factory = DemandsFactory(candidate_path_fetcher)
    sa = SimulatedAnnealing()
    uni_demands = demand_factory.get_unicast_demands(scenario.unicastDemands)
    any_demands = demand_factory.get_anycast_demands(scenario.anycastDemands, scenario.dataCenters)
    demands = uni_demands + any_demands
    sa.run(graph, demands)
    print sa.best_energy
    # for demand in sa.best_solution:
    #     if isinstance(demand, UnicastDemand):
    #         print demand.get_selected_path()
Example #48
0
def main(args):
	# Smoke-check the bracket parser (Python 2) on an unbalanced string.
	parser = Parser()
	print parser.read("<{}<>()")
Example #49
0
	def setUp(self):
		# fresh Parser instance for each test
		self.parser = Parser()