Example #1
import json
import sys

import utility  # project helper providing LoadJSON
# LexicalAnalyzer, Query and Policy are assumed to come from
# this project's own modules.


def test():
    # JSONPath expression and label to keep, taken from the command line.
    jsonpath = sys.argv[1]
    label = sys.argv[2]
    print("jsonpath:" + jsonpath)

    # Load the sample document and tokenize the path.
    j = utility.LoadJSON("employee.json").get_json()
    la = LexicalAnalyzer(jsonpath)
    token_p = la.token_pair()

    # Execute the query once and reuse the result.
    q = Query({"data": j}, token_p)
    result = q.execute()
    print(json.dumps(result, indent=4, sort_keys=True))

    filtered_content = Policy().keep_label(result, label, [])
    print(json.dumps(filtered_content, indent=4, sort_keys=True))
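A minimal sketch of how this harness might be invoked; the __main__ guard, the script name, and the sample arguments are illustrative additions, not part of the original:

if __name__ == "__main__":
    # e.g.  python test_query.py "/data/personalRecord/name" "public"
    test()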
Example #2
    def query(self, path):
        # The root path maps directly to the root node.
        if path == "/":
            return [self.tree_root]

        path_token = LexicalAnalyzer(path).token_pair()
        ini_nodes = [self.tree_root]
        final_nodes = []

        # Each (type, value) token pair narrows the current node set.
        for (t1, t2) in path_token:
            final_nodes = []

            if t1 == "child":
                for root in ini_nodes:
                    # Look for the named key among object members ...
                    for n in root.obj_mem:
                        for (k, v) in n.items():
                            if k == t2:
                                final_nodes.append(v)
                    # ... and among primitive members (single-pair dicts).
                    for n in root.prim_mem:
                        (k, v) = next(iter(n.items()))
                        if k == t2:
                            final_nodes.append(v)

            elif t1 == "index":
                idx = int(t2)
                for root in ini_nodes:
                    try:
                        final_nodes.append(root.array_mem[idx])
                    except IndexError:
                        # Out-of-range indexes simply match nothing.
                        pass

            elif t1 == "gap":
                for root in ini_nodes:
                    final_nodes.extend(self._gapvalue(t2, root))

            ini_nodes = final_nodes
        return final_nodes
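The loop above consumes the (type, value) pairs produced by LexicalAnalyzer.token_pair(). A plausible shape for that token stream, inferred from the three branches handled here; the concrete path and values below are assumptions, not taken from the source:

# Hypothetical token stream for a path such as "/store/books[0]":
# each pair is (token_type, token_value), matching the branches above.
token_pair = [
    ("child", "store"),  # descend into the object member "store"
    ("child", "books"),  # then into "books"
    ("index", "0"),      # then take element 0 of an array member
]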
Example #3
# These methods belong to the syntactic analyzer class; they rely on
# the standard-library modules "sys" and "traceback" being imported
# at module level.

    def nextToken(self):
        # Advance to the next token, aborting if the stream is exhausted.
        if (self.index + 1) < len(self.list_tokens):
            self.index += 1
            return self.list_tokens[self.index]
        else:
            sys.exit("Token index out of range")

    def syntaxError(self, expected):
        # Report the offending token and its line, then stop.
        ct = self.getCurrentToken()
        sys.exit(
            'Syntax error, "{}" expected but "{}" found in line {}'.format(
                expected, ct.token, ct.line))

    def getCurrentToken(self):
        return self.list_tokens[self.index]

    def showStack(self):
        # Debug helper: print the current Python call stack.
        print("########")
        for line in traceback.format_stack():
            print(line)
        print("########")


file_path = '../program.txt'
with open(file_path, "r") as f:
    p = f.read()
lex = LexicalAnalyzer(p).parse()
SyntacticAnalyzer(lex).program()
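These helpers are the usual building blocks of a recursive-descent parser: a rule inspects the current token, consumes it with nextToken() on a match, and calls syntaxError() otherwise. A minimal sketch of such a rule in the same class context; the match helper, the "IDENT" token name, and the assignment/expression rules are illustrative, not part of the source:

    def match(self, expected):
        # Consume the current token if it is the expected one,
        # otherwise report a syntax error and stop.
        if self.getCurrentToken().token == expected:
            return self.nextToken()
        self.syntaxError(expected)

    def assignment(self):
        # Hypothetical rule: IDENT "=" <expression> ";"
        self.match("IDENT")
        self.match("=")
        self.expression()  # assumed expression rule elsewhere in the class
        self.match(";")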
Example #4
from lexical_analyzer import LexicalAnalyzer
from syntax_analyzer import SyntaxAnalyzer
from parse_table import TableGenerator

# project 1: run lexical analyzer and create token file
print('...Starting Lexical Analyzer')
lex = LexicalAnalyzer()
lex.process_file()
print('Lexical Analyzer has finished running.\n')

# # project 2: create parse table
# print('...Starting Parse Table generator')
# tbl = TableGenerator()
# tbl.generate_parse_table()
# print('Parse Table created.\n')

# project 2: run syntax analyzer
print('...Starting Syntax Analyzer')
symbol_table = lex.get_symbol_table()
syn = SyntaxAnalyzer(symbol_table)  # the analyzer appears to run on construction
print('Syntax Analyzer has finished running.\n')
Example #5
from lexical_analyzer import LexicalAnalyzer
from syntatic_analyzer import SyntaticAnalyzer

INPUT_FILE = 'input'

if __name__ == '__main__':
    LA = LexicalAnalyzer(INPUT_FILE)
    SA = SyntaticAnalyzer(LA, debug=True)
    ast = SA.analyze()
    if ast:
        ast.traverse()

    # Debug alternative: dump the token stream instead of parsing.
    # token, value = LA.get_next_token()
    # while token:
    #     print(f'Token: {token} - value: {value}')
    #     token, value = LA.get_next_token()