Example #1
def make_interpreter(src):
    lexer = Lexer(src)
    parser = Parser(lexer)
    tree = parser.parse()

    interpreter = Interpreter(tree)
    return interpreter
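
The factory above only wires the stages together; the real behaviour lives in the project's Lexer, Parser and Interpreter classes, which are not shown. As a rough, self-contained sketch (with hypothetical toy stand-ins, not the original classes), the intended lexer -> parser -> interpreter flow looks like this:

# Hypothetical stand-ins, only to make the pipeline above runnable end to end.
class Lexer:
    def __init__(self, src):
        # Tokenize by whitespace; a real lexer would classify tokens.
        self.tokens = src.split()

class Parser:
    def __init__(self, lexer):
        self.lexer = lexer

    def parse(self):
        # Return the token list as the "tree"; a real parser would build an AST.
        return self.lexer.tokens

class Interpreter:
    def __init__(self, tree):
        self.tree = tree

    def interpret(self):
        # Sum integer tokens as a stand-in for real evaluation.
        return sum(int(tok) for tok in self.tree)

print(make_interpreter("1 2 3").interpret())  # 6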
Example #2
    def cogitation(self):
        """ Papy will fill the following fields :
            - palce
            - location
            - maps
            - wiki """

        parser = Parser(self.question)
        self.place = parser.get_place()

        if self.place:
            maps_api = MapsApi(self.place)
            wiki_api = WikiApi(self.place)

            self.location = maps_api.get_location()
            self.wiki = wiki_api.get_wiki()

            if not self.location:
                self.errors.append("NO_LOCATION")

            if not self.wiki:
                self.errors.append("NO_WIKI")

        else:
            self.errors.append("BAD_QUESTION")

            self.location = None
            self.wiki = [None, None]
Example #3
class AnimationModel:
    def __init__(self):
        self.parser = Parser()
        self.width = 0
        self.height = 0
        self.frames_num = 0
        self.current_frame = 0
        self.file_path = ""
        self.is_meta_loaded = False

    def set_file_path(self, file_path):
        self.file_path = file_path
        self.parser.set_file(self.file_path)

    def get_info(self):
        self.width, self.height, self.frames_num = self.parser.read_animation_info()
        self.current_frame = 1
        self.is_meta_loaded = True

    def get_frame(self):
        frame = None
        if self.is_meta_loaded:
            frame = self.parser.read_frame(
                self.width, self.height, self.current_frame == self.frames_num)
            if self.current_frame == self.frames_num:
                self.current_frame = 1
            else:
                self.current_frame += 1
        return frame
Example #4
 def __init__(self):
     self.parser = Parser()
     self.width = 0
     self.height = 0
     self.frames_num = 0
     self.current_frame = 0
     self.file_path = ""
     self.is_meta_loaded = False
Example #5
def main():

    with open('file2.py', 'r') as f:
        text = f.read()

    lexer = Lexer(text)
    parser = Parser(lexer)
    tree = parser.parse()

    interpreter = Interpreter(tree)
    interpreter.interpret()
Example #6
def parse():
    # parse gathered data and save as csv

    logger.info("parse")
    scrapper = Scraper(Persistor)
    parser = Parser()
    raw_data = scrapper.scrape()

    data = []
    for raw in raw_data:
        data = scrapper.append_data(data, raw)
    parsed_files = [parser.parse_object(file) for file in data]
    scrapper.save_csv(parsed_files)
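
The parse() job above depends on the project-specific Scraper, Parser and Persistor classes. A minimal, self-contained sketch of the same scrape -> parse -> save-as-CSV shape, using hypothetical stand-in functions and the standard csv module, could look like this:

import csv

def scrape():
    # Stand-in for Scraper.scrape(): pretend two raw records were fetched.
    return ["alice,30", "bob,25"]

def parse_object(raw):
    # Stand-in for Parser.parse_object(): split a raw row into named fields.
    name, age = raw.split(",")
    return {"name": name, "age": int(age)}

def save_csv(rows, path="parsed.csv"):
    # Stand-in for save_csv(): write the parsed rows to disk.
    with open(path, "w", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=["name", "age"])
        writer.writeheader()
        writer.writerows(rows)

save_csv([parse_object(raw) for raw in scrape()])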
Example #7
    def run(self):
        sleep(3)

        turn = 1
        while True:
            is_white = bool(turn % 2)
            moved = False
            final_move = None

            try:
                data_bytes = self.conn.recv(0 if is_white else 1, 3)
                if data_bytes:
                    client_turn, move = Parser.decode(data_bytes)
                    if client_turn == turn:
                        if self.wm.check_move(move, is_white):
                            moved = True
                            final_move = move
            except Exception as err:
                print(err)

            if not moved:
                print('random move')
                moves = self.wm.all_moves(is_white)
                if moves:
                    final_move = choice(moves)

            self.wm.do_move(final_move, is_white)
            self.conn.send2all(Parser.encode(turn, final_move))

            print(self.wm)

            w, b = self.wm.result()
            if w + b == 64:
                if w > b:
                    print('White wins!')
                elif w < b:
                    print('Black wins!')
                else:
                    print('Draw!')
                break

            turn += 1
            sleep(1)


        sleep(6)
        self.conn.disconnect()
Example #8
def parse():

    logger.info("parse")
    storage = Persistor(SCRAPPED_FILE)
    parser = Parser()

    raw_data = storage.read_raw_data()

    ind_start = raw_data.find('table class="wikitable sortable"')
    raw_data = raw_data[ind_start:]
    ind_end = raw_data.find('</table>')
    raw_data = raw_data[:ind_end + len('</table>')]

    all_rows = re.findall('<tr[^^]*?</tr>', raw_data)

    parsed_files = [parser.parse_object(raw) for raw in all_rows]
    storage.save_csv(parsed_files, TABLE_FORMAT_FILE)
Example #9
    def __init__(self):
        self.terminate_map = {}
        for k, v in Param.terminate_pair.items():
            self.terminate_map[v] = k
        #self.trie = self._constructTrie()
        #self.parser = Parser(self.trie)
        self.db = {}
        self.currentDB = 'defaultdb'
        self.tables = {}
        '''
        self.tables['Employee'] = <'Table' object at ...>
        self.tables['Company'] = <'Table' object at ...>
        '''

        self.parser = Parser()

        self.head_lines = 5
        self.printall = False
        self.return_time = False
Example #10
    def __init__(self, string):
        self.orx = 0.0
        self.ory = 0.0
        self.scx = 1.0
        self.scy = 1.0
        self.ang = 0.0
        self.Draw_color = 'k'
        statements = Parser(string)
        # print(len(statements))

        self.analyse(statements)
        self.showPic()
Example #11
 def calc(self):
     tokenizer = Tokenizer(self.strcalc)
     tokenizer.tokenize()
     tokens = tokenizer.get_tokens()
     
     parser = Parser(tokens)
     parser.parse()
     parser.calc()
     self.ans = parser.get_ans()               
Example #12
def main():
    print('Welcome to IPOL interpreter!')

    # returns lines of string containing the cleaned code
    file_reader = FileReader()
    # tabs removed, double spaces removed
    lines = file_reader.read_file()

    tokenizer = Tokenizer()
    # returns a 2d list containing the tokens per line of code
    tokens_list = tokenizer.tokenize(lines)
    tokens_list_copy = tokens_list.copy()

    # create instance of the parser with the syntax declared in Syntax class
    parser = Parser(syntax=Syntax().get_syntax())

    # iterate each line of the list containing the tokens
    for line in tokens_list:
        recursive_parse(parser, line, callback)


    # create a new instance of the parser now with the syntax for reducing operations to expressions
    parser = Parser(syntax=Syntax().get_final_syntax())

    # Parse to an expression to see if it is valid
    for line in parsed_list:
        recursive_parse(parser, line, callback1)

    exception_checker = ExceptionCheker()

    for i in range(len(final_parsed_list)):
        # there must be a syntax error because it cannot be converted to a single statement
        # check which kind of exception it is
        if len(final_parsed_list[i]) > 1:
            exception = exception_checker.check_exception(
                final_parsed_list[i], i)

            if isinstance(exception, IpolException):
                exceptions.append(exception)

    # now check if the overall structure of the code is valid
    # check if there are unused values
    # for index, token in enumerate(reduce(final_parsed_list)):
    #     if token.type == Type.NUMBER or token.type == Type.STR:
    #         exceptions.append(IpolException(
    #             ExceptionType.UNUSED_VALUE_ERROR, None, index))

    # print exceptions if there are any and halt the build process
    if len(exceptions) > 0:
        for exception in exceptions:
            exception.print()
        return
    else:
        # create a new instance of the parser now with the syntax of the overall ipol code
        parser = Parser(syntax=Syntax().get_ipol_syntax())

        # finally, verify that the full code is valid
        reduced_final_parsed_list = reduce(final_parsed_list)

        # recursive_parse(parser, reduced_final_parsed_list, callback2)
        reduced_final_parsed_list[:] = (token for token in reduced_final_parsed_list \
        if token.type != Type.EMPTY_LINE)

        recursive_parse(parser, reduced_final_parsed_list, callback2)

        for line in ipol_code_verified:
            for token in line:
                print(token.type)

        # check syntax in class Syntax
        # Type.E means accepted
        build_failed_message = 'Build Failed.'
        try:
            if ipol_code_verified[0][0].type == Type.E:
                print('Build Successful\n')
            else:
                print(build_failed_message)
                return
        except Exception:
            print(build_failed_message)
            return

        # there are no exceptions
        # continue with code generation
        tokens_list_copy.pop(0)
        tokens_list_copy.pop(len(tokens_list_copy) - 1)

        generated_code = CodeGenerator().generate(tokens_list_copy)

        # this may return a bool data type
        if isinstance(generated_code, list):
            runnable_code = '\n'.join(generated_code)
            runnable_code = runnable_code.replace('&n0', '')
            # run the generated python code
            with open('ic.py', '+w') as ic:
                ic.write(runnable_code)

            print('\nBuild Complete.\nView logs on ipol_logs.txt\nView generated code on ic.py\n')
            exec(runnable_code, globals())

            with open('ipol_logs.txt', '+w') as logs:
                text_to_write = 'PARSING LOGS\n\nGENERATED TOKENS\n'
                for line in tokens_list:
                    for token in line:
                        text_to_write = text_to_write + '{} -> {}'.format(token.type, token.val) + ", "
                    text_to_write = text_to_write + '\n'

                text_to_write = text_to_write + '\nPARSED AS...\n'
                for line in parsed_list:
                    for token in line:
                        text_to_write = text_to_write + str(token.type) + ', '
                    text_to_write = text_to_write + '\n'

                text_to_write = text_to_write + '\nGENERATED INTERMEDIATE CODE\n' + runnable_code
                logs.write(text_to_write)
        # if bool is returned, that means there was something wrong with the ipol code
        else:
            print('Build failed')
Example #13
from lexer import Lexer
from myparser import Parser
from codegen import CodeGen

fname = "helloworld.c"
with open(fname) as f:
    text_input = f.read()

lexer = Lexer().get_lexer()
tokens = lexer.lex(text_input)

codegen = CodeGen()

module = codegen.module
builder = codegen.builder
printf = codegen.printf

pg = Parser(module, builder, printf)
pg.parse()
parser = pg.get_parser()
parser.parse(tokens).eval()

codegen.create_ir()
codegen.save_ir("output.ll")
Example #14
from mylexer import Lexer
from myparser import Parser
from mypainter import Painter
import os

textBox = None
str = 'FOR T FROM 0 TO 2*PI STEP PI/50 DRAW (cos(T),sin(T));'
Lexer(str, show=True)
Parser(str, show=True)
Painter(str)
Example #15
from trie import Trie
from myset import MySet
import AdjMatGraph
import os

from myparser import Parser

parser = Parser()
g = AdjMatGraph.AdjGraph()

vidjeno_strana = 0
step = 5
broj_strana = step
poslednja_pretraga = ''
direktorijum = ''

trie = Trie()

final_arr = []


def unosDirektorijuma():
    while True:
        try:
            global direktorijum
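            # Prompt (Serbian): "Enter the absolute path to the directory, or 1 for the current directory:"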
            direktorijum = input(
                "Unesite absolutnu putanju do direktorijuma ili broj 1 za trenutni direktorijum: \n"
            )
            if direktorijum == '1':
                direktorijum = os.getcwd()
                break
Example #16
    def test_get_place_bad_question(self):
        """ If the question have not place, Parser will raise an error """

        parser = Parser(self.BAD_QUESTION)

        assert parser.get_place() == None
Example #17
def callParser():
    str = textBox.get('1.0', "end")
    # Parser(string, show=False)
    # show=True displays the parsing process
    Parser(str, show=True)
Example #18
from scanner import Scanner, Token, TokenID
from myparser import Parser
from statSem import *
from compfs import *

parser = Parser("file")
tree = parser.GetTree()
parser.PrintTree(tree, 0)
stack = statSem(tree, Stack())
asm = ASM("file.asm",tree, stack.ids)
asm.run()
asm.file.close()
i = 1
Example #19
#7/14/2018
import sys

from myparser import Parser
from symbol_table import SymbolTable
from code import Code

file_path = sys.argv[1]
print('#######')
print(file_path)

# Should accept a command-line argument specifying the path to the file you want to assemble
print('Running the assembler...')

#open the .asm file for reading
asm = Parser(file_path + '.asm')

#open another file for writing
hack = open(file_path + '.hack', 'w')

# Advance all the way through the file once and collect all the symbols (only the pseudo-symbols?)

symbol_table = SymbolTable()

l_command_address = 0

#a_command_set = set()
a_command_list = []

while asm.hasMoreCommands():
Example #20
    def test_init(self):
        """ Init Parser with the question """

        parser = Parser(self.GOOD_QUESTION)

        assert type(parser.question) == type(self.GOOD_QUESTION)
Example #21
items = ['get_company_name','get_company_short_description','get_company_description','get_homepage',
 'get_sector','get_founded','get_business_model','get_amount_raised','get_funding_stage',
 'get_employees','get_products','get_product_stage','get_tags','get_address','get_offices_abroad',
 'get_geographical_markets','get_target_markets','get_patent','get_tim_member_1','get_tim_member_2',
 'get_tim_member_3','get_funding_round_type','get_funding_round_sum','get_funding_round_investor',
 'get_funding_rounds_date','get_funding_round_type_1','get_funding_round_sum_1','get_funding_round_investor_1',
 'get_funding_rounds_date_1','get_facebook','get_twitter','get_linkedin','get_url_source','get_similar_link']
    

import csv

with open('urls.txt', 'r') as in_file, open('combined_file.csv', 'w') as outcsv, open('log_file.log', 'w') as log_file:
    writer = csv.writer(outcsv)
    writer.writerow(title)

    for i, url in enumerate(in_file.readlines()):
        try:
            parser = Parser(url)
            result = []
            log = str(i) + ' parse page: ' + url
            print(log)
            log_file.write(log)
            for item in items:
                to_call = getattr(parser, item)
                call = to_call()
                if isinstance(call,tuple):
                    for v in call:
                        result.append((v or '-'))
                else:
                    result.append((call or '-'))
            log_file.write('Done ' +'\n\n')
        except Exception as e:
            print(e)
Example #22
class Core:

    DUMP_DUMPALL = 0

    def __init__(self):
        self.terminate_map = {}
        for k, v in Param.terminate_pair.items():
            self.terminate_map[v] = k
        #self.trie = self._constructTrie()
        #self.parser = Parser(self.trie)
        self.db = {}
        self.currentDB = 'defaultdb'
        self.tables = {}
        '''
        self.tables['Employee'] = <'Table' object at ...>
        self.tables['Company'] = <'Table' object at ...>
        '''

        self.parser = Parser()

        self.head_lines = 5
        self.printall = False
        self.return_time = False

    # W
    def _constructTrie(self):
        trie = Trie()
        # Written in multiple lines instead of loop to prevent dealing with hashing
        trie.insertList(
            tuple(
                map(lambda x: x.lower(), [
                    str(member)
                    for name, member in lh.SQLKeyword.__members__.items()
                ])), self.terminate_map['Keyword'])
        trie.insertList(
            tuple(
                map(lambda x: x.lower(), [
                    str(member)
                    for name, member in lh.SQLFunction.__members__.items()
                ])), self.terminate_map['Function'])
        trie.insertList(
            tuple(
                map(lambda x: x.lower(), [
                    str(member)
                    for name, member in lh.SQLDataType.__members__.items()
                ])), self.terminate_map['DataType'])
        trie.insertList(
            tuple(
                map(lambda x: x.lower(), [
                    str(member)
                    for name, member in lh.SQLOperator.__members__.items()
                ])), self.terminate_map['Operator'])
        return trie

    def _dump_db(self, file_path: str):
        new_db = DB(self.currentDB)
        new_db.tables = self.tables
        self.db[self.currentDB] = new_db
        f = open(file_path, 'wb')
        pickle.dump(self.db[self.currentDB], f)
        f.close()
        return 0

    def _load_db(self, file_path: str):
        with open(file_path, 'rb') as f:
            db_f = pickle.load(f)
        self.db[db_f.name] = db_f
        return 0

    def get_table(self, s: str):
        if s in self.tables:
            return self.tables[s]
        elif s.lower() in self.tables:
            return self.tables[s.lower()]
        elif s.upper() in self.tables:
            return self.tables[s.upper()]
        else:
            return False

    @staticmethod
    def get_table_from_dict(s: str, d: dict):
        if s in d:
            return d[s]
        elif s.lower() in d:
            return d[s.lower()]
        elif s.upper() in d:
            return d[s.upper()]
        else:
            return False

    def handler(self, s: str):
        if self.return_time:
            t = time.time()

        if ';' in s:
            s = s.split(';')
        else:
            s = [s]
        for i in s:
            try:
                d = self.parser.parse(i)
            except Exception:
                PrintException.handlerError()
                return
            if d['type'] == 'select':
                if self.printall:
                    self.execute_select(d).formatout()
                else:
                    self.execute_select(d).head(self.head_lines)
            elif d['type'] == 'delete':
                self.execute_delete(d)
            elif d['type'] == 'insert':
                self.execute_insert(d)
            elif d['type'] == 'create_db':
                self.execute_create_db(d)
            elif d['type'] == 'drop_db':
                self.execute_drop_db(d)
            elif d['type'] == 'create_table':
                self.execute_create_table(d)
            elif d['type'] == 'drop_table':
                self.execute_drop_table(d)
            elif d['type'] == 'create_index':
                self.execute_create_index(d)
            elif d['type'] == 'drop_index':
                self.execute_drop_index(d)
            elif d['type'] == 'use_db':
                self.execute_use_db(d)
            else:
                PrintException.handlerError()
        if self.return_time:
            print('Executing queries takes %s seconds.' % (time.time() - t))

    def execute_select(self, d: dict):
        '''
        d = {
            'query': {
                'select': {
                    'columns': ['a1', 'A2.c'],
                    'aggr_func': [],
                    'distinct': [],
                },
                'from': ['A1', 'A2', 'B1'],
                'where': {
                    'joins': [['A1.a', '=', 'A2.b'], ['A1.a', '>', 'B1.b']],
                    'conditions': [['AND', ['A1.a', '>', '2']], ['OR', ['B1.b', '=', '5']]]
                },
                'groupby': [],
                'orderby': [['A1.a'], True],
            },
            'tables': {
                'A1': 'T',
                'A2': 'T',
                'B1': 'K',
            },
            'columns': {
                'a1': 'A1.a',
                'A2.c': 'A2.c',
            },
        }
        '''
        query = d['query']
        columns = d['columns']
        alias_table = d['tables']

        # MODIFY THIS!!!
        runtime_dict = deepcopy(self.tables)
        # ^^^^

        for name, subquery in d['subquery'].items():
            runtime_dict[name] = self.execute_select(
                self.parser.parse_tokens(subquery))

        joins = query['where']['joins']
        conditions = query['where']['conditions']
        select = query['select']
        if len(query['groupby']) > 0:
            groupby = query['groupby']
        else:
            groupby = None
        if len(query['orderby']) > 0:
            orderby = query['orderby']
        else:
            orderby = None

        if len(joins) == 0:
            # If there is no join, we can assume there is only one table!
            target = d['tables'][query['from'][0]]
            final_table = self.get_table_from_dict(target, runtime_dict)

            #final_table.printall()

        for i in joins:
            '''
            Sample: i = ['A1.a', '=', 'A2.b']
            '''
            #print('\n', i, '\n')

            v1, operator, v2 = i[0].split('.'), i[1], i[2].split('.')
            t1 = alias_table[v1[0]]
            t2 = alias_table[v2[0]]
            flag_t1_unmodified = False
            flag_t2_unmodified = False

            if isinstance(t1, str):
                t1 = deepcopy(self.get_table_from_dict(t1, runtime_dict))
                flag_t1_unmodified = True
            if isinstance(t2, str):
                t2 = deepcopy(self.get_table_from_dict(t2, runtime_dict))
                flag_t2_unmodified = True

            if not flag_t1_unmodified:
                c1 = i[0]
            else:
                c1 = v1[1]

            if not flag_t2_unmodified:
                c2 = i[2]
            else:
                c2 = v2[1]

            # Assign alias
            t1.name = v1[0]
            t2.name = v2[0]

            #print(t1.name, c1, t2.name, c2, v1, operator, v2)

            if flag_t1_unmodified and flag_t2_unmodified:
                ret_table = t1._join(
                    t2, [c1, operator, c2],
                    override_colname=Table.OVERRIDE_COLNAME_BOTH)
            elif (not flag_t1_unmodified) and flag_t2_unmodified:
                ret_table = t1._join(
                    t2, [c1, operator, c2],
                    override_colname=Table.OVERRIDE_COLNAME_LAST)
            elif flag_t1_unmodified and (not flag_t2_unmodified):
                ret_table = t1._join(
                    t2, [c1, operator, c2],
                    override_colname=Table.OVERRIDE_COLNAME_FIRST)
            else:
                ret_table = t1._join(
                    t2, [c1, operator, c2],
                    override_colname=Table.OVERRIDE_COLNAME_NONE)
            # Replace A1, A2 with A1 JOIN A2
            alias_table[v1[0]] = ret_table
            alias_table[v2[0]] = ret_table
            # Assume the last join will return the full joined table (?)
            final_table = ret_table

            #print(alias_table)
            #final_table.printall()

        if final_table == -1:
            raise Exception('')
        T, cur_table = final_table, final_table
        #Sample: i = ['AND', ['A1.a', '>', '3']]
        for i in conditions:

            junction, condition = i[0], i[1]
            condition[2] = float(condition[2])
            if junction.upper() == 'AND':
                cur_table = cur_table._project(condition)
            else:
                new_t = T._project(condition)
                cur_table = cur_table._union(new_t)

        if select['aggr_func']:
            aggr_func = select['aggr_func'][0]
        else:
            aggr_func = select['aggr_func']

        if len(aggr_func) == 0:
            aggr_func = None
        else:
            aggr_func[1] = d['columns'][aggr_func[1]]
        distinct = select['distinct']
        if len(distinct) == 0:
            distinct = None

        #cur_table.printall()
        #print(columns, aggr_func)
        if select['columns'] == ['*']:
            columns = cur_table._col_names
        else:
            columns = [d['columns'][i] for i in select['columns']]

        if cur_table == -1:
            raise Exception('')
        cur_table = cur_table._select(columns,
                                      distinct=distinct,
                                      aggr_func=aggr_func,
                                      orderby=orderby,
                                      groupby=groupby)
        reverse_columns_name_map = {}
        for k, v in d['columns'].items():
            reverse_columns_name_map[v] = k
        cur_table._col_names = list(
            map(
                lambda x: reverse_columns_name_map[x]
                if x in reverse_columns_name_map else x, cur_table._col_names))
        return cur_table

    def execute_delete(self, d):
        '''
        DELETE FROM Employees
        WHERE A = 1

        d = {
            'from': 'Employees',
            'where': ['A', '=', '1']
        }
        '''
        if self.get_table(d['from']):
            target = self.get_table(d['from'])
            if target._foreign_key:
                # A.a REFERENCES B.b
                # B._foreign_key = [['b', 'A', 'a', self.ONDELETE_CASCADE]]
                for fk in target._foreign_key:
                    target_c, ref_t, ref_c, fk_policy = fk[0], self.get_table(
                        fk[1]), fk[2], fk[3]
                    if len(target_c) == 1:
                        target_c = target_c[0]
                    if len(ref_c) == 1:
                        ref_c = ref_c[0]
                    target_fk_loc = target._col_names.index(target_c)
                    ref_fk_loc = ref_t._col_names.index(ref_c)
                    to_d = target._delete(d['where'][0][1], try_d=True)
                    if fk_policy == Table.ONDELETE_NOACTION or fk_policy == Table.ONDELETE_RESTRICT:
                        for _, v in ref_t._tuples.items():
                            for j in to_d:
                                if v[ref_fk_loc] == j[target_fk_loc]:
                                    return -1
                    elif fk_policy == Table.ONDELETE_CASCADE:
                        del_list = []
                        for k, v in ref_t._tuples.items():
                            for j in to_d:
                                if v[ref_fk_loc] == j[target_fk_loc]:
                                    del_list.append(k)
                        for i in del_list:
                            ref_t._tuples.pop(i)
                    elif fk_policy == Table.ONDELETE_SETNULL:
                        del_list = []
                        for k, v in ref_t._tuples.items():
                            for j in to_d:
                                if v[ref_fk_loc] == j[target_fk_loc]:
                                    del_list.append(k)
                        for i in del_list:
                            ref_t._tuples[i][ref_fk_loc] = None
                target._delete(d['where'][0][1])
            else:
                target._delete(d['where'][0][1])
        else:
            raise Exception('')
        return 0

    def execute_update(self, d):
        '''
        UPDATE Employees
        SET A = 1, B = 'a'
        WHERE C = 5

        d = {
            'update': 'Employees',
            'set': [['A', 1], ['B', 'a']],
            'where': ['C', '=', 5]
        }
        '''
        if self.get_table(d['update']):
            target = self.get_table(d['update'])
            target._update(d['set'], d['where'])

    def execute_insert(self, d):
        '''
        INSERT INTO Employees
        VALUES (1, 2, 3)

        d = {
            'insert_into': 'Employees',
            'values': [(1, 2, 3), (4, 5, 6)]
        }
        '''
        if self.get_table(d['insert_into']):
            target = self.get_table(d['insert_into'])
            vals = d['values']
            return target._insert(tuple(vals))
        else:
            raise Exception('')
        return 0

    def execute_create_db(self, d):
        '''
        CREATE DATABASE testdb;

        d = {
             'name': 'testdb',
        }
        '''
        if not d['name'] in self.db:
            self.db[d['name']] = DB(d['name'])
            return 0
        else:
            raise Exception('')

    def execute_use_db(self, d):
        '''
        USE testdb;

        d = {
             'name': 'testdb',
        }
        '''
        if d['name'] in self.db:
            if self.tables:
                # Save to old db
                self.db[self.currentDB].updateTable(self.tables)

            # Go to new db
            self.currentDB = d['name']
            self.tables = self.db[d['name']].tables
        else:
            raise Exception('')
        return 0

    def execute_drop_db(self, d):
        '''
        d = {
             'name': 'testdb',
        }
        '''
        if d['name'] in self.db:
            if d['name'] == self.currentDB:
                self.currentDB = None
                self.tables = {}
            self.db.pop(d['name'])
        else:
            raise Exception('')
        return 0

    def execute_create_index(self, d):
        '''
        {
            'name': index_name, 
            'table': table_name, 
            'columns': []
        }
        '''
        if self.get_table(d['table']):
            t = self.get_table(d['table'])
            for col in d['columns']:
                t._create_index(d['name'], col)
        else:
            raise Exception('')
        return 0

    def execute_drop_index(self, d):
        '''
        {
            'table': table_name, 
            'index': index_name
        }
        '''
        if self.get_table(d['table']):
            self.get_table(d['table'])._drop_index(d['index'])
        else:
            raise Exception('')
        return 0

    def execute_create_table(self, d):
        '''
        d = {
            'name': 'Employees',
            'col_names': ['A', 'B', 'C'],
            'dtype': ['int', 'varchar', 'date'],
            'primary_key': ['A'],
            'foreign_key': ['B', 'Company', 'A', 'CASCADE'],
        }

        If not specified, the last field in d['foreign_key'] can be None.
        '''
        fks = []
        for fk in d['foreign_key']:
            if len(fk) == 4:
                if not fk[3] or fk[3] == 'NOACTION':
                    fks.append([fk[0], fk[1], fk[2], Table.ONDELETE_NOACTION])
                elif fk[3] == 'CASCADE':
                    fks.append([fk[0], fk[1], fk[2], Table.ONDELETE_CASCADE])
                elif fk[3] == 'SETNULL':
                    fks.append([fk[0], fk[1], fk[2], Table.ONDELETE_SETNULL])
                else:
                    raise Exception
            else:
                raise Exception('')

        return self._create_table(d['name'], d['col_names'], d['dtype'],
                                  d['primary_key'], fks)

    def execute_drop_table(self, d):
        '''
        d = {
            'name': 'Employees',
            'if_exist': False,
        }
        '''
        return self._drop_table(d['name'], False)

    def _create_table(self,
                      name: str,
                      col_names: list,
                      dtype: list,
                      primary_key=None,
                      foreign_key=None) -> int:
        '''
        e.g.
        CREATE TABLE Employees (
            A int,
            B varchar(255),
            C date,
            PRIMARY KEY (A),
            FOREIGN KEY (B) REFERENCES Company(A) ON DELETE CASCADE
        )

        is accepted as _create_table('Employees', ['A', 'B', 'C'], ['int', 'varchar', 'date'], primary_key='A', foreign_key=['B', 'Company', 'A', Table.ONDELETE_CASCADE]).

        If not specified, the on_delete parameter is set to Table.ONDELETE_NOACTION
        '''
        if name in self.tables:
            PrintException.keyError('_create_table')
            return -1
        if foreign_key:
            for key in foreign_key:
                target = key[1]
                fk = [key[2], name, key[0], key[3]]
                if target in self.tables:
                    self.get_table(target)._addForeignKeyConstraint(fk)
                else:
                    PrintException.keyError()
                    return -1

        self.tables[name] = Table.createTable(name,
                                              col_names=col_names,
                                              dtype=dtype,
                                              primary_key=primary_key)
        return 0

    def _drop_table(self, table_name, if_exist=False):
        if not self.get_table(table_name):
            if if_exist:
                return 0
            else:
                PrintException.keyError()
                return -1
        self.tables.pop(table_name)
        return 0
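
A side note on Core.handler above: the long if/elif chain on d['type'] could also be written as a dictionary dispatch. A tiny, self-contained sketch of that pattern (hypothetical toy handlers, not the Core API):

def do_select(d):
    return 'select from ' + ', '.join(d['from'])

def do_delete(d):
    return 'delete from ' + d['from']

DISPATCH = {'select': do_select, 'delete': do_delete}

def handle(d):
    # Look up the handler by statement type instead of chaining elifs.
    action = DISPATCH.get(d['type'])
    if action is None:
        raise ValueError('unknown statement type: %r' % d['type'])
    return action(d)

print(handle({'type': 'select', 'from': ['Employee', 'Company']}))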
Example #23
def callPainter():
    str = textBox.get('1.0', "end")
    # Parser(string, show=False)
    # paint=True plots the function
    Parser(str, paint=True)
Example #24
    def read_line(cls, line):
        """
            Parses the input line and calls the corresponding method from the Table class.
        """
        line = Parser.remove_comments(line)
        if line:
            with Timer() as t:
                if ':=' in line:
                    left, right = Parser.splitlr(line)

                    if 'inputfromfile' in right:
                        inp_name = Parser.parse(right, 'input')
                        output = Table(left, inp_name)

                    elif 'select' in right:
                        base_table, args, condition = Parser.parse(
                            right, 'select')

                        output = getattr(
                            cls, base_table).select(
                            args, condition, left)

                    elif 'project' in right:
                        base_table, args = Parser.parse(right, 'project')

                        output = getattr(cls, base_table).project(args, left)

                    elif 'avggroup' in right:
                        base_table, args = Parser.parse(right, 'project')

                        output = getattr(
                            cls, base_table).avg_sum_count_group(
                            args, left)

                    elif 'movavg' in right:
                        base_table, args = Parser.parse(right, 'project')

                        output = getattr(
                            cls, base_table).moving_avg_sum(
                            args, left)
                        setattr(cls, left, output)

                    elif 'avg' in right:
                        base_table, args = Parser.parse(right, 'avg')

                        output = getattr(cls, base_table).avg_sum_count(args)

                    elif 'sumgroup' in right:
                        base_table, args = Parser.parse(right, 'project')

                        output = getattr(
                            cls, base_table).avg_sum_count_group(
                            args, left, 'sum')

                    elif 'movsum' in right:
                        base_table, args = Parser.parse(right, 'project')

                        output = getattr(
                            cls, base_table).moving_avg_sum(
                            args, left, 'sum')

                    elif 'sum' in right:
                        base_table, args = Parser.parse(right, 'avg')

                        output = getattr(
                            cls, base_table).avg_sum_count(
                            args, 'sum')

                    elif 'sort' in right:
                        base_table, args = Parser.parse(right, 'project')
                        output = getattr(cls, base_table).sort(args, left)

                    elif 'join' in right:
                        name1, name2, args, condition = Parser.parse(
                            right, 'join')

                        table1 = getattr(cls, name1)
                        table2 = getattr(cls, name2)

                        output = Table.join(
                            name1, name2, args, condition, left, table1, table2)

                    elif 'concat' in right:
                        args = Parser.parse(right, 'concat')

                        tables = []
                        for a in args:
                            tables.append(getattr(getattr(cls, a), 't'))
                        output = Table.concat(tables, left)

                    elif 'countgroup' in right:
                        base_table, args = Parser.parse(right, 'project')

                        output = getattr(
                            cls, base_table).avg_sum_count_group(
                            args, left, 'count')

                    elif 'count' in right:
                        base_table, args = Parser.parse(right, 'avg')

                        output = getattr(
                            cls, base_table).avg_sum_count(
                            args, 'count')

                    setattr(cls, left, output)

                    cls.f.write(line + '\n')
                    cls.f.write(str(output))
                    cls.f.write('\n')

                else:
                    base_table, col = Parser.parse(line, 'Btree')
                    table = getattr(cls, base_table)
                    if line.startswith('Btree'):
                        table.index_btree(col)

                    elif line.startswith('Hash'):
                        table.index_hash(col)

                    elif line.startswith('outputtofile'):
                        table.output(col)

            print(line, '\nQuery took %.06f sec.\n' % t.interval)
Example #25
    def test_get_place(self):
        """ If all is OK, Parser will return the name of the place. """

        parser = Parser(self.GOOD_QUESTION)
        assert parser.get_place() == self.PLACE