Example #1
def main(link):
    clear()
    while True:
        print('''
\033[1;36;40m=============
Made by SKane
=============

1. Firefox
2. Chrome
3. Exit
            ''')
        try:
            option = int(input('Option: '))
        except ValueError:
            print('The option must be an integer!')
            input('Press enter to continue...')
            continue
        if option > 3 or option < 1:
            print('Option must be either 1, 2, or 3!')
            input('Press enter to continue...')
        elif option == 3:
            break
        else:
            if option == 1:
                Query.ff(link)
            elif option == 2:
                Query.ch(link)
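
The menu above calls a clear() helper (as well as Query.ff and Query.ch) that is defined elsewhere in the project. A minimal cross-platform sketch of what clear() might look like (an assumption, not the author's code):

import os

def clear():
    # 'cls' clears the console on Windows, 'clear' on POSIX terminals
    os.system('cls' if os.name == 'nt' else 'clear')
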
Example #2
    def _generate_select_queries(self, table):
        """
        Generates all possible queries between combinations of the numeric_columns
        :param table (Table object): input table for which to generate the queries
        :return: None
        """
        # dict mapping each formatted numeric column (key) to its original column name (value)
        numeric_columns = self.format_columns(list(table.numeric_columns),
                                              table.df.name)
        select_queries = []
        # first take care of the single column queries
        for formatted_column, column in numeric_columns.items():
            expr = Expression()
            expr.update_cols(column)
            expr.expr_text = column
            query_text = "SELECT {} from {}".format(formatted_column,
                                                    table.df.name)
            query = Query(query_text,
                          expr=expr,
                          type="select_single_column",
                          table=table)
            select_queries.append(query)
        # now build every possible arithmetic combination of the numeric columns:
        # for each expression length (col_num), take every permutation of that many
        # columns and combine it with every permutation of the operators to enumerate
        # all candidate expressions
        bound = self.complexity_bound
        for col_num in range(2, bound + 1):
            perms = itertools.product(numeric_columns, repeat=col_num)
            ops_permutations = self.get_operators_permutations(col_num - 1)
            for perm in perms:
                for op_perm in ops_permutations:
                    expr_text = ""
                    expr = Expression()
                    for j in range(0, len(perm) - 1):
                        expr_text += perm[j] + op_perm[j]
                        expr.update_cols(numeric_columns[perm[j]])
                        expr.update_ops(op_perm[j])
                        if j == len(perm) - 2:
                            expr_text += perm[j + 1]
                            expr.update_cols(numeric_columns[perm[j + 1]])
                    expr.expr_text = expr_text
                    query_text = "SELECT {} FROM {}".format(
                        expr_text, table.df.name)
                    query = Query(query_text,
                                  expr,
                                  type="select_expression",
                                  table=table)
                    select_queries.append(query)

        self.logger.info("Created {} select queries for table {}".format(
            len(select_queries), table.df.name))
        self.candidate_queries += select_queries
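
The combination loop above enumerates expressions with itertools.product over column permutations and operator permutations. A minimal standalone sketch of that idea follows; get_operators_permutations and format_columns are not shown, so the operator set, column names, and table name below are assumptions made for illustration:

import itertools

columns = ["price", "quantity"]          # assumed numeric columns
operators = ["+", "-", "*", "/"]         # assumed operator set
complexity_bound = 2

for col_num in range(2, complexity_bound + 1):
    for cols in itertools.product(columns, repeat=col_num):
        for ops in itertools.product(operators, repeat=col_num - 1):
            # interleave columns and operators into one expression string
            expr = cols[0]
            for op, col in zip(ops, cols[1:]):
                expr += op + col
            print("SELECT {} FROM sales".format(expr))
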
Example #3
    def getPullRequestUsers(self, perPage=100):

        states = ['MERGED', 'OPEN', 'CLOSED']
        user = self.user
        dc = ControlDir(user.loginUser)  # creates the user's directory

        for state in states:
            pullRequestsConfig = 'first:{}, states:{}'.format(perPage, state)
            cont = 0
            pwdCurrent = dc.userDirectory + '\\' + state
            dc.newDirectory(pwdCurrent)  # add the state to the directory path
            print(user.loginUser)
            print(state)

            while True:
                query = Query.getMax100CommitForPullRequests(user.loginUser, pullRequestsConfig)

                result = self.requestApiGitHubV4(query)  # Execute the query

                pullRequests = result["data"]["user"]['pullRequests']
                user.numberPullRequest = pullRequests['totalCount']
                for nodeCommit in pullRequests['nodes']:
                    cont += 1
                    print('pull nº ', str(cont) + '/' + str(user.numberPullRequest))
                    self.getAlterateCommitPull(nodeCommit, perPage, state, dc, pwdCurrent)

                endCursorPull = pullRequests['pageInfo']['endCursor']
                asNextPagePull = pullRequests['pageInfo']['hasNextPage']

                if asNextPagePull:
                    pullRequestsConfig = 'first:{}, after:"{}", states:{}'.format(perPage, endCursorPull, state)
                else:
                    break
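
getPullRequestUsers (like the other GitHub methods in these examples) pages through results using the pageInfo.hasNextPage / endCursor fields of GitHub's GraphQL v4 API; requestApiGitHubV4 itself is not shown. A self-contained sketch of that pagination pattern, with a placeholder token and a simplified query, could look like:

import requests

URL = "https://api.github.com/graphql"
HEADERS = {"Authorization": "bearer YOUR_TOKEN"}   # placeholder token
QUERY = """
query($login: String!, $after: String) {
  user(login: $login) {
    pullRequests(first: 100, after: $after, states: MERGED) {
      nodes { title }
      pageInfo { hasNextPage endCursor }
    }
  }
}
"""

def fetch_all_pull_requests(login):
    after = None
    nodes = []
    while True:
        variables = {"login": login, "after": after}
        resp = requests.post(URL, json={"query": QUERY, "variables": variables},
                             headers=HEADERS).json()
        page = resp["data"]["user"]["pullRequests"]
        nodes += page["nodes"]
        if not page["pageInfo"]["hasNextPage"]:
            return nodes
        after = page["pageInfo"]["endCursor"]
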
Example #4
    def getUserRepositoryCommit(self, arrayRepository, numPage=100):
        arrayCommits = []
        for repository in arrayRepository:
            print(repository.nameWithOwner)
            owner, name = repository.nameWithOwner.split('/')
            queryVariables = {
                "numPageIssues": numPage,
                "idUser": self.user.id,
                "owner": owner,
                "name": name
            }
            after = ''
            while True:
                query = Query.repCommit(after)
                resp = self.requestApiGitHubV4(query, queryVariables)
                print(resp)
                if not resp['data']['repository']['defaultBranchRef']:
                    break

                resp = resp['data']['repository']['defaultBranchRef']['target']['history']
                numCommit = resp['totalCount']
                print(numCommit)
                for commit in resp['nodes']:
                    arrayCommits.append(Commit(self.user, repository, numCommit, commit))

                if not resp['pageInfo']['hasNextPage']:
                    break
                after = resp['pageInfo']['endCursor']
        return arrayCommits
Example #5
    def getUserRepositoryIssues(self, numPage=100):
        queryVariables = {
            "numPageIssues": numPage,
            "owner": "sstephenson",
            "name": "sprockets"
        }

        query = Query.repIssues()
        resp = self.requestApiGitHubV4(query, queryVariables)
        print(resp)
Example #6
def main(db_name):
    """creates database with tables
    """
    conn = sqlite3.connect(db_name)
    cursor = conn.cursor()
    conf = Fconfig('src/config.ini')
    table_list = conf.get_config('tables')['name'].split()
    for table in table_list:
        fields = conf.get_table_fields(table)
        query = Query.get_create_qry(table, fields)
        cursor.execute(query)
    conn.commit()
    conn.close()
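
Fconfig and Query.get_create_qry are project helpers that are not shown. A rough equivalent of the same flow with a hard-coded field mapping (the table and column definitions below are invented for illustration) would be:

import sqlite3

tables = {
    "students": {"id": "INTEGER PRIMARY KEY", "name": "TEXT", "grade": "INTEGER"},
}

conn = sqlite3.connect("example.db")
cursor = conn.cursor()
for table, fields in tables.items():
    columns = ", ".join("{} {}".format(col, col_type) for col, col_type in fields.items())
    cursor.execute("CREATE TABLE IF NOT EXISTS {} ({})".format(table, columns))
conn.commit()
conn.close()
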
Example #8
    def getUserInf(self):
        queryVariables = {
            "nameUser": self.user.loginUser,
        }
        query = Query.userPerfilInfo()
        info = self.requestApiGitHubV4(query, queryVariables)
        self.user.name = info["data"]["user"]["name"]
        self.user.id = info["data"]["user"]["id"]
        self.user.email = info["data"]["user"]["email"]
        self.user.avatarUrl = info["data"]["user"]["avatarUrl"]
        self.user.bio = info["data"]["user"]["bio"]
        self.user.watching = info["data"]["user"]["watching"]["totalCount"]
        self.user.followers = info["data"]["user"]["followers"]["totalCount"]
        self.user.following = info["data"]["user"]["following"]["totalCount"]
        self.user.location = info["data"]["user"]["location"]
        self.user.createdAt = info["data"]["user"]["createdAt"]
        self.user.company = info["data"]["user"]["company"]
        self.user.issues = info["data"]["user"]["issues"]["totalCount"]
        self.user.organizationTotal = info["data"]["user"]["organizations"]["totalCount"]
        self.user.projects = info["data"]["user"]["projects"]["totalCount"]
        self.user.gists = info["data"]["user"]["gists"]["totalCount"]
        self.user.pullRequests = info["data"]["user"]["pullRequests"]["totalCount"]
        self.user.commitComments = info["data"]["user"]["commitComments"]["totalCount"]
        self.user.issueComments = info["data"]["user"]["issueComments"]["totalCount"]
        self.user.gistComments = info["data"]["user"]["gistComments"]["totalCount"]

        namesOrganization = [name["name"] for name in info["data"]["user"]["organizations"]["nodes"]]
        while info["data"]["user"]["organizations"]["pageInfo"]["hasNextPage"]:
            queryVariables = {
                "nameUser": self.user.loginUser,
                "after": info["data"]["user"]["organizations"]["pageInfo"]["endCursor"]
            }
            info = self.requestApiGitHubV4(Query.getOrganizationremnant(), queryVariables)
            namesOrganization += [name["name"] for name in info["data"]["user"]["organizations"]["nodes"]]

        self.user.organizations = namesOrganization
Example #9
    def _generate_existence_queries(self, table):
        """
        Generate existence queries for the input table, i.e. for each column of the table, generate a query that returns its cells
        :param table (Table object): input table for which to generate the queries
        :return: None
        """
        existence_queries = []
        columns = list(table.df.columns)
        rows = list(table.df.row_name)
        for column in columns:
            query_text = "SELECT t.'{}' from {} as t".format(
                column, table.df.name)
            query = Query(query_text, type="existence", table=table)
            existence_queries.append(query)

        self.logger.info("Created {} existence queries for table {}".format(
            len(existence_queries), table.df.name))
        self.candidate_queries += existence_queries
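
The generated text relies on SQLite accepting single-quoted column names after a table alias (t.'column'). A quick, self-contained way to check that the query shape is valid (the table and column names below are invented):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE scores ('year', 'points')")
conn.execute("INSERT INTO scores VALUES (2020, 42)")
query_text = "SELECT t.'{}' from {} as t".format("points", "scores")
print(conn.execute(query_text).fetchall())   # -> [(42,)]
conn.close()
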
Example #10
    def make_query(self, query_text):
        query_object = Query(self.counter_queries, query_text)
        entities_found = self.text_to_ent_idx(query_text)
        query_object.add_entities(entities_found)
        doc_similarities = self.find_similarities(query_text)
        query_object.add_similarities(doc_similarities)
        self.queries.append(query_object)
        self.counter_queries += 1
        self.graph_instance.add_ent_to_doc(query_object, type='QUERY')
        self.graph_instance.add_similarity_to_query(query_object, type='QUERY')

        print("SiMILARITEISASAS", query_object.similarities)
        """
Example #11
    def getUserInfByYear(self):
        dateCreated = self.user.createdAt.split("-")
        yearCreated = int(dateCreated[0])
        monthCreated = int(dateCreated[1])
        flagMonth = True

        todayDate = str(date.today()).split('-')
        todayYear = int(todayDate[0])
        todayMonth = int(todayDate[1])
        userYearInfo = {}

        while yearCreated <= todayYear:

            if flagMonth:
                month = monthCreated
                flagMonth = False
            else:
                month = 1

            userMonthinfo = {}
            print(yearCreated)
            while True:
                if month > 12 or (yearCreated == todayYear and month > todayMonth):
                    break

                monthAux = str(month)
                monthAux = (str(0) + monthAux) if month < 10 else monthAux
                queryVariables = {
                    "nameUser": self.user.loginUser,
                    "fromDate": '{}-{}-01T04:00:00Z'.format(yearCreated, monthAux),
                    "toDate": '{}-{}-31T23:59:59Z'.format(yearCreated, monthAux),
                }
                query = Query.userInfoContributionsCollection()
                userMonthinfo[month] = self.requestApiGitHubV4(query, queryVariables)["data"]["user"]["contributionsCollection"]
                month += 1

            userYearInfo[yearCreated] = userMonthinfo
            yearCreated += 1

        return userYearInfo
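
The toDate above hard-codes day 31 for every month; calendar.monthrange gives the actual last day of a month, as in this small sketch (an alternative, not the author's code):

import calendar

def month_range(year, month):
    last_day = calendar.monthrange(year, month)[1]   # number of days in the month
    from_date = '{}-{:02d}-01T04:00:00Z'.format(year, month)
    to_date = '{}-{:02d}-{:02d}T23:59:59Z'.format(year, month, last_day)
    return from_date, to_date

print(month_range(2021, 2))   # ('2021-02-01T04:00:00Z', '2021-02-28T23:59:59Z')
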
Example #12
    def getAlterateCommitPull(self, nodeCommit, perPage, state, dc, pwdCurrent):
        allPatchs = []
        totalCommit = nodeCommit['commits']['totalCount']
        numberPull = nodeCommit['commits']['nodes'][0]['resourcePath'].split('/')[4]

        if totalCommit < perPage:  # no caso de mais 100 commits em um pullrequest
            allPatchs = nodeCommit['commits']['nodes']
        else:
            commitInfo = 'first:{}'.format(perPage)
            for i in range(math.ceil(totalCommit / perPage)):  # round the float up to an integer
                try:
                    query = Query.getAllCommitForPullRequest(self.__user.loginUser, numberPull, commitInfo, state)
                    result = self.requestApiGitHubV4(query)

                    endCursor = \
                        result['data']['user']['pullRequests']['nodes'][0]['repository']['pullRequest']['commits'][
                            'pageInfo']['endCursor']
                    commitInfo = 'first:{}, after:"{}"'.format(perPage, endCursor)
                    allPatchs += \
                        result['data']['user']['pullRequests']['nodes'][0]['repository']['pullRequest']['commits'][
                            'nodes']
                except Exception:
                    # API v4 does not find some closed pull requests
                    pass

        for i, node in enumerate(allPatchs):
            print('\tCommit', str(i + 1) + '/' + str(totalCommit))
            url = 'https://github.com' + node['resourcePath'] + '.patch'

            pwd = pwdCurrent + '\\' + url.split('/')[6]
            if i == 0:
                dc.newDirectory(pwd)

            req = self.performRequest(url)  # fetch the raw patch

            if req:
                with open(pwd + '\\' + node['commit']['abbreviatedOid'] + '.txt', 'w', encoding="utf-8") as file:
                    file.write(req.text)
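
The paths above are built with literal backslashes, which only works on Windows. A portable sketch of the same patch-saving step using os.path.join (function and argument names are illustrative):

import os

def save_patch(base_dir, pull_number, abbreviated_oid, patch_text):
    pull_dir = os.path.join(base_dir, pull_number)
    os.makedirs(pull_dir, exist_ok=True)
    path = os.path.join(pull_dir, abbreviated_oid + '.txt')
    with open(path, 'w', encoding="utf-8") as f:
        f.write(patch_text)
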
Example #13
    def _generate_percentage_decrease_queries(self, table):
        """
        Generate percentage decrease queries for each column combination, i.e. for columns [col1, col2, col3] generate
        the percentage decrease between [col1, col2], [col1, col3] and [col2, col3] for each of their rows.
        :param table (Table object): input table for which to generate the queries
        :return: None
        """
        percentage_decrease_queries = []
        columns = list(table.df.columns)
        rows = list(table.df.row_name)
        column_tuples = self._get_column_tuples(columns)
        for column_tuple in column_tuples:
            query_text = "SELECT 100.0*(t.'{}' - t.'{}')/t.'{}' from {} as t".format(
                column_tuple[0], column_tuple[1], column_tuple[1],
                table.df.name)
            query = Query(query_text, type="percentage_decrease", table=table)
            percentage_decrease_queries.append(query)

        self.logger.info(
            "Created {} percentage decrease queries for table {}".format(
                len(percentage_decrease_queries), table.df.name))
        self.candidate_queries += percentage_decrease_queries
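
_get_column_tuples is not shown; the docstring describes plain pairwise combinations, which itertools.combinations produces directly. A sketch under that assumption (column and table names invented):

import itertools

columns = ["col1", "col2", "col3"]
for a, b in itertools.combinations(columns, 2):
    print("SELECT 100.0*(t.'{}' - t.'{}')/t.'{}' from {} as t".format(a, b, b, "some_table"))
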
Example #14
    def repositoryUser(self, nameUser, numPage=80):
        queryVariables = {
            "nameUser": nameUser,
            "numPage": numPage
        }
        RepositoryAffiliation = {'OWNER': [], 'COLLABORATOR': []}
        for repAff in RepositoryAffiliation.keys():
            queryVariables["RepositoryAffiliation"] = repAff
            after = ''
            while True:
                query = Query.repInfo(after)
                resp = self.requestApiGitHubV4(query, queryVariables)
                for rep in resp['data']['user']['repositories']['nodes']:
                    RepositoryAffiliation[repAff].append(Repository(repAff, self.user, rep))

                if not resp['data']['user']['repositories']['pageInfo']['hasNextPage']:
                    break
                after = resp['data']['user']['repositories']['pageInfo']['endCursor']
            break  # stops after the OWNER affiliation, so COLLABORATOR is never fetched

        return RepositoryAffiliation['OWNER'], RepositoryAffiliation['COLLABORATOR']
Example #15
def replay_extracted_frames(extracted_frames, main_frame):
    # query index, we store it here so we have it for reference in the parsing error popup
    query_index = 1
    try:
        # disable the start button and the input query text area while the replay is running
        main_frame.start_button.config(state="disabled")
        main_frame.query_input.config(state="disabled")

        # create our user queries by parsing the text area content, and create a query manager as well
        user_queries = []
        user_queries_text = main_frame.query_input.get("1.0", END).strip()
        for user_query_text in user_queries_text.split("\n\n"):
            user_queries.append(Query(user_query_text))
            query_index += 1
        query_manager = QueryManager(main_frame)
        for query in user_queries:
            query_manager.add_query(query)

        # go through every frame of the objects
        for frame_index in range(0, len(extracted_frames) - 1):
            # move the ball on the screen
            if constants.FRAME_BALL in extracted_frames[frame_index]:
                main_frame.move_ball(
                    position_to_screen_coord(
                        extracted_frames[frame_index][constants.FRAME_BALL]))

            # move the players on the screen
            main_frame.move_players(
                extracted_frames[frame_index][constants.FRAME_PLAYER])

            # parse the current message
            query_manager.add_message(extracted_frames[frame_index])

            # wait between frames, for the difference of time between them
            sleep(extracted_frames[frame_index + 1][constants.FRAME_TIME] -
                  extracted_frames[frame_index][constants.FRAME_TIME])

            # update the timer as well to reflect the time passed since the game started
            main_frame.set_time(extracted_frames[frame_index +
                                                 1][constants.FRAME_TIME])
    except QueryParseException as exception:
        # if a query could not be parsed, display it as a popup message with the error itself
        messagebox.showwarning(
            "Input query #" + str(query_index) + " format error",
            str(exception))
    finally:
        # regardless of the queries being parsed or not, enable the start button and the query input as
        # the replay finishes
        main_frame.start_button.config(state="normal")
        main_frame.query_input.config(state="normal")

        # move the ball back to the center of the screen (first frame's position)
        main_frame.move_ball(
            position_to_screen_coord(
                extracted_frames[0][constants.FRAME_BALL]))

        # move the players back to the first frame's position
        main_frame.move_players(extracted_frames[0][constants.FRAME_PLAYER])

        # reset the timer to 00:00
        main_frame.set_time(0)

        # clear the query output
        main_frame.query_output.config(state="normal")
        main_frame.query_output.delete('1.0', END)
        main_frame.query_output.config(state="disabled")
Example #16
from src.query import Query
from src.table import Table
from src.bits import Bits
table = Table("test", 5, 0)

query = Query(table)
# record1 = [1, 90, 0, 0, 0]
# record2 = [2, 91, 0, 0, 0]
# record3 = [3, 92, 0, 0, 0]
query.insert(1, 90, 0, 0, 0)
query.insert(2, 91, 0, 0, 0)
query.insert(3, 92, 0, 0, 0)

r1 = table.get(1, Bits('11111'))
r2 = table.get(2, Bits('11111'))
r3 = table.get(3, Bits('11111'))
print(r1)
print(r2)
print(r3)

query.delete(1)
query.delete(2)
print("record3's LID: ", table.key_lid[3])
Example #17
def parse_foldrec(foldrec_file, metafold_dict):
    """
        Extracts the score, the template name, the query and template sequences for
        the first n alignments from a file containing N profile-profile alignments and
        creates a list of Alignment objects. It also gives the template's pdb file name
        and gets all the coordinates of the CA atoms in the template's Residue list.

        Args:
            foldrec_file (str): The file containing N profile-profile alignments and their
                                corresponding scores.
            metafold_dict (dict): A dictionary with key = template name
                                        and value = pdb file.

        Returns:
            dict: A dictionary with key = template name and value = an Alignment object.
    """
    # Regex :
    num_reg = re.compile("^No\\s*([0-9]+)")
    template_name_reg = re.compile("^Alignment :.*vs\\s+([A-Za-z0-9-_]+)")
    score_reg = re.compile("^Score :\\s+([-0-9\\.]+)")
    query_seq_reg = re.compile(
        "^Query\\s*([0-9]+)\\s*([A-Z0-9-]+)\\s*([0-9]+)")
    template_seq_reg = re.compile("^Template\\s*[0-9]+\\s*([A-Z-]+)")
    empty_query_reg = re.compile("^Query\\s+\\d\\s-+\\s+\\d.*$")

    alignment_dict = {}
    count_templates = 0

    with open(foldrec_file, "r") as file:
        query_reg_count = 0
        template_reg_count = 0
        for line in file:
            # Search a regex for the current line :
            num_found = re.search(num_reg, line)
            template_name_found = re.search(template_name_reg, line)
            score_found = re.search(score_reg, line)
            query_seq_found = re.search(query_seq_reg, line)
            template_seq_found = re.search(template_seq_reg, line)
            empty_query_found = re.search(empty_query_reg, line)
            # A num is found :
            if num_found:
                num = int(num_found.group(1))
            # A template name is found :
            if template_name_found:
                # These templates have more than 1 chain, so we must skip them
                if template_name_found.group(1) in [
                        "bromodomain", "rhv", "Peptidase_A6", "ins",
                        "Arg_repressor_C", "SAM_decarbox", "prc",
                        "Chorismate_mut"
                ]:
                    # We skip the alignment
                    for _ in range(10):
                        next(file)
                template_name = template_name_found.group(1)
            # A score is found :
            if score_found:
                score = float(score_found.group(1))
            # Empty alignment (the query is gaps only)
            if empty_query_found and query_reg_count == 2:
                print("Skipping alignement " + str(num) +
                      ". The query is only composed of gaps.")
                # We skip the alignment
                for _ in range(5):
                    next(file)
            # A query sequence is found :
            if query_seq_found:
                if query_reg_count == 0:
                    query_first = int(query_seq_found.group(1))
                    query_seq = [
                        Residue(name)
                        for name in list(query_seq_found.group(2))
                    ]
                    query_last = int(query_seq_found.group(3))
                    query_reg_count += 1
                elif query_reg_count == 1:
                    for ind, sec_struct in enumerate(
                            list(query_seq_found.group(2))):
                        query_seq[ind].secondary_struct = sec_struct
                    query_reg_count += 1
                elif query_reg_count == 2:
                    for ind, ss_conf in enumerate(
                            list(query_seq_found.group(2))):
                        if ss_conf != "-":
                            ss_conf = int(ss_conf)
                        query_seq[ind].ss_confidence = ss_conf
                    query_reg_count = 0
            # A template sequence is found:
            if template_seq_found:
                if template_reg_count == 0:
                    template_seq = [
                        Residue(name)
                        for name in list(template_seq_found.group(1))
                    ]
                    template_reg_count += 1
                elif template_reg_count == 1:
                    for ind, sec_struct in enumerate(
                            list(template_seq_found.group(1))):
                        template_seq[ind].secondary_struct = sec_struct
                    template_reg_count = 0
                    # Add a new alignment object in the list :
                    ali = Alignment(num, score,
                                    Query(query_seq, query_first, query_last),
                                    Template(template_name, template_seq))
                    ali.template.set_pdb_name(metafold_dict)
                    ali.template.parse_pdb("data/templates/" +
                                           ali.template.name + "/" +
                                           ali.template.pdb + ".atm")
                    alignment_dict[template_name] = ali
                    count_templates += 1
    return alignment_dict
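
To see what the three capture groups of query_seq_reg hold, here is a tiny demo on a made-up foldrec query line (the line is hypothetical; the regex is the one defined above):

import re

query_seq_reg = re.compile("^Query\\s*([0-9]+)\\s*([A-Z0-9-]+)\\s*([0-9]+)")
line = "Query     12   MKV-LLAG   19"          # hypothetical alignment line
match = re.search(query_seq_reg, line)
print(match.group(1), match.group(2), match.group(3))   # 12 MKV-LLAG 19
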
Example #18
from src.db import Database
from src.query import Query
from random import randint, seed
# Transaction and TransactionWorker come from the project's transaction modules;
# their import paths are not shown in this snippet.

db = Database()
db.open('~/ECS165')
grades_table = db.create_table('Grades', 5, 0)

keys = []
records = {}
num_threads = 2
seed(8739878934)

# Generate random records
for i in range(0, 10000):
    key = 92106429 + i
    keys.append(key)
    records[key] = [key, 0, 0, 0, 0]
    q = Query(grades_table)
    q.insert(None, False, False, *records[key])

# create TransactionWorkers
transaction_workers = []
for i in range(num_threads):
    transaction_workers.append(TransactionWorker([]))

# generates 1000 random transactions
# each transaction will increment the first column of a record 5 times
for i in range(1000):
    k = randint(0, 2000 - 1)
    transaction = Transaction()
    for j in range(5):
        key = keys[k * 5 + j]
        q = Query(grades_table)
Example #19
from src.db import Database
from src.query import Query
#from src.config import init

from random import choice, randint, sample, seed
from colorama import Fore, Back, Style

# Student Id and 4 grades
#init()
db = Database()
grades_table = db.create_table('Grades', 5, 0)
query = Query(grades_table)

records = {}

seed(3562901)

for i in range(0, 1000):
    key = 92106429 + randint(0, 9000)
    while key in records:
        key = 92106429 + randint(0, 9000)
    records[key] = [
        key,
        randint(0, 20),
        randint(0, 20),
        randint(0, 20),
        randint(0, 20)
    ]
    query.insert(*records[key])
    print('inserted', records[key])
Example #20
 def getUserCommitContribution(self):
     query = Query.userCommitContribution()
     return self.requestApiGitHubV4(query)
Example #21
from src.db import Database
from src.query import Query

from random import choice, randint, sample, seed

# Student Id and 4 grades
db = Database()
db.open('~/ECS165')
grades_table = db.get_table('Grades')
query = Query(grades_table)

# repopulate with random data
records = {}
seed(3562901)
for i in range(0, 1000):
    key = 92106429 + i
    records[key] = [
        key,
        randint(0, 20),
        randint(0, 20),
        randint(0, 20),
        randint(0, 20)
    ]
keys = sorted(list(records.keys()))
for _ in range(10):
    for key in keys:
        for j in range(1, grades_table.num_columns):
            value = randint(0, 20)
            records[key][j] = value
keys = sorted(list(records.keys()))
# for key in keys:
Example #22
 def v(self, *args):
     query = Query(self)
     query.add('vertex', list(args))
     return query
Example #23
from src.db import Database
from src.query import Query
from time import process_time
from random import choice, randrange

# Student Id and 4 grades
db = Database()
grades_table = db.create_table('Grades', 5, 0)
query = Query(grades_table)
keys = []

# Measuring Insert Performance
insert_time_0 = process_time()
for i in range(0, 10000):
    query.insert(906659671 + i, 93, 0, 0, 0)
    keys.append(906659671 + i)
insert_time_1 = process_time()

print("Inserting 10k records took:  \t\t\t", insert_time_1 - insert_time_0)

# Measuring update Performance
update_cols = [
    [randrange(0, 100), None, None, None, None],
    [None, randrange(0, 100), None, None, None],
    [None, None, randrange(0, 100), None, None],
    [None, None, None, randrange(0, 100), None],
    [None, None, None, None, randrange(0, 100)],
]

update_time_0 = process_time()
for i in range(0, 10000):
db = Database()
db.open('~/ECS165')
grades_table = db.create_table('Grades', 5, 0)

keys = []
records = {}
seed(3562901)
num_threads = 2

# Generate random records
for i in range(0, 10000):
    key = 92106429 + i
    keys.append(key)
    records[key] = [key, 0, 0, 0, 0]
    q = Query(grades_table)
    q.insert(None, False, False, *records[key])

print("DB created")
# Create transactions and assign them to workers
transactions = []
transaction_workers = []
for i in range(num_threads):
    transaction_workers.append(TransactionWorker())


for i in range(100):
    key = choice(keys)
    record = records[key]
    c = record[1]
    transaction = Transaction()