Example #1
def create_new_db(event):
    """Create a new database file from the name typed into the entry widget."""
    global db_name
    name = new_db_entry.get()
    db_name = f"{name}.db"

    if os.path.exists(db_name):
        # Refuse to overwrite an existing database file.
        showerror("Invalid database name", "This database already exists")
    else:
        # Create the database, retitle the main window, and switch windows.
        backend.create_table(db_name)
        main_window.title(f"My library = {db_name}")
        create_db_window.destroy()
        db_choose.withdraw()
        main_window.deiconify()
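
The backend module these snippets import is not shown. For Example #1, a minimal sketch of what its create_table helper might look like, assuming sqlite3 and a hypothetical books schema (neither appears in the original listing):

import sqlite3

def create_table(db_name):
    # Create the database file and a sample table if they don't exist yet.
    # The "books" schema is an assumption; the real backend is not shown.
    conn = sqlite3.connect(db_name)
    conn.execute("CREATE TABLE IF NOT EXISTS books ("
                 "id INTEGER PRIMARY KEY AUTOINCREMENT, "
                 "title TEXT, author TEXT, year INTEGER)")
    conn.commit()
    conn.close()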
Example #2
def main():
    # terminator flag for the while loop
    run = True

    # creating database
    backend.create_table()

    # while loop to run the program until the selection of the exit option
    while run:
        # printing the options
        print('\n')
        print('1. Insert new task in todo list \n'
              '2. View the todo list \n'
              '3. Delete the task \n'
              '4. Exit \n')
        # taking input of the option
        x = input("Choose any option: ")

        # selecting the action based on the option
        if x == "1":
            # taking the input to add the new task (input already returns str)
            task = input("Enter your task: ")

            # entering data in database
            backend.data_entry(task)
        elif x == "2":
            # printing the data
            backend.printData()
        elif x == "3":
            # taking the index input to delete the task
            indexToDelete = int(
                input(
                    "\nenter the index of the task that you want to delete: "))

            # deleting the task
            backend.deleteTask(indexToDelete)
        elif x == "4":
            # clearing the flag to terminate the loop
            run = False
        else:
            # if the user will not choose valid option then it will show this
            print("Pls Enter Valid Option!")

    # closing the database connection
    backend.closeCursor()
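
Example #2's backend is likewise not shown. A self-contained sketch of the five helpers it calls, built on sqlite3; the tasks schema and the delete-by-id semantics are guesses:

import sqlite3

_conn = sqlite3.connect("todo.db")  # hypothetical file name
_cursor = _conn.cursor()

def create_table():
    _cursor.execute("CREATE TABLE IF NOT EXISTS tasks "
                    "(id INTEGER PRIMARY KEY AUTOINCREMENT, task TEXT)")
    _conn.commit()

def data_entry(task):
    _cursor.execute("INSERT INTO tasks (task) VALUES (?)", (task,))
    _conn.commit()

def printData():
    for task_id, task in _cursor.execute("SELECT id, task FROM tasks"):
        print(task_id, task)

def deleteTask(index):
    # Assumes "index" refers to the task's id column.
    _cursor.execute("DELETE FROM tasks WHERE id = ?", (index,))
    _conn.commit()

def closeCursor():
    _cursor.close()
    _conn.close()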
Example #3
def main():
    print(str(datetime.datetime.now()) + '; ', end=" ")  # log time.

    reddit_rss_url = "https://reddit.com/.rss"  # unused here; per-subreddit URLs are built below

    # Connect to SQL server.
    connection = None
    try:
        connection = backend.get_connection()
        cursor = connection.cursor()
        print("PostgreSQL connection is opened; ", end=" ")

        # Create the table if it doesn't already exist.
        if not backend.is_table_exists(cursor, TABLE_NAME):
            backend.create_table(cursor, TABLE_NAME)

        # Scrape every subreddit of posts that reach front page.
        post_id = ""
        subreddits = backend.get_subreddits(cursor, "top_posts")
        subreddits_counter = 0
        for subreddit in subreddits:
            #print(str(subreddits_counter) + ": scraping " + subreddit[0] + ".")
            subreddits_counter += 1
            rank_counter = 0
            # Call function to get entries using rss url.
            entries = get_entries("https://reddit.com/r/" + subreddit[0] +
                                  "/.rss")
            # Skip unsuccessful return value, such as /r/FBIOpenUp.
            if entries is None:
                continue
            # Loop through each entry to scrape content.
            for entry in entries:
                author_name = "/u/[deleted]"
                if len(entry.findAll("name")) > 0:
                    author_name = entry.findAll("name")[0].text
                category = entry.findAll("category")[0]['term']
                post_id = entry.findAll("id")[0].text
                link = entry.findAll("link")[0]['href']
                updated = entry.findAll("updated")[0].text
                title = entry.findAll("title")[0].text.replace(
                    "'", "''"
                )  # this is how you escape the single quote for psycopg2
                content = entry.findAll("content")[0].text.replace("'", "''")

                # --- Debug Block ---
                # print("~~~Entry start~~~~~~~~~~~~~~~~~~~~~")
                # print("author_name = " + author_name)
                # print("category = " + category)
                # print("post_id = " + post_id)
                # print("link = " + link)
                # print("updated = " + updated)
                # print("title = " + title)
                # print("content = " + content)
                # print("~~~Entry end~~~~~~~~~~~~~~~~~~~~~~~\n")

                # Add entry as a record into table.
                backend.modify_record(cursor, TABLE_NAME, post_id,
                                      rank_counter + 1, category, title, link,
                                      author_name, updated, content)
                rank_counter = rank_counter + 1
            # Commit updates to database every 25 subreddits.
            if subreddits_counter % 25 == 0:
                #print("Committing to database now.***************")
                connection.commit()
        # Do a final commit to database after all subreddits.
        #print("Committing to database now.***************")
        connection.commit()

    except (Exception, psycopg2.Error) as error:
        print("Error while connecting to PostgreSQL; ", error, end=" ")
    finally:
        # Close the database connection if it was opened.
        if connection:
            backend.close_connection(connection)
    print(str(datetime.datetime.now()) + '; ', end=" ")  # log time.
    print("Done with scrape;")
Example #4
def main():
    # initialize database with the provided tables
    optimize = False
    init_start = time.time()
    initializer()
    init_end = time.time()
    print("initialization time: ", init_end - init_start)

    # start infinite input loop
    keep_looping = True
    while keep_looping:
        try:
            # take line of input
            inp_line = input("\n>>>\t")

            # check for loop termination and optimization settings
            if inp_line == "exit":
                keep_looping = False
            elif inp_line == "optimize_on":
                optimize = True  # turn off pretty output
            elif inp_line == "optimize_off":
                optimize = False  # turn on pretty output
            else:
                # call parser on line of input and start query timer
                start = time.time()
                parsed_obj = parser_main(inp_line)

                # determine command type and call appropriate functions
                if type(parsed_obj) is Query:
                    # call optimizer on query output from the parser
                    attr_list, resulting_relation, explain_string = optimizer(
                        parsed_obj)

                    # output resulting relation
                    if optimize:
                        print(resulting_relation)
                    else:
                        print(
                            t.tabulate(resulting_relation,
                                       headers=attr_list,
                                       tablefmt="pretty"))

                    print("\nExplain:", explain_string, "\n")

                elif type(parsed_obj) is DML:
                    # call appropriate DML function
                    if parsed_obj.insert:
                        dml_insert(parsed_obj)
                    elif parsed_obj.delete:
                        dml_delete(parsed_obj)
                    else:  # update
                        dml_update(parsed_obj)
                else:
                    # call appropriate DDL function
                    if parsed_obj.create:
                        if parsed_obj.table:
                            # create table
                            create_table(parsed_obj.table_name,
                                         parsed_obj.attr)

                            # Additional adjustments to the table to avoid
                            # passing in the entire object: set up referential
                            # integrity and NOW functionality.
                            table = TABLES[parsed_obj.table_name]
                            table.primary_key = parsed_obj.primary_key
                            table.foreign_key = parsed_obj.foreign_key

                            # if NOW is specified, append it to the relation schema
                            if parsed_obj.now:
                                table.now = True
                                a = Attribute()
                                a.name = "now"
                                a.type = "str"
                                table.attributes.append(a)
                                table.attribute_names.add("now")
                                table.num_attributes += 1
                                table.storage.attr_loc["now"] = (
                                    table.num_attributes - 1)

                            # if foreign key is being set, need to update table being referenced as well
                            if len(parsed_obj.foreign_key) > 0:
                                foreign_table = parsed_obj.foreign_key[1]
                                foreign_attr = parsed_obj.foreign_key[2]
                                child_attr = parsed_obj.foreign_key[0]
                                child_table = parsed_obj.table_name
                                TABLES[foreign_table].child_tables.append(
                                    (foreign_attr, child_table, child_attr))
                        else:
                            # create index on specified table
                            create_index(TABLES[parsed_obj.table_name],
                                         parsed_obj.index_name,
                                         parsed_obj.attr[0])
                    else:
                        if parsed_obj.table:
                            # drop table
                            drop_table(TABLES[parsed_obj.table_name])
                        else:
                            # drop index on specified table
                            delete_index(TABLES[INDEX[parsed_obj.index_name]],
                                         parsed_obj.index_name)

                # end query timer
                end = time.time()
                print("Query durations (sec): ", end - start)
        # recover from malformed input and keep the REPL running
        except ValueError:
            pass
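
Example #4 depends on project-specific globals (TABLES, INDEX) and helpers whose definitions are not shown. Purely as an illustration of the catalog pattern the loop relies on, a hypothetical create_table; the Table class and its fields are inferred from how the REPL uses them:

def create_table(table_name, attr_list):
    # Hypothetical: build an in-memory table and register it in the catalog.
    table = Table()  # assumed class; not part of the original listing
    table.name = table_name
    table.attributes = attr_list
    table.attribute_names = {a.name for a in attr_list}
    table.num_attributes = len(attr_list)
    table.primary_key = []
    table.foreign_key = []
    table.child_tables = []
    TABLES[table_name] = table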
Example #5
def main():
    num_entries_remaining_to_scrape = 100  # decrease to keep only higher-quality posts; increase to also scrape lower-quality or r/new posts.
    print(str(datetime.datetime.now()) + '; ', end=" ")  # log time.
    reddit_rss_url = "https://reddit.com/.rss"

    # Connect to SQL server.
    connection = None
    try:
        connection = backend.get_connection()
        cursor = connection.cursor()
        print("PostgreSQL connection is opened; ", end=" ")

        # Create the table if it doesn't already exist.
        if not backend.is_table_exists(cursor, TABLE_NAME):
            backend.create_table(cursor, TABLE_NAME)

        # Loop to scrape reddit entries.
        post_id = ""
        rank_counter = 0
        while num_entries_remaining_to_scrape > 0:
            # Call function to get entries using rss url.
            if rank_counter == 0:
                entries = get_entries(reddit_rss_url)
            else:
                # If we need to go the next page, then append '?after=' + post_id of last entry of this page
                entries = get_entries(reddit_rss_url + '?after=' + post_id)
            # Loop through each entry to scrape content.
            for entry in entries:
                # Guard against deleted authors (no <name> element), as in the
                # per-subreddit variant in Example #3.
                author_name = "/u/[deleted]"
                if len(entry.findAll("name")) > 0:
                    author_name = entry.findAll("name")[0].text
                category = entry.findAll("category")[0]['term']
                post_id = entry.findAll("id")[0].text
                link = entry.findAll("link")[0]['href']
                updated = entry.findAll("updated")[0].text
                title = entry.findAll("title")[0].text.replace(
                    "'", "''"
                )  # this is how you escape the single quote for psycopg2
                content = entry.findAll("content")[0].text.replace("'", "''")

                # --- Debug Block ---
                # print("~~~Entry start~~~~~~~~~~~~~~~~~~~~~")
                # print("author_name = " + author_name)
                # print("category = " + category)
                # print("post_id = " + post_id)
                # print("link = " + link)
                # print("updated = " + updated)
                # print("title = " + title)
                # print("content = " + content)
                # print("~~~Entry end~~~~~~~~~~~~~~~~~~~~~~~\n")

                # Add record into table.
                backend.modify_record(cursor, TABLE_NAME, post_id,
                                      rank_counter + 1, category, title, link,
                                      author_name, updated, content)
                rank_counter = rank_counter + 1
            num_entries_remaining_to_scrape -= len(entries)

        connection.commit()  # commit update to sql database
        print("committed successfully; ", end=" ")

    except (Exception, psycopg2.Error) as error:
        print("Error while connecting to PostgreSQL; ", error, end=" ")
    finally:
        # Close the database connection if it was opened.
        if connection:
            backend.close_connection(connection)
    print("Done with scrape;")