Example #1
def main(settings=settings):
    """
    Creates sqlite database and users table
    """
    database = settings.DATABASE
    sql_create_users_table = """CREATE TABLE IF NOT EXISTS users (
       id integer PRIMARY KEY,
       name text NOT NULL,
       email text NOT NULL UNIQUE,
       password text NOT NULL,
       is_active integer NOT NULL,
       login_attempts integer NOT NULL,
       last_login_attempt timestamp,
       token text NOT NULL
    );"""

    # create a database connection
    conn = create_connection(database)

    if conn is not None:
        try:
            # create users table
            create_table(conn, sql_create_users_table)
        except Error as e:
            print(e)
        finally:
            conn.close()

    else:
        print('Error! Could not create database connection.')
        exit(1)
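
Example #1 (and Example #4 below) call two helpers, create_connection() and create_table(), that are not shown in the listing. Below is a minimal sketch of what they typically look like, assuming the common sqlite3 pattern; only the names and call signatures come from the examples above, the bodies are an assumption.

import sqlite3
from sqlite3 import Error


def create_connection(db_file):
    """Open (or create) the SQLite file db_file and return a connection, or None on failure."""
    try:
        return sqlite3.connect(db_file)
    except Error as e:
        print(e)
        return None


def create_table(conn, create_table_sql):
    """Execute a CREATE TABLE statement on an open connection."""
    try:
        conn.cursor().execute(create_table_sql)
    except Error as e:
        print(e)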
Example #2
def main():
    try:
        start_time = datetime.now()
        logger.info("Starting Iris!")
        create_table()
        run_similar_user_generator(
            SimilarInterestUserModel,
            sim_interest_model_params,
            UserInterestDataProcessor,
            model_handle="tfidf_user_interest",
        )
        run_similar_user_generator(
            UserCourseViewSimilarityCFModel,
            sim_course_view_params,
            UserCourseViewTimeDataProcessor,
            model_handle="knn_collab_filtering_user_course_view",
        )
        run_similar_user_generator(
            UserCourseLevelViewSimilarityCFModel,
            sim_course_level_model_params,
            UserCourseLevelDataProcessor,
            model_handle="dnn_collab_filtering_user_course_level",
        )
        logger.info("Iris Done Generating Similar Users!")
        logger.info("Time of running the script: {}".format(datetime.now() -
                                                            start_time))
    except Exception:
        log_exception()
Example #3
def dns_capture():
    conn = create_connection("./maldns.db")
    if conn is None:
        sys.exit(1)

    create_table(conn)

    if config.interface not in netifaces.interfaces():
        logging.error("Bad interface. Check config.py")
        sys.exit(1)

    cap = pyshark.LiveCapture(interface=config.interface,
                              bpf_filter='udp port 53')

    for pkt in cap.sniff_continuously():
        store_dns_info(pkt, conn)

    conn.close()
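
Example #3 leaves store_dns_info() and its create_table(conn) schema to the reader. A plausible sketch follows, assuming a dns_queries(qname, ts) table and the standard pyshark DNS field names; both the table layout and the helper body are assumptions, not the original code.

def store_dns_info(pkt, conn):
    # Ignore anything that is not a DNS packet, even with the BPF filter in place.
    if 'DNS' not in pkt:
        return
    try:
        qname = pkt.dns.qry_name            # queried domain name
        ts = pkt.sniff_time.isoformat()     # capture timestamp
    except AttributeError:
        return
    conn.execute("INSERT INTO dns_queries (qname, ts) VALUES (?, ?)", (qname, ts))
    conn.commit()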
Example #4
def create_base_table(conn):
    '''
        Description: Define the SQL to create the program_documents table, and call the
                     create_table() function to execute the command
    '''
    prog_docs_sql = """ CREATE TABLE IF NOT EXISTS program_documents (
                          id integer PRIMARY KEY AUTOINCREMENT,
                          programDocumentID text,
                          documentTypeId text,
                          documentTypeDescription text,
                          documentTypeCode text,
                          documentTypeSortOrder text,
                          geographicAreaId text,
                          geographicAreaCode_ISO3 text,
                          geographicAreaName text,
                          organizationId text,
                          organizationName text,
                          componentId text,
                          componentName text,
                          grantAgreementId text,
                          grantAgreementNumber text,
                          implementationPeriodId text,
                          implementationPeriodName text,
                          processName text,
                          processYear text,
                          processWindow text,
                          fileName text,
                          fileIndex text,
                          fileExtension text,
                          fileSize text,
                          fileLanguage text,
                          fileModifiedDateTime text,
                          fileCreatedDateTime text,
                          fileURL text
                      ); """

    if conn is not None:
        create_table(conn, prog_docs_sql)
Example #5
def test_create_table():

    connection = sqlite3.connect(':memory:')

    # Normal usage: create table
    create_table(connection, 'SPY', database='eod', replace_existing=False)

    with connection:
        cursor = connection.cursor()

        # get table name
        table = cursor.execute(
            "SELECT name FROM sqlite_master WHERE TYPE='table'").fetchone()[0]

        assert ('SPY' == table)

        # get table columns
        cursor.execute('SELECT * FROM SPY')
        columns = [description[0] for description in cursor.description]

        assert ('Time' == columns[0])
        assert ('Open' == columns[1])
        assert ('High' == columns[2])
        assert ('Low' == columns[3])
        assert ('Close' == columns[4])
        assert ('Adj_Close' == columns[5])
        assert ('Volume' == columns[6])

    create_table(connection, 'AAPL', database='eod', replace_existing=False)

    with connection:
        cursor = connection.cursor()
        tables = cursor.execute(
            "SELECT name FROM sqlite_master WHERE TYPE='table'")
        tables = tables.fetchall()
        tables = [table[0] for table in tables]

        assert ('SPY' == tables[0])
        assert ('AAPL' == tables[1])
        assert (len(tables) == 2)

    # replace table, expect no error, just warning
    create_table(connection, 'AAPL', database='eod', replace_existing=True)
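
The column assertions in Example #5 pin down the schema that create_table(connection, ticker, database='eod') must produce. The sketch below spells out the equivalent statement; the column names and order come from the test, while the type affinities and primary key are assumptions.

ticker = 'SPY'   # illustrative
schema_sql = f"""CREATE TABLE IF NOT EXISTS {ticker} (
    Time text PRIMARY KEY,
    Open real,
    High real,
    Low real,
    Close real,
    Adj_Close real,
    Volume integer
);"""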
Example #6

def get_item_sources(item_url):
    item_soup = BeautifulSoup(requests.get(item_url).content, 'lxml')
    return list(map(lambda x: x['href'], item_soup.find(id='sources').find_all('a')))


def fetch_items(item_id, items_count=10):
    items_list = []

    for item_number in range(item_id, max(item_id - items_count, 1), -1):
        item_url = URL + '/' + str(item_number)
        sources = get_item_sources(item_url)
        sources[0] = sources[0][56:]
        items_list.append((item_number, json.dumps(sources)))

    return items_list


if __name__ == '__main__':
    soup = BeautifulSoup(requests.get(URL).content, 'lxml')
    last_id = int(soup.find('div', 'overlay').string.strip())

    db_utils.connect_database('archillect_images.db')
    db_utils.create_table('sources')
    db_utils.insert_many('sources', fetch_items(int(last_id), 10))

    items = db_utils.get_items('sources', tuple(range(last_id, last_id-10, -1)))
    for item_id, item_sources in items:
        print(item_id, ': ', item_sources, sep='')
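
The db_utils module driving Example #6 is not part of the listing. The sketch below only mirrors the calls made above (connect_database, create_table, insert_many, get_items), so the single shared connection and the (id, data) schema are assumptions.

import sqlite3

_conn = None


def connect_database(db_file):
    global _conn
    _conn = sqlite3.connect(db_file)


def create_table(name):
    _conn.execute(
        f"CREATE TABLE IF NOT EXISTS {name} (id integer PRIMARY KEY, data text)")
    _conn.commit()


def insert_many(name, rows):
    # rows: iterable of (id, json_string) tuples, as built by fetch_items()
    _conn.executemany(
        f"INSERT OR REPLACE INTO {name} (id, data) VALUES (?, ?)", rows)
    _conn.commit()


def get_items(name, ids):
    placeholders = ', '.join('?' * len(ids))
    return _conn.execute(
        f"SELECT id, data FROM {name} WHERE id IN ({placeholders})", ids).fetchall()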
Example #7
def from_database(tickers, database='eod', kind=None, start=None, end=None):
    '''
    Gets ticker data from the database.

    Parameters
    ----------
    tickers: list or str
        Tickers to fetch.

    database: str
        Which database to query.

        options: 'eod': end of day stock data

    kind: str
        Column of end of day data.

    start, end: optional
        Date range passed through to read_table() and, for tickers not yet in
        the database, to get_from_yahoo().
    '''

    if not isinstance(tickers, list):
        tickers = [tickers]

    data = {}

    databases = {
        'eod': 'data/eod_stock.sqlite3',
        'test_memory': ':memory:',
        'test_eod_db': 'test_datasets/test_eod_db.sqlite3',
        'test_update_db': 'test_datasets/test_update_db.sqlite3'
    }

    connection = sqlite3.connect(databases[database])

    with connection:

        for ticker in tickers:

            try:

                data[ticker] = read_table(connection=connection,
                                          table=ticker,
                                          database=database,
                                          start=start,
                                          end=end,
                                          kind=kind)

            #except sqlite3.OperationalError:
            except DatabaseError:
                # ticker not in db
                downloaded_data = get_from_yahoo(ticker, start=start, end=end)

                downloaded_data[ticker].rename(
                    columns={'Adj Close': 'Adj_Close'}, inplace=True)

                create_table(connection=connection, name=ticker)

                insert_rows(connection=connection,
                            table=ticker,
                            data=downloaded_data[ticker],
                            database=database)

                data[ticker] = read_table(connection=connection,
                                          table=ticker,
                                          database=database,
                                          start=start,
                                          end=end,
                                          kind=kind)
    return data
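
For completeness, a typical call to from_database() under the assumptions above; the tickers and date range are illustrative, and read_table() is assumed to return a pandas DataFrame per ticker.

# Missing tickers are downloaded from Yahoo, stored via create_table()/insert_rows(),
# then read back, so the call works whether or not the tables already exist.
prices = from_database(['SPY', 'AAPL'], database='eod', kind='Adj_Close',
                       start='2020-01-01', end='2020-12-31')
print(prices['SPY'].head())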