Example #1
def new_entry(DATE_K, TIME_K, BAL_VOLUME, VOLUME, OPEN, HIGH, LOW, CLOSE,
              currency):
    conn = None
    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()

        query = "INSERT INTO " + currency + " (DATE_K, TIME_K, BAL_VOLUME, VOLUME, OPEN, HIGH, LOW, CLOSE) VALUES (%s, %s, %s, %s, %s, %s, %s, %s);"
        data = (DATE_K, TIME_K, BAL_VOLUME, VOLUME, OPEN, HIGH, LOW, CLOSE)

        cur.execute(query, data)
        #
        #cur.execute(str_to_record);

        # close communication with the PostgreSQL database server
        cur.close()
        # commit the changes
        conn.commit()
        # print ("Records in ", currency, " created successfully")
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
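Note: the INSERT above splices the table name into the query by string concatenation, so `currency` must come from a trusted source. A minimal sketch of the same statement with the identifier composed through psycopg2's sql module (available since psycopg2 2.7), reusing `cur`, `data`, and `currency` from the example above:

from psycopg2 import sql

# Compose the table name as a properly quoted identifier instead of concatenating strings.
query = sql.SQL(
    "INSERT INTO {} (DATE_K, TIME_K, BAL_VOLUME, VOLUME, OPEN, HIGH, LOW, CLOSE) "
    "VALUES (%s, %s, %s, %s, %s, %s, %s, %s);"
).format(sql.Identifier(currency))
cur.execute(query, data)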
Example #2
    def __init__(self):
        self.db_config = config.db_config()
        self.db_session = {}
        self.cache_session = {}

        #####################
        # MASTER DATA STORE #
        #####################
        _engine = create_engine('mysql+mysqldb://%s:%s@%s/%s' %
                                (self.db_config['PUPPET']['Username'],
                                 self.db_config['PUPPET']['Password'],
                                 self.db_config['PUPPET']['Host'],
                                 self.db_config['PUPPET']['Database']),
                                echo=True)

        # Bind the engine to the metadata of the PuppetBase class so that the
        # declaratives can be accessed through a DBSession instance
        PuppetModel.metadata.bind = _engine

        _db_session = scoped_session(sessionmaker(bind=_engine))
        self.db_session['puppet'] = _db_session()

        ######################
        # SESSION DATA CACHE #
        ######################
        self.cache_session['sessions'] = redis.StrictRedis(
            host=self.db_config['SESSION-CACHE']['Host'],
            port=self.db_config['SESSION-CACHE']['Port'],
            db=self.db_config['SESSION-CACHE']['Database'],
            socket_connect_timeout=3)
Example #3
def db_connect_tool_database():
    vnf_params = db_config('database.ini', 'vnf_onboarding')
    print('Connecting to the PostgreSQL database...')
    conn = psycopg2.connect(**vnf_params)

    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    # conn.cursor will return a cursor object, you can use this cursor to perform queries
    cursor = conn.cursor()
    table_name = get_config_param('database.ini', 'Details', 'table')
    table_exist_query = "SELECT EXISTS(SELECT 1 FROM information_schema.tables WHERE table_catalog= %s AND  table_schema='public' AND  table_name=%s);"
    table_exist_data = (vnf_params['dbname'], table_name)
    cursor.execute(table_exist_query, table_exist_data)
    records = cursor.fetchall()
    pprint.pprint((records[0])[0])
    isTable = (records[0])[0]
    print(isTable)
    if len(records) > 0 and isTable:
        print("Database table {} exists".format(table_name))

    else:
        print("Database table {} does NOT exist".format(table_name))
        try:
            create_table = ("CREATE TABLE {} (username varchar(18) UNIQUE not null, "
                            "password varchar(18) not null, "
                            "emailid varchar(50) UNIQUE not null)").format(table_name)
            #table_data = (AsIs(table_name))
            print("Creating table %s" % table_name)
            print("Query: %s" % create_table)
            cursor.execute(create_table)
            print("Table created")
        except psycopg2.DatabaseError as e:
            print('Error %s' % e)
            sys.exit(1)
Example #4
def get_raw_log_data(day, hour=18):
    '''Read OD (origin-destination) data from the database.

    Parameters:
    ----------
    day : int
        Day of the month (the data covers September)
    hour : int
        Hour of the day {0-23}

    Returns:
    ---------
    rows : list
        [ (log_time, car_id, on_service, x, y, v), ...]
    '''
    sql = '''select log_time, car_id, on_service, ST_X(geom), ST_Y(geom), velocity from gps_log_9_{day} where EXTRACT(HOUR FROM log_time) = {hour} order by car_id, log_time limit 10000;'''.format(day=day, hour=hour)
    
    conn = None
    try:
        params = db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()        
        cur.execute(sql)        
        rows = cur.fetchall()
        cur.close()
        return rows
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
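A hypothetical call to the function above (the day and hour values are illustrative); note that it returns None when the query fails, so the result is guarded before use:

rows = get_raw_log_data(12, hour=18)
if rows:
    for log_time, car_id, on_service, x, y, v in rows[:5]:
        print(log_time, car_id, on_service, x, y, v)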
Example #5
def entry_check(entry_pos, currency_table):
    conn = None
    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute("SELECT (DATE_K), TIME_K  FROM " + currency_table +
                    " ORDER BY DATE_K, TIME_K ")
        rows = cur.fetchall()
        # Check if there is any content in the table
        if len(rows) != 0:
            if entry_pos == 'first':
                last_entry = (rows[0])

            else:
                last_entry = (rows[-1])

            date_str = (str(last_entry[0]) + " " + str(last_entry[1]))
            #date_str = last_entry[0] + " " + last_entry[1]
            print(date_str)

            cur.close()

            return date_str
        else:
            return False

    except (Exception, psycopg2.DatabaseError) as error:
        # Most likely the table does not exist; report the error and return False
        print(error)
        return False
    finally:
        if conn is not None:
            conn.close()
Example #6
def eval_SSE(individual):
    """
    Calculate SSE for travel time from each simulation iteration.

    :type individual: toolbox object
    :param individual: gene from GA run holding binary info, e.g. "0001011100"
    :return: a single element tuple
    """
    sqlite_file = 'vissim_db.sqlite'
    conn = sqlite3.connect(sqlite_file)
    file = 'TravelTime_Google.csv'
    benchmark = pd.read_csv(file)

    cfg = config.db_config()
    cc0 = decode(cfg['cc0_max'], cfg['cc0_min'], individual[:7])
    cc1 = int(decode(cfg['cc1_max'], cfg['cc1_min'], individual[7:9]))
    cc4 = decode(cfg['cc4_max'], cfg['cc4_min'], individual[9:12])
    cc5 = decode(cfg['cc5_max'], cfg['cc5_min'], individual[-3:])
    input = {'cc0':cc0, 'cc1':cc1, 'cc4':cc4, 'cc5':cc5}

    result = mockSim(input)
    x =  benchmark.loc[benchmark.Route == 10, 'Average of Travel_Time(min)'].iloc[0] * 60
    result['Diff'] = result['TravTm'] - x
    sse = sum(result['Diff']**2)
    SSE = (sse, )

    finalResult = pd.DataFrame({'Route':result.iloc[0,0], 'Diff':np.mean(result['Diff']),
                                'SSE':sse,'cc0':cc0, 'cc1':cc1,'cc4':cc4, 'cc5':cc5,
                                'Timestamp':datetime.datetime.now()}, index = [0])
    finalResult.to_sql('result', conn, if_exists='append', index=False)

    return SSE
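The `decode` helper used above is not shown in this example. A plausible sketch with the same (max, min, bits) argument order, mapping a binary gene segment linearly into [min, max] (an assumption, not the project's actual implementation):

def decode(val_max, val_min, bits):
    # Interpret the bit slice (a sequence of 0/1 values) as an unsigned integer
    # and scale it linearly into [val_min, val_max].
    as_int = int("".join(str(b) for b in bits), 2)
    return val_min + (val_max - val_min) * as_int / (2 ** len(bits) - 1)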
Example #7
def db_check():
    """ Connect to the PostgreSQL database server """
    conn = None
    try:
        # read connection parameters
        params = config.db_config()

        # connect to the PostgreSQL server
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)

        # create a cursor
        cur = conn.cursor()

        # execute a statement
        print('PostgreSQL database version:')
        cur.execute('SELECT version()')

        # display the PostgreSQL database server version
        db_version = cur.fetchone()
        print(db_version)
        # close the communication with the PostgreSQL
        cur.close()
        # Assume that database is OK
        return True
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        # Connection or query failed
        return False
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')
Example #8
def db_check_credentials(username, password):
    """ Connect to the PostgreSQL database server """
    conn = None
    # read connection parameters
    params = db_config('database.ini', 'vnf_onboarding')

    # connect to the PostgreSQL server
    print('Connecting to the PostgreSQL database...')
    conn = psycopg2.connect(**params)

    # conn.cursor will return a cursor object, you can use this cursor to perform queries
    cursor = conn.cursor()

    # execute our Query
    #cursor.execute("SELECT username,password FROM account WHERE ( account.username = username and account.password = password)")
    cursor.execute("SELECT username,password FROM vnf_onboarding_tool_users")

    # retrieve the records from the database
    records = cursor.fetchall()
    print(records)

    for rec in records:
        print(rec)
        if rec[0] == username and rec[1] == password:
            print("found record")
            cursor.close()
            return True
    print("Did not find record")
    cursor.close()
    return False
Example #9
    def __init__(self):
        DB_CONFIG = db_config()
        self.mariadb_connection = mariadb.connect(
            host=DB_CONFIG['db_host'],
            port=DB_CONFIG['db_port'],
            user=DB_CONFIG['db_username'],
            password=DB_CONFIG['db_password'],
            database='hotscrap')
        self.cursor = self.mariadb_connection.cursor()
Example #10
def db_user_signup(username, password, emailid):
    """ Connect to the PostgreSQL database server """
    conn = None
    # read connection parameters
    params = db_config()
    pprint.pprint(params)
    #pprint.pprint(params['users_database'])
    #dbname = params['users_database']

    # connect to the PostgreSQL server
    print('Connecting to the PostgreSQL database...')
    conn = psycopg2.connect(**params)
    #conn = psycopg2.connect(params['dbname'],params['user'],params['password'],params['host'])

    #Needed for Create DB op
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    # conn.cursor will return a cursor object, you can use this cursor to perform queries
    cursor = conn.cursor()
    vnf_params = db_config('database.ini', 'vnf_onboarding')
    vnf_dbname = vnf_params['dbname']
    #cursor.execute("SELECT datname FROM pg_catalog.pg_database WHERE datname = 'vnf_onboarding_tool_db'")
    cursor.execute(
        "SELECT datname FROM pg_catalog.pg_database WHERE datname = %(dname)s",
        {'dname': vnf_dbname})
    records = cursor.fetchall()
    if len(records) > 0:
        print("Database {} exists".format(vnf_dbname))
    else:
        print("Database {} does NOT exist".format(vnf_dbname))
        try:
            cdb_query = "CREATE DATABASE %s OWNER %s"
            cdb_data = (AsIs(vnf_dbname), params['user'])
            cursor.execute(cdb_query, cdb_data)
            #cursor.execute("CREATE DATABASE %(dname)s OWNER %(user)s;",{'dname': AsIs(vnf_dbname),'user':params['user']})
        except psycopg2.DatabaseError as e:
            print('Error %s' % e)
            sys.exit(1)

        print("Database {} created successfully".format(vnf_dbname))
Example #11
    def __init__(self):
        self.db_config = config.db_config()

        _engine = create_engine('mysql+mysqldb://%s:%s@%s/%s' %
                                (self.db_config['PUPPET']['Username'],
                                 self.db_config['PUPPET']['Password'],
                                 self.db_config['PUPPET']['Host'],
                                 self.db_config['PUPPET']['Database']),
                                echo=True)

        PuppetModel.metadata.create_all(_engine)
Example #12
File: db_psy.py  Project: teilon/points
def insert_point_contact(point_name, contact_name, contact):
    conn = None
    try:
        conn = psycopg2.connect(db_config())
        params = {'var_point_name': point_name, 'var_contact_name': contact_name, 'var_contact': contact}
        exec_to(conn, 'insert_point_contact', params)
        conn.commit()
    except Exception as error:
        print('err| {}'.format(error))
    finally:
        if conn is not None:
            conn.close()
Example #13
File: db_psy.py  Project: teilon/points
def insert_point(name, formfactor, payment, address):
    conn = None
    try:
        conn = psycopg2.connect(db_config())
        params = {'var_name': name, 'var_form': formfactor, 'var_payment': payment, 'var_address': address}
        exec_to(conn, 'insert_point', params)
        conn.commit()
    except Exception as error:
        print('err| {}'.format(error))
    finally:
        if conn is not None:
            conn.close()
Example #14
File: db_psy.py  Project: teilon/points
def insert_trader(name):
    conn = None
    try:
        conn = psycopg2.connect(db_config())
        params = {'var_trader_name': name}
        exec_to(conn, 'insert_trader', params)
        conn.commit()
    except Exception as error:
        print('err| {}'.format(error))
    finally:
        if conn is not None:
            conn.close()
Example #15
    def inner(*args, **kwargs):
        conn = None
        try:
            conn = psycopg2.connect(db_config())
            result = fn(conn, *args, **kwargs)
            conn.commit()
            return result
        except Exception as error:
            print(error)
            print("err")
        finally:
            if conn is not None:
                conn.close()
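The `inner` wrapper above closes over `fn`, so it is evidently the body of a connection-passing decorator. A minimal sketch of how such a decorator could be assembled and applied (the decorator name, table, and wrapped function below are illustrative, not taken from the source project):

import functools

def with_connection(fn):                      # hypothetical outer decorator
    @functools.wraps(fn)
    def inner(*args, **kwargs):
        ...                                   # body as in the example above
    return inner

@with_connection
def insert_row(conn, name):                   # hypothetical wrapped function
    with conn.cursor() as cur:
        cur.execute("INSERT INTO traders (name) VALUES (%s)", (name,))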
Example #16
File: db_psy.py  Project: teilon/points
def insert_trade(trader_name, point_name, count, sale, date=datetime.date.today()):
    conn = None
    try:
        conn = psycopg2.connect(db_config())
        params = {'var_trader_name': trader_name, 'var_point_name': point_name, 'var_count': count, 'var_sale': sale, 'var_date': date}
        exec_to(conn, 'insert_traded', params)
        conn.commit()
    except Exception as error:
        print('err| {}'.format(error))
    finally:
        if conn is not None:
            conn.close()
Example #17
def db_remove_row(table, key_column, key):
    conn = None
    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()

        cur.execute("DELETE FROM " + table + " WHERE " + key_column + " = %s",
                    (key))

    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
Example #18
def get_rows(com_str):
    conn = None
    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        #cur.execute("SELECT DATE_K, TIME_K, BAL_VOLUME, VOLUME, OPEN, HIGH, LOW, CLOSE FROM " + table + " ORDER BY DATE_K, TIME_K ")
        cur.execute(com_str)
        rows = cur.fetchall()
        print("The number of parts: ", cur.rowcount)
        for row in rows:
            print(row)
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
Example #19
def db_create_row(table, content_list):
    data = []
    conn = None
    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        query = "INSERT INTO " + table + " ("
        for idx, element in enumerate(content_list):
            # If this is not the last element in the array, add comma for the next element
            if (idx + 1) < len(content_list):
                query = query + " " + element[0] + ","
            else:
                query = query + " " + element[0]

        query = query + ") VALUES ("

        # Add enough placeholders for each table value
        for idx, element in enumerate(content_list):
            if (idx + 1) < len(content_list):
                query = query + "%s, "
            else:
                query = query + "%s);"


        # Save values of the table's elements:
        for idx, element in enumerate(content_list):
            data.append(element[1])

        #query =  "INSERT INTO " + table + " (DATE_K, TIME_K, BAL_VOLUME, VOLUME, OPEN, HIGH, LOW, CLOSE) VALUES (%s, %s, %s, %s, %s, %s, %s, %s);"
        #data = (DATE_K, TIME_K, BAL_VOLUME, VOLUME, OPEN, HIGH, LOW, CLOSE)

        cur.execute(query, data)
        # close communication with the PostgreSQL database server
        cur.close()
        # commit the changes
        conn.commit()
        # print ("Records in ", currency, " created successfully")
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
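A shorter, equivalent way to build the column list and the placeholder list in the example above is to join them, which avoids the index bookkeeping (a sketch; the (column, value) pairs still come from content_list, and cur/table are as in the example):

columns = ", ".join(element[0] for element in content_list)
placeholders = ", ".join(["%s"] * len(content_list))
query = "INSERT INTO " + table + " (" + columns + ") VALUES (" + placeholders + ");"
data = [element[1] for element in content_list]
cur.execute(query, data)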
Example #20
File: db_psy.py  Project: teilon/points
def db_connet():

    conn = None
    try:
        conn = psycopg2.connect(db_config())
        test(conn)
        fill_dics(conn)
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        print("err")
    finally:
        if conn is not None:
            conn.close()

    insert_point('name1', 'sale point', 'real', 'Дильмухамеда 91')
    insert_point_contact('name1', 'Kamil', '8 701 449 19 12')
    insert_trader('Erba')
    insert_trade('Erba', 'name1', '10', '15000', datetime.date(2012, 12, 14))
    insert_trade('Erba', 'name1', '10', '15000')
Example #21
def flush_table(table_to_flush):
    conn = None
    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()

        com_str = "DROP TABLE " + table_to_flush + ";"
        cur.execute(com_str)

        cur.close()
        # commit the changes
        conn.commit()
        print("table deleted")
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
Example #22
def insert_data(username, password, emailid):
    vnf_params = db_config('database.ini', 'vnf_onboarding')
    print('Connecting to the PostgreSQL database...')
    conn = psycopg2.connect(**vnf_params)

    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    # conn.cursor will return a cursor object, you can use this cursor to perform queries
    cursor = conn.cursor()
    table_name = get_config_param('database.ini', 'Details', 'table')
    try:
        #insertop = "INSERT INTO {} (username, password, emailid ) VALUES({},{},{})".format(table_name,username,password,emailid)
        insertop = "INSERT INTO %s (username, password, emailid ) VALUES(%s,%s,%s)"
        insertData = (AsIs(table_name), username, password, emailid)
        print("insert query = %s", insertop)
        cursor.execute(insertop, insertData)
    except psycopg2.DatabaseError, e:
        print 'Error %s' % e
        #sys.exit(1)
        return "False"
Example #23
def new_table(com_str):
    conn = None
    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        # Create the table with the given name if it doesn't exist yet
        cur.execute(com_str)

        # close communication with the PostgreSQL database server
        cur.close()
        # commit the changes
        conn.commit()
        # print (table_name, " table created/existence verified successfully")
    except (Exception, psycopg2.DatabaseError) as error:
        # print ("Failed to create table for ", table_name)
        print(error)
    finally:
        if conn is not None:
            conn.close()
Example #24
def db_read_row(table, rows, key_column, key):
    conn = None
    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()

        #cur.execute("SELECT * FROM " + table + " WHERE (" + key_column + " = " + key + ")")
        query = "SELECT * FROM " + table + " WHERE " + key_column + " = %s"
        cur.execute(query, (key))
        #cur.execute("SELECT FROM " + table + " WHERE " + key_column + " = %s", (key))
        if cur.fetchone() is not None:
            row = cur.fetchone()
            return row
        else:
            return None

    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
Example #25
def db_check_credentials(username, password):
    """ Connect to the PostgreSQL database server """
    conn = None
    # read connection parameters
    params = db_config('database.ini', 'vnf_onboarding')
    db_table = get_config_param('database.ini', 'Details', 'table')

    # connect to the PostgreSQL server
    print('Connecting to the PostgreSQL database...')
    conn = psycopg2.connect(**params)

    # conn.cursor will return a cursor object, you can use this cursor to perform queries
    cursor = conn.cursor()

    # execute our Query
    check_user_query = "SELECT username,password FROM {} WHERE ( {}.username = '******' )".format(
        db_table, db_table, username)

    try:
        cursor.execute(check_user_query)
    except Exception:
        print("db_check_credentials: Cannot execute", check_user_query)

    # retrieve the records from the database
    records = cursor.fetchall()
    print(records)

    for rec in records:
        print(rec)
        db_password = sha256.verify(password, rec[1])
        print("Verified password", db_password, rec[1])
        if rec[0] == username and db_password:
            print("db_check_credentials: User {} is authenticated".format(username))
            return True
    print("Did not find user", username)
    cursor.close()
    return False
Example #26
def pg_db_connection(cursor_factory=None):
    try:
        from config import db_config
        db = db_config()
        con = psycopg2.connect(
            user=db['user'],
            database=db['database'],
            host=db['host'],
            port=int(db['port']),
            password=db['password'] if db['password'] is not None else '')
        if cursor_factory is not None:
            cur = con.cursor(cursor_factory=cursor_factory)
        else:
            cur = con.cursor()
    except psycopg2.Error as err:
        yield None, None, err
    else:
        try:
            yield con, cur, None
        finally:
            cur.close()
            con.commit()
            con.close()
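The function above yields a (connection, cursor, error) triple, so it is presumably wrapped with contextlib.contextmanager somewhere in the source project (the decorator is not shown here). A hedged usage sketch that applies the decorator explicitly:

from contextlib import contextmanager

pg_connection = contextmanager(pg_db_connection)  # assumes the decorator was not already applied

with pg_connection() as (con, cur, err):
    if err is not None:
        print(err)
    else:
        cur.execute("SELECT version()")
        print(cur.fetchone())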
Example #27
def mysql_db_connection(cursor_factory=None):
    try:
        from config import db_config
        db = db_config()
        con = pymysql.connect(user=db['user'],
                              db=db['database'],
                              host=db['host'],
                              password=db['password'] if db['password'] is not None else '',
                              port=int(db['port']),
                              charset='utf8mb4',
                              cursorclass=pymysql.cursors.DictCursor)
        if cursor_factory is not None:
            cur = con.cursor(cursor=cursor_factory)
        else:
            cur = con.cursor()
    except pymysql.Error as err:
        yield None, None, err
    else:
        try:
            yield con, cur, None
        finally:
            cur.close()
            con.close()
Example #28
def read_table(currency, period):
    conn = None
    market_data_arr = np.zeros(shape=(period, 6))
    date_arr = np.zeros(shape=(period, 1), dtype='datetime64[m]')

    try:
        params = config.db_config()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()

        cur.execute(
            "SELECT DATE_K, TIME_K, BAL_VOLUME, VOLUME, OPEN, HIGH, LOW, CLOSE from "
            + currency)
        rows = cur.fetchall()
        # Extract only data within specified time
        for idx, row in enumerate(rows[-period:]):
            comb_date = datetime.combine((row[0]), (row[1]))
            date_arr[idx] = np.datetime64(comb_date)  # DATE

            market_data_arr[idx, 0] = row[2]  # BAL_VOLUME
            market_data_arr[idx, 1] = row[3]  # VOLUME
            market_data_arr[idx, 2] = row[4]  # OPEN
            market_data_arr[idx, 3] = row[5]  # HIGH
            market_data_arr[idx, 4] = row[6]  # LOW
            market_data_arr[idx, 5] = row[7]  # CLOSE


        print("Required records successfully retrieved")
        conn.close()
        return date_arr, market_data_arr

    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
Example #29
File: db.py  Project: lujinda/tuxpy
#!/usr/bin/env python
#coding:utf8
# Author          : tuxpy
# Email           : [email protected]
# Last modified   : 2014-12-07 15:21:40
# Filename        : data/db.py
# Description     : 
import pymongo
from config import db_config, cfg_file

cfg = db_config()
db = pymongo.Connection(cfg.get('mongodb', 'host'),
        int(cfg.get('mongodb', 'port'))).tuxpy

Example #30
File: task1.py  Project: Velcorn/DIS
def etl():
    # Initialize so the finally block is safe even if connecting fails
    connection = None
    cursor = None
    try:
        params = db_config()
        connection = connect(**params)
        cursor = connection.cursor()

        # Clear DB before executing scripts.
        print("Clearing DB...")
        cursor.execute("drop schema public cascade; "
                       "create schema public;")
        print("Cleared DB.\n")

        # Execute given and star schema script.
        print("Executing SQL scripts...")
        cursor.execute(open("stores-and-products.sql", encoding="utf-8").read())
        cursor.execute(open("star-schema.sql", encoding="utf-8").read())
        connection.commit()
        print("Executed SQL scripts.\n")

        # Transfer existing data to the new tables.
        print("Transferring existing data...")
        print("Transferring shops...")
        cursor.execute("select s.shopid, s.name, ci.name, r.name, co.name "
                       "from shop s "
                       "join city ci on s.cityid = ci.cityid "
                       "join region r on ci.regionid = r.regionid "
                       "join country co on r.countryid = co.countryid")
        shops = cursor.fetchall()
        for s in shops:
            cursor.execute("insert into shopid(id, name, city, region, country) "
                           "values(%s, %s, %s, %s, %s) "
                           "on conflict (id) do nothing",
                           (s[0], s[1], s[2], s[3], s[4]))
            connection.commit()

        print("Transferring articles...")
        cursor.execute("select a.articleid, a.name, a.price, pg.name, pf.name, pc.name "
                       "from article a "
                       "join productgroup pg on a.productgroupid = pg.productgroupid "
                       "join productfamily pf on pg.productfamilyid = pf.productfamilyid "
                       "join productcategory pc on pf.productcategoryid = pc.productcategoryid")
        articles = cursor.fetchall()
        for a in articles:
            cursor.execute("insert into articleid(id, name, price, productgroup, productfamily, productcategory) "
                           "values(%s, %s, %s, %s, %s, %s) "
                           "on conflict (id) do nothing",
                           (a[0], a[1], a[2], a[3], a[4], a[5]))
            connection.commit()
        print("Transferred existing data.\n")

        print("Writing data from CSV file to DB...")
        print("Writing dates...")
        with open("sales.csv", "r", encoding="latin-1") as f:
            # Open a CSV reader, skip headline, initialize dateid, list of dateids and list of dates.
            reader = csv.reader(f, delimiter=";")
            next(reader)
            did = 1
            dids = []
            dates = []
            # Iterate over each line, split the date string into day, month, year,
            # check if it's already in the list and append if not. Increase count.
            for row in reader:
                if row[0].split(".") not in dates:
                    dids.append(did)
                    dates.append(row[0].split("."))
                    did += 1

            # Zip dateids and dates together and write to the table.
            dateids = list(zip(dids, dates))
            for d in dateids:
                cursor.execute("insert into dateid(id, day, month, year) "
                               "values(%s, %s, %s, %s) "
                               "on conflict (id) do nothing",
                               (d[0], d[1][0], d[1][1], d[1][2]))
                connection.commit()

        print("Writing data...")
        amount = len(open("sales.csv").readlines())
        with open("sales.csv", "r", encoding="latin-1") as f:
            reader = csv.reader(f, delimiter=";")
            next(reader)
            id = 1
            count = 0
            # Give a progress counter for amount of lines already processed.
            for row in reader:
                if count % 1000 == 0:
                    print(str(count) + "/" + str(amount))

                # Get ids from tables, check if exists.
                cursor.execute("select id from dateid "
                               "where day = %s "
                               "and month = %s "
                               "and year = %s",
                               (row[0][:2], row[0][3:5], row[0][6:10]))
                did = cursor.fetchone()
                if did:
                    did = did[0]

                cursor.execute("select id from shopid "
                               "where name = %s",
                               (row[1],))
                sid = cursor.fetchone()
                if sid:
                    sid = sid[0]

                cursor.execute("select id from articleid "
                               "where name = %s",
                               (row[2],))
                aid = cursor.fetchone()
                if aid:
                    aid = aid[0]

                # Check if proper values are present.
                try:
                    sold = int(row[3])
                except ValueError:
                    sold = row[3]

                try:
                    rev = float(row[4].replace(",", "."))
                except ValueError:
                    rev = row[4]

                # Check again before writing to DB.
                if isinstance(did, int) and isinstance(sid, int) and isinstance(aid, int) \
                        and isinstance(sold, int) and isinstance(rev, float):
                    cursor.execute("insert into facttable(id, dateid, shopid, articleid, sold, revenue) "
                                   "values(%s, %s, %s, %s, %s, %s) "
                                   "on conflict (id) do nothing",
                                   (id, did, sid, aid, sold, rev))
                    connection.commit()
                    id += 1
                    count += 1

        cursor.close()
        connection.close()
        return "\nFinished writing data from CSV file."
    except (Exception, Error) as error:
        return error
    finally:
        if cursor:
            cursor.close()
        if connection:
            connection.close()
Example #31
SQL_GET_RANDOM_FILE='''WITH aggr AS (
	SELECT MAX(sf.Id) AS ID 
		FROM public.S3_Files sf
		GROUP BY sf.FileName
	)
SELECT sf.id,  sf.PublicUrl, sf.FileName, sf.Created_At 
FROM aggr
JOIN public.S3_Files sf on sf.Id=aggr.Id
ORDER BY RANDOM() LIMIT 1'''

SQL_SET_FILES="INSERT INTO public.S3_Files (PublicUrl, FileName) VALUES (%s,%s)"
# insert-only, acts as a log: no merge on write; one row per file name is picked at read time (see SQL_GET_RANDOM_FILE)

params = db_config()

logging.basicConfig(filename='app.log', filemode='w', format='%(asctime)s -%(name)s - %(levelname)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.ERROR)
# For some reason, on Ubuntu we have to write an absolute path without the leading slash:  filename='home/ubuntu/flaskapp/app.log'
def test_connect():
    """ Connect to the PostgreSQL database server """
    logging.error("test log")
    conn = None
    try:
        # read connection parameters
        conn = psycopg2.connect(**params)		
        # create a cursor
        cur = conn.cursor()
        cur.execute('SELECT version()')
        # display the PostgreSQL database server version
        db_version = cur.fetchone()