def connect():
    """Connect to PostgreSQL, print the server version, and run the CalcDB
    initialization routines (initialize_table / initial_test).

    Connection parameters come from dbconfig(). Errors are printed, not
    raised; the connection is always closed in the finally block.
    """
    conn = None
    try:
        params = dbconfig()
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        cur.execute('SELECT version()')
        db_version = cur.fetchone()
        print("postgresql version = ", db_version)
        db = CalcDB(conn, cur)
        db.initialize_table()
        db.initial_test()
        # Fix: the original never closed the cursor, only the connection.
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed.')
def get_team(team_id):
    """Count rows in team_boards whose azure_id matches team_id.

    Parameters:
        team_id : the azure_id value to look up (passed as a bound
                  query parameter, so it is injection-safe).

    Returns:
        int: number of matching rows, or None if the query failed.
    """
    conn = None
    try:
        params = dbconfig()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        sql = """ SELECT id, azure_id FROM team_boards WHERE azure_id = %s ORDER BY id"""
        cur.execute(sql, (team_id,))
        # Fix: capture rowcount before closing the cursor instead of
        # reading the attribute of a closed cursor as the original did.
        row_count = cur.rowcount
        print("The number of parts: ", row_count)
        cur.close()
        return row_count
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
def createtable():
    """Connect to the PostgreSQL server and execute the DDL script
    contained in create.sql, committing on success.

    Errors are printed, not raised; cursor and connection are closed
    in the finally block.
    """
    conn = None
    cur = None  # Fix: initialize so finally cannot hit a NameError
    try:
        # read connection parameters
        params = dbconfig()
        # connect to the PostgreSQL server
        conn = psycopg2.connect(**params)
        # create a cursor
        cur = conn.cursor()
        print("The database is live.")
        # execute and commit the sql statement
        with open("create.sql", 'r') as f:
            cur.execute(f.read())
        conn.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # Fix: the original referenced cur in finally without guaranteeing
        # it was ever assigned (NameError if conn.cursor() raised).
        if cur is not None:
            cur.close()
        if conn is not None:
            conn.close()
            print("The database is closed.")
def connectcheck():
    """Connect to the PostgreSQL database server and print its version
    as a connectivity check.

    Errors are printed, not raised; cursor and connection are closed
    in the finally block.
    """
    conn = None
    cur = None  # Fix: initialize so finally cannot hit a NameError
    try:
        # read connection parameters
        params = dbconfig()
        # connect to the PostgreSQL server
        conn = psycopg2.connect(**params)
        # create a cursor
        cur = conn.cursor()
        print("The database is live.")
        # execute a statement
        cur.execute("SELECT version();")
        # display the PostgreSQL database server version
        version = cur.fetchone()
        print("You are connected to - ", version,"\n")
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        # Fix: the original referenced cur in finally without guaranteeing
        # it was ever assigned (NameError if conn.cursor() raised).
        if cur is not None:
            cur.close()
        if conn is not None:
            conn.close()
            print("The database is closed.")
def connection():
    """Open and return a new PostgreSQL connection built from dbconfig().

    On failure the error is printed and the function falls through,
    implicitly returning None — callers must check for that.
    """
    try:
        return psycopg2.connect(**dbconfig())
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
def createTable(sql_commands, df):
    """Report the size of df, then run the DDL/DML in sql_commands.

    Parameters:
        sql_commands : iterable of SQL statement strings, executed (and
                       committed/closed) by execQuery on its own connection.
        df           : DataFrame whose row count is reported.

    Fix: the original opened a connection and cursor and serialized df
    into a StringIO buffer, then never used any of them — the connection
    was leaked. All three unused resources are removed; execQuery manages
    its own connection.
    """
    print('total number of records: %s' % len(df))
    execQuery(sql_commands)
def execQuery(sql_commands):
    """Execute each statement in sql_commands on a fresh connection.

    Commits only when every statement succeeded and rolls back on error,
    so a partial batch is never persisted (the original committed in
    finally, which persisted whatever ran before the failure). Errors
    are printed, not raised.
    """
    conn = None  # Fix: the original hit a NameError in finally if connect failed
    try:
        conn = config.connect(config.dbconfig())
        dbcursor = conn.cursor()
        for command in sql_commands:
            print(command)
            dbcursor.execute(command)
        conn.commit()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        if conn is not None:
            conn.rollback()
    finally:
        if conn is not None:
            conn.close()
def select_records(
        tableName,
        selectFields = 'id,created_date',
        where = '1',
        orderBy = 'id'):
    """Query non-deleted rows from the given table and return them as JSON.

    Parameters:
        tableName    : the table to search.
        selectFields : comma-separated column list to return.
        where        : condition string; '1' means no extra condition.
                       A "deleted = '0'" filter is always appended.
        orderBy      : ORDER BY expression applied to the result.

    Returns:
        str: JSON array of {column: value} objects, or None on error.

    NOTE(review): the SQL is assembled by string concatenation, so every
    argument must come from trusted code — never from user input.
    (The original stacked several bare string literals as "docstrings";
    only the first was a docstring, the rest were dead statements — they
    are merged into this single docstring.)
    """
    conn = None
    try:
        # Connect DB
        params = dbconfig()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        if where == '1':
            condition = "deleted = '0'"
        else:
            condition = where + " AND deleted = '0'"
        # Form the SQL based on the given parameters
        sql = """ SELECT """ + selectFields + """ FROM """ + tableName + """ WHERE """ + condition + """ ORDER BY """ + orderBy
        logging.info('check_record_available: ' + sql)
        cur.execute(sql)
        result = cur.fetchall()
        # split the columns in to list
        column = selectFields.split(",")
        # prepare the result to convert in to JSON with key value pair
        items = [dict(zip(column, row)) for row in result]
        jsonData = json.dumps(items, indent=4)
        logging.info('Record Count: ' + str(cur.rowcount))
        cur.close()
        return jsonData
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
def check_record_available(
        recordId,
        tableName,
        whereField = 'id',
        selectFields = 'id,created_date',
        orderBy = 'id',
        retrunValue = 0):
    """Check for a record in the given table.

    Parameters:
        recordId     : value to search for (bound as a query parameter).
        tableName    : the table to search.
        whereField   : column to match; defaults to 'id'.
        selectFields : comma-separated column list to return.
        orderBy      : ORDER BY expression.
        retrunValue  : 0 - return the first matching row (fetchone),
                       anything else - return the match count.

    Returns:
        tuple, int, or None on error.

    NOTE(review): tableName/whereField/selectFields/orderBy are
    concatenated into the SQL and must come from trusted code.
    (The original stacked several bare string literals as "docstrings";
    only the first was a docstring — they are merged here.)
    """
    conn = None
    try:
        # Connect DB
        params = dbconfig()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        # Form the SQL based on the given parameters; the record value
        # itself is passed as a bound parameter.
        sql = """ SELECT """ + selectFields + """ FROM """ + tableName + """ WHERE """ + whereField + """ = %s AND deleted = '0' ORDER BY """ + orderBy
        logging.info('check_record_available: ' + sql)
        # Fix: str(recordId) — the original concatenated recordId directly,
        # raising TypeError whenever the id was an int.
        logging.info('Check record in ' + tableName + ' DB table: Where ' + whereField + ' = ' + str(recordId))
        cur.execute(sql, (recordId,))
        if retrunValue == 0:
            # Fetch one row data
            returnDetails = cur.fetchone()
        else:
            # To return Record count
            returnDetails = cur.rowcount
        logging.info('Record Count: ' + str(cur.rowcount))
        cur.close()
        return returnDetails
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
def insert_record(tableName, insertData):
    """Insert one row into tableName and return its generated id.

    Parameters:
        tableName  : table to insert into (must come from trusted code —
                     it is concatenated into the SQL).
        insertData : dict mapping column name -> value. Values are bound
                     as query parameters. (Fix: the original interpolated
                     them with naive '...' quoting, which broke on values
                     containing quotes and allowed SQL injection.)

    Returns:
        the new row's id, or None on error.
    """
    conn = None
    try:
        params = dbconfig()
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        # Split the dict into parallel column/value lists
        column = []
        columnValue = []
        for key, value in insertData.items():
            column.append(key)
            columnValue.append(value)
        insertColumn = ','.join(map(str, column))
        # One %s placeholder per value; psycopg2 handles quoting/escaping
        placeholders = ", ".join(["%s"] * len(columnValue))
        sql = """ INSERT INTO """ + tableName + """(""" + insertColumn + """) VALUES(""" + placeholders + """) RETURNING id;"""
        logging.info('Inserting: ' + sql)
        cur.execute(sql, columnValue)
        # get the generated id back
        recordId = cur.fetchone()[0]
        # commit the changes to the database
        conn.commit()
        # close communication with the database
        cur.close()
        logging.info('Succesfully Inserted.')
        return recordId
    except (Exception, psycopg2.DatabaseError) as error:
        logging.debug(error)
        print(error)
    finally:
        # Fix: the original called conn.close() twice here
        if conn is not None:
            conn.close()
cmd += '%s integer,' % col else: cmd += '%s numeric,' % col cmd += 'CONSTRAINT tddist_key PRIMARY KEY (ModifiedTMC, TOD),' cmd += 'CONSTRAINT unique_rec UNIQUE (ModifiedTMC, TOD))' sql_commands.append(cmd) sql_commands.append('GRANT SELECT ON TABLE %s.%s TO anon' % (SCHEMA_NAME, TABLE_NAME)) sql_commands.append('GRANT SELECT ON TABLE %s.%s TO staff' % (SCHEMA_NAME, TABLE_NAME)) f = StringIO.StringIO() df.to_csv(f, sep='\t', header=False, index=False, na_rep='NULL') f.seek(0) conn = config.connect(config.dbconfig()) dbcursor = conn.cursor() print 'total number of records: %s' % len(df) try: for command in sql_commands: dbcursor.execute(command) dbcursor.copy_from(f, '%s.%s' % (SCHEMA_NAME, TABLE_NAME), sep='\t', null='NULL') except (Exception, psycopg2.DatabaseError) as error: print error finally:
import json
from flask_pymongo import PyMongo
import pymongo
from pymongo import MongoClient
from bson.objectid import ObjectId
from werkzeug.security import generate_password_hash, check_password_hash

# NOTE(review): Flask, render_template, and os are used below but not
# imported in this visible chunk — presumably imported elsewhere; confirm.
app = Flask(__name__)
app.secret_key = "randomstring123"

# In debug mode, read the Mongo settings from the local config module;
# otherwise pull them from environment variables (production).
if app.config['DEBUG'] == True:
    from config import dbconfig
    app.config["MONGO_DBNAME"] = 'dumpdinners'
    app.config["MONGO_URI"] = dbconfig()
else:
    app.config['MONGO_URI'] = os.environ.get("MONGO_URI")
    app.config['MONGO_DBNAME'] = os.environ.get("MONGO_DBNAME")

mongo = PyMongo(app)


@app.route('/')
@app.route('/index')
def index():
    """Home page the gets 4 recipes from DB that have been viewed the most"""
    # NOTE(review): this takes the first 4 documents in natural order;
    # no sort by view count is applied — confirm against the intent above.
    four_recipes = mongo.db.recipes.find().limit(4)
    return render_template('allrecipes1.html', recipes=four_recipes)
def read_file_and_process(filepath, TABLE_NAME, SCHEMA_NAME):
    """Read a news-authors CSV and copy it row by row into the database.

    Parameters:
        filepath    : path of the CSV file to load.
        TABLE_NAME  : destination table name.
        SCHEMA_NAME : destination schema name.

    Each row is COPYed individually so a bad row only rolls back itself;
    errors are printed and the load continues. Progress is reported every
    100 rows. (Commented-out dead code from the original was removed.)
    """
    df = pd.read_csv(filepath)
    used_columns = [
        "author", "rep_score", "author_screen_name", 'rep_score_rank'
    ]
    df = df[used_columns]
    df.rename(columns={
        'author': 'author_name',
        'rep_score': 'reputation_score',
        'rep_score_rank': 'rank'
    }, inplace=True)
    # Derive the author's Twitter URL, then drop the screen-name column
    df['tweet_site'] = 'https://twitter.com/' + df.author_screen_name
    df.drop(columns=['author_screen_name'], inplace=True)

    OBJ_COLS = ['author_name', 'tweet_site']
    INT_COLS = ['reputation_score', 'rank']
    TIME_COLS = []
    # Normalize values column by column for the tab-separated COPY format
    df = df.replace([np.inf, -np.inf], np.nan)
    for col in df.columns:
        if col in OBJ_COLS:
            df[col] = df[col].astype(str)
            df.loc[df[col] == 'nan', col] = ''
        elif col in INT_COLS:
            df[col] = df[col].map('{:.4f}'.format)
            df.loc[df[col] == 'nan', col] = 'NULL'
        elif col in TIME_COLS:
            df.loc[df[col] == 'nan', col] = ''
    columns = df.columns
    print('total number of records: %s' % len(df))

    # Connect RDS database and copy data into database
    conn = config.connect(config.dbconfig())
    dbcursor = conn.cursor()
    for i in range(len(df)):
        f = StringIO()
        df.iloc[[i]].to_csv(f, sep='\t', header=False, index=False,
                            na_rep='NULL')
        f.seek(0)
        try:
            dbcursor.copy_from(f, '%s.%s' % (SCHEMA_NAME, TABLE_NAME),
                               sep='\t', null='NULL', columns=(columns))
            conn.commit()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
            conn.rollback()
        if ((i + 1) % 100 == 0):
            print('copy %d records' % (i + 1))
    conn.close()
print(len(timeArr)) query = ('SELECT timestamp' 'FROM lidarData' 'WHERE timestamp >= ' + str(lastTime) + 'AND timestamp <' + str(lastTime + interval) + ';') cursor.execute(query) lidarData = cursor.fetchall()[0][0] i = 0 while i < len(timeArr): messageStr = "[" + str(lidarData[0]) for j in range(1, len(lidarData)): messageStr += "," + str(lidarData[j]) messageStr += "]" print(messageStr) pub.publish(messageStr) rate.sleep() i += 1 if __name__ == '__main__': config.dbconfig() try: talker() except rospy.ROSInterruptException: pass
def read_file_and_process(filepath, TABLE_NAME, SCHEMA_NAME):
    """Read a news-articles CSV, normalize its columns, and copy each row
    into SCHEMA_NAME.TABLE_NAME via COPY.

    One row is copied per transaction so a failing row only rolls back
    itself; errors are printed and the load continues. Progress is
    reported every 100 rows.
    """
    articles = pd.read_csv(filepath)
    articles.rename(columns={
        'thread.site_full': 'site_full',
        'thread.main_image': 'main_image',
        'url': 'post_link',
        'topic': 'news_topic',
        'published': 'published_time',
        'sentiment': 'sentiment_score',
        'thread.uuid': 'article_id',
        'General_Topic': 'general_topic'
    }, inplace=True)

    text_cols = [
        'title', 'author', 'site_full', 'main_image', 'post_link',
        'news_topic', 'article_id', 'general_topic'
    ]
    numeric_cols = ['controversy_score', 'sentiment_score']
    timestamp_cols = ['published_time']

    # Normalize each column for the tab-separated COPY format
    articles = articles.replace([np.inf, -np.inf], np.nan)
    for name in articles.columns:
        if name in text_cols:
            articles[name] = articles[name].astype(str)
            articles.loc[articles[name] == 'nan', name] = ''
        elif name in numeric_cols:
            articles[name] = articles[name].map('{:.4f}'.format)
            articles.loc[articles[name] == 'nan', name] = 'NULL'
        elif name in timestamp_cols:
            articles.loc[articles[name] == 'nan', name] = ''

    col_order = articles.columns
    print('total number of records: %s' % len(articles))

    # Open the RDS connection and stream rows in one at a time
    conn = config.connect(config.dbconfig())
    dbcursor = conn.cursor()
    target = '%s.%s' % (SCHEMA_NAME, TABLE_NAME)
    for row_num in range(len(articles)):
        buf = StringIO()
        articles.iloc[[row_num]].to_csv(buf, sep='\t', header=False,
                                        index=False, na_rep='NULL')
        buf.seek(0)
        try:
            dbcursor.copy_from(buf, target, sep='\t', null='NULL',
                               columns=(col_order))
            conn.commit()
        except (Exception, psycopg2.DatabaseError) as error:
            print(error)
            conn.rollback()
        if ((row_num + 1) % 100 == 0):
            print('copy %d records' % (row_num + 1))
    conn.close()