def insert_group_user_relations(user_groups):
    '''
    Create relation between groups and users

    parameters:
        user_groups (dict) -- dict of key-value format
                              {user: [group1, group2, ...]}
    '''
    # Load config parameters from database.ini
    params = config()

    # Connect to the PostgreSQL server
    conn = psycopg2.connect(**params)
    try:
        cur = conn.cursor()
        # The statement is constant — build it once instead of per iteration.
        sql = '''
            INSERT INTO group_users (mfp_username, group_name)
            VALUES (%s, %s);
        '''
        for user, groups in user_groups.items():
            for group in groups:
                try:
                    cur.execute(sql, (user, group))
                except (Exception, psycopg2.DatabaseError) as error:
                    # Best-effort: report the failed row and keep going.
                    print(error)
        # Commit the changes
        conn.commit()
        cur.close()
    finally:
        # Always release the connection (the original closed the connection
        # before the cursor and leaked both if an error escaped).
        conn.close()
def __init__(self):
    """Connect to the PostgreSQL database server."""
    self._connection = None
    # Read connection parameters and open the session.
    conn_params = config()
    self._connection = psycopg2.connect(**conn_params)
    # Persist changes to the DB automatically.
    self._connection.autocommit = True
def connect_db():
    """Connect to PostgreSQL, print the server version, then disconnect."""
    # Pre-bind so the finally block cannot raise NameError when config()
    # or pg2.connect() fails before `connection` is assigned (original bug).
    connection = None
    try:
        params = config()
        connection = pg2.connect(**params)
        db_cursor = connection.cursor()
        qr = 'SELECT version()'
        db_cursor.execute(qr)
        print(db_cursor.fetchone())
        db_cursor.close()
    except (Exception, pg2.DatabaseError) as error:
        print(error)
    finally:
        if connection is not None:
            connection.close()
def db_check_user(user):
    '''
    Check if a username already exists in the database.

    If it exists, return (rows, last_date) where last_date is the most recent
    date ('YYYY-MM-DD') on which an entry has been logged; otherwise return
    (0, 0).

    parameters:
        user (string) -- username
    '''
    # Load config parameters from database.ini
    params = config()

    # Connect to the PostgreSQL server
    conn = psycopg2.connect(**params)
    try:
        cur = conn.cursor()
        sql = '''
            SELECT 1 FROM nutrition
            WHERE mfp_username = %s
            LIMIT 1
        '''
        # Check whether the username is already in the database.
        cur.execute(sql, (user, ))
        user_exists = cur.fetchall()

        if not user_exists:
            # Original leaked the connection on this path; the finally
            # block now closes it.
            return (0, 0)

        sql = '''
            SELECT MAX(entry_date) FROM nutrition
            WHERE mfp_username = %s;
        '''
        cur.execute(sql, (user, ))
        last_updated_date = cur.fetchall()
        last_updated_date = datetime.strftime(last_updated_date[0][0],
                                              '%Y-%m-%d')
        cur.close()
        return (user_exists, last_updated_date)
    finally:
        # Close communication with the PostgreSQL database server on every
        # path (the original also closed conn before cur).
        conn.close()
def connect():
    """Connect to the PostgreSQL database server and return the connection.

    Returns the open connection object on success; the CALLER owns it and is
    responsible for closing it. Returns None (after printing the error) if
    the connection attempt fails.
    """
    conn = None
    try:
        # read connection parameters
        params = config()

        # connect to the PostgreSQL server
        print('Connecting to the PostgreSQL database...')
        conn = psycopg2.connect(**params)
        return conn
    except (Exception, psycopg2.DatabaseError) as error:
        # NOTE: the original printed 'Database connection closed.' in a
        # finally block even though the open connection was being returned —
        # that misleading message (and the dead commented-out close) is gone.
        print(error)
def getInfoOfYearData(year):
    """Return the city/weather rows for the given year.

    parameters:
        year -- year value to filter city_data on (bound as a query
                parameter)

    returns:
        list of result tuples (name, state, latitude, longitude, maxtemp,
        mintemp, avgtemp); empty on error.
    """
    conn = None
    ciudad = []
    try:
        params = config()
        print('Connecting to the database...')
        conn = psycopg2.connect(**params)
        cur = conn.cursor()
        print('Consulting data')
        # Parameterized query: the original concatenated `year` directly into
        # the SQL string — an injection risk that also broke for non-str input.
        cur.execute(
            '''SELECT C.name, C.state, C.latitude, C.longitude,
                      D.maxtemp, D.mintemp, D.avgtemp
               FROM city AS C
               INNER JOIN city_data AS CD ON C.city_id = CD.city_id
               INNER JOIN data AS D ON CD.data_id = D.data_id
               WHERE CD.year = %s''', (year, ))
        for row in cur:
            ciudad.append(row)
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
            print('Database connection closed')
    # The original built this list but never returned it.
    return ciudad
def return_data(user, date_start, date_end):
    '''
    Return query output as a dataframe for the provided username between the
    given date ranges

    parameters:
        user (string) -- username
        date_start (string) -- start date
        date_end (string) -- end date
    '''
    # Load config parameters from database.ini
    params = config()

    # Connect to the PostgreSQL server
    conn = psycopg2.connect(**params)
    cur = conn.cursor()
    try:
        # Parameterized query. The original used %-string-formatting with
        # three arguments but only two placeholders (the username had been
        # replaced by the literal '******'), which raised a TypeError at
        # runtime and was also a SQL-injection risk.
        df = psql.read_sql(
            "SELECT * FROM nutrition "
            "WHERE (mfp_username = %(user)s "
            "AND entry_date >= %(start)s AND entry_date <= %(end)s);",
            conn,
            params={'user': user, 'start': date_start, 'end': date_end})

        # Drop columns that are entirely empty, zero-fill the rest.
        df.dropna(axis='columns', how='all', inplace=True)
        df.fillna(0, inplace=True)

        # If there weren't any entries, return a default dataframe of 0's
        if len(df.columns) <= 3:
            df['calories'] = 0
            df['item'] = None
            df['protein'] = 0
            df['carbohydrates'] = 0
            df['fat'] = 0
            df['fiber'] = 0
            df['sugar'] = 0
        return df
    finally:
        # Close cursor before connection (original order was inverted and
        # skipped entirely if read_sql raised).
        cur.close()
        conn.close()
def execute_sql(sql, *argv):
    '''
    Execute the input SQL statement on the PostgreSQL database

    parameters:
        sql (string) -- sql query to execute
        *argv -- optional iterables of parameter values; for each iterable
                 given, the statement is executed once per contained value
    '''
    conn = None
    try:
        # Load config parameters from database.ini
        params = config()

        # Connect to the PostgreSQL server
        conn = psycopg2.connect(**params)

        # Create new cursor
        cur = conn.cursor()

        # Execute the SQL command. Plain loops instead of a throwaway list
        # comprehension used only for its side effects.
        if argv:
            for arg in argv:
                for value in arg:
                    cur.execute(sql, (value, ))
        else:
            cur.execute(sql)

        # commit the changes
        conn.commit()

        # close communication with the PostgreSQL database server
        cur.close()
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
    finally:
        if conn is not None:
            conn.close()
class DataSource(object):
    """Populates data_source.* tables from the raw CSV files listed in config.

    File paths come from the 'data_source_files' section of the config.
    Each populate_* flag passed to __init__ selects one table to load when
    populate() is called.
    """

    # Source-file paths, resolved once at class-definition time.
    data_source_files = config(section='data_source_files')
    raw_station_inventory_file = data_source_files.get(
        'raw_station_inventory_file')
    collision_data_ottawa_2014 = data_source_files.get(
        'collision_data_ottawa_2014')
    collision_data_ottawa_2015 = data_source_files.get(
        'collision_data_ottawa_2015')
    collision_data_ottawa_2016 = data_source_files.get(
        'collision_data_ottawa_2016')
    collision_data_ottawa_2017 = data_source_files.get(
        'collision_data_ottawa_2017')
    collision_data_toronto = data_source_files.get('collision_data_toronto')
    climate_data_alberta_1 = data_source_files.get('climate_data_alberta_1')
    climate_data_alberta_2 = data_source_files.get('climate_data_alberta_2')
    climate_data_alberta_3 = data_source_files.get('climate_data_alberta_3')
    climate_data_alberta_4 = data_source_files.get('climate_data_alberta_4')
    climate_data_alberta_5 = data_source_files.get('climate_data_alberta_5')
    climate_data_alberta_6 = data_source_files.get('climate_data_alberta_6')
    climate_data_ontario_1 = data_source_files.get('climate_data_ontario_1')
    climate_data_ontario_2 = data_source_files.get('climate_data_ontario_2')
    climate_data_ontario_3 = data_source_files.get('climate_data_ontario_3')
    climate_data_ontario_4 = data_source_files.get('climate_data_ontario_4')
    climate_data_ontario_5 = data_source_files.get('climate_data_ontario_5')
    climate_data_ontario_6 = data_source_files.get('climate_data_ontario_6')

    # The ontario and alberta climate tables share an identical column list;
    # keep it in one template instead of two duplicated literals.
    _CLIMATE_COPY_SQL = """COPY data_source.climate_data_{province}(
        date_time, year, month, day, time,
        temp_c, temp_flag,
        dew_point_temp_c, dew_point_temp_flag,
        rel_hum_percent, rel_hum_flag,
        wind_dir_10s_deg, wind_dir_flag,
        wind_spd_km_h, wind_spd_flag,
        visibility_km, visibility_flag,
        stn_press_kpa, stn_press_flag,
        hmdx, hmdx_flag,
        wind_chill, wind_chill_flag,
        weather, station_name, province)
        FROM STDIN CSV HEADER NULL ''"""

    def __init__(self,
                 populate_ontario_climate_data=False,
                 populate_alberta_climate_data=False,
                 populate_ottawa_climate_data=False,
                 populate_toronto_climate_data=False,
                 populate_ottawa_collision_data=False,
                 populate_toronto_collision_data=False,
                 populate_raw_station_inventory_data=False,
                 populate_station_inventory_data=False):
        """Record which tables populate() should load (all off by default)."""
        # POPULATE CLIMATE DATA
        self.populate_ontario_climate_data = populate_ontario_climate_data
        self.populate_alberta_climate_data = populate_alberta_climate_data
        self.populate_ottawa_climate_data = populate_ottawa_climate_data
        self.populate_toronto_climate_data = populate_toronto_climate_data
        # POPULATE COLLISION DATA
        self.populate_ottawa_collision_data = populate_ottawa_collision_data
        self.populate_toronto_collision_data = populate_toronto_collision_data
        # POPULATE STATION INVENTORY DATA
        self.populate_raw_station_inventory_data = \
            populate_raw_station_inventory_data
        self.populate_station_inventory_data = populate_station_inventory_data

    def populate(self):
        """Load every table whose flag was enabled in __init__.

        Collision tables first, then climate tables (the ottawa/toronto
        climate tables are derived from the ontario table), then station
        inventory (station_inventory is derived from raw_station_inventory).
        """
        if self.populate_ottawa_collision_data:
            self.populate_ottawa_collision_table()
        if self.populate_toronto_collision_data:
            self.populate_toronto_collision_table()
        if self.populate_ontario_climate_data:
            self.populate_ontario_climate_table()
        if self.populate_alberta_climate_data:
            self.populate_alberta_climate_table()
        if self.populate_ottawa_climate_data:
            self.populate_ottawa_climate_table()
        if self.populate_toronto_climate_data:
            self.populate_toronto_climate_table()
        if self.populate_raw_station_inventory_data:
            self.populate_raw_station_inventory_table()
        if self.populate_station_inventory_data:
            self.populate_station_inventory_table()

    def _copy_files(self, file_paths, sql, announce=False):
        """COPY each CSV in file_paths using sql; optionally print progress.

        Replaces the original's repeated copy_data_from_csv/print pairs.
        """
        for path in file_paths:
            self.copy_data_from_csv(path, sql)
            if announce:
                print(path + " successfully inserted into db")

    def populate_ottawa_collision_table(self):
        """COPY the four yearly Ottawa collision CSVs into one table."""
        print("Populating data_source.collision_data_ottawa...")

        sql = """COPY data_source.collision_data_ottawa(
            collision_id, location, x, y, longitude, latitude, date, time,
            environment, light, surface_condition, traffic_control,
            traffic_control_condition, collision_classification,
            impact_type, no_of_pedestrians)
            FROM STDIN CSV HEADER NULL ''"""

        self._copy_files([self.collision_data_ottawa_2014,
                          self.collision_data_ottawa_2015,
                          self.collision_data_ottawa_2016,
                          self.collision_data_ottawa_2017], sql)

        print("data_source.collision_data_ottawa successfully populated.")

    def populate_toronto_collision_table(self):
        """COPY the Toronto collision CSV into its table."""
        print("Populating data_source.collision_data_toronto...")

        # "offset" is quoted because it is a reserved word in PostgreSQL.
        sql = """COPY data_source.collision_data_toronto(
            x, y, index_, accnum, year, date, time, hour, street1, street2,
            "offset", road_class, district, latitude, longitude, loccoord,
            accloc, traffctl, visibility, light, rdsfcond, acclass,
            impactype, invtype, invage, injury, fatal_no, initdir, vehtype,
            manoeuver, drivact, drivcond, pedtype, pedact, pedcond,
            cyclistype, cycact, cyccond, pedestrian, cyclist, automobile,
            motorcycle, truck, trsn_city_veh, emerg_veh, passenger,
            speeding, ag_driv, redlight, alcohol, disability, division,
            ward_name, ward_id, hood_id, hood_name, fid)
            FROM STDIN CSV HEADER NULL ''"""

        self.copy_data_from_csv(self.collision_data_toronto, sql)

        print("data_source.collision_data_toronto successfully populated.")

    def populate_ontario_climate_table(self):
        """COPY the six Ontario climate CSVs into the ontario table."""
        print("Populating data_source.climate_data_ontario...")

        sql = self._CLIMATE_COPY_SQL.format(province='ontario')
        self._copy_files([self.climate_data_ontario_1,
                          self.climate_data_ontario_2,
                          self.climate_data_ontario_3,
                          self.climate_data_ontario_4,
                          self.climate_data_ontario_5,
                          self.climate_data_ontario_6], sql, announce=True)

        print("data_source.climate_data_ontario successfully populated.")

    def populate_alberta_climate_table(self):
        """COPY the six Alberta climate CSVs into the alberta table."""
        print("Populating data_source.climate_data_alberta...")

        sql = self._CLIMATE_COPY_SQL.format(province='alberta')
        self._copy_files([self.climate_data_alberta_1,
                          self.climate_data_alberta_2,
                          self.climate_data_alberta_3,
                          self.climate_data_alberta_4,
                          self.climate_data_alberta_5,
                          self.climate_data_alberta_6], sql, announce=True)

        print("data_source.climate_data_alberta successfully populated.")

    def populate_ottawa_climate_table(self):
        """Derive the Ottawa climate table from the Ontario rows."""
        print("Populating data_source.climate_data_ottawa...")

        sql = """CREATE TABLE data_source.climate_data_ottawa AS
            SELECT * FROM data_source.climate_data_ontario
            WHERE station_name in %s"""

        self.create_filtered_stations_table(sql, OTTAWA_STATIONS)

        print("data_source.climate_data_ottawa successfully populated.")

    def populate_toronto_climate_table(self):
        """Derive the Toronto climate table from the Ontario rows."""
        print("Populating data_source.climate_data_toronto...")

        sql = """CREATE TABLE data_source.climate_data_toronto AS
            SELECT * FROM data_source.climate_data_ontario
            WHERE station_name in %s"""

        self.create_filtered_stations_table(sql, TORONTO_STATIONS)

        print("data_source.climate_data_toronto successfully populated.")

    def populate_raw_station_inventory_table(self):
        """COPY the raw station-inventory CSV into its table."""
        print("Populating data_source.raw_station_inventory...")

        sql = """COPY data_source.raw_station_inventory(name, latitude,
            longitude, elevation)
            FROM STDIN CSV HEADER NULL ''"""

        self.copy_data_from_csv(self.raw_station_inventory_file, sql)

        print("data_source.raw_station_inventory successfully populated.")

    def populate_station_inventory_table(self):
        """Derive the deduplicated station inventory from the raw table."""
        print("Populating data_source.station_inventory...")

        # DISTINCT ON (name) keeps one row per station name.
        sql = """CREATE TABLE data_source.station_inventory AS
            SELECT DISTINCT ON (name) * FROM data_source.raw_station_inventory
            WHERE name in %s"""

        self.create_filtered_stations_table(sql, WEATHER_STATIONS)

        print("data_source.station_inventory successfully populated.")

    @staticmethod
    def create_filtered_stations_table(sql, stations):
        """Execute sql with `stations` bound to its single IN-list parameter."""
        db = DatabaseConnection()
        connection = db.get_connection()
        with connection.cursor() as cursor:
            cursor.execute(sql, (stations, ))

    @staticmethod
    def copy_data_from_csv(file_path, sql):
        """Stream the CSV at file_path into the database via COPY ... STDIN."""
        db = DatabaseConnection()
        connection = db.get_connection()
        with connection.cursor() as cursor:
            with open(file_path) as f:
                cursor.copy_expert(file=f, sql=sql)
def insert_nutrition(users, last_date):
    '''
    Collect all nutrition data from every user over the last 5 years and
    insert into the database

    parameters:
        users (list of strings) -- list of users to add to the database
        last_date (str) -- Most recent date that an entry has been recorded
                           in the database for an input user
    '''
    # MFP item keys, in the same order as the INSERT column list below.
    # Replaces 17 copy/paste dictionary lookups per item.
    nutrient_keys = ('Calories', 'Protein', 'Carbohydrates', 'Fat', 'Fiber',
                     'Sugar', 'Saturated Fat', 'Polyunsaturated Fat',
                     'Monounsaturated Fat', 'Trans Fat', 'Cholesterol',
                     'Sodium', 'Potassium', 'Vitamin A', 'Vitamin C',
                     'Calcium', 'Iron')

    # SQL is constant — build it once, outside the per-user loop.
    sql_entries = '''
        INSERT INTO nutrition (mfp_username, entry_date, item, calories,
            protein, carbohydrates, fat, fiber, sugar, saturated_fat,
            polyunsaturated_fat, monounsaturated_fat, trans_fat, cholesterol,
            sodium, potassium, vitamin_a, vitamin_c, calcium, iron)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
                %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
    '''
    sql_no_entries = '''
        INSERT INTO nutrition (mfp_username, entry_date)
        VALUES (%s, %s);
    '''

    # Load config parameters from database.ini
    params = config()

    # Connect to the PostgreSQL server
    conn = psycopg2.connect(**params)
    cur = conn.cursor()
    try:
        for user in users:
            mfp_user = MFP_User(user, last_date)
            # Per-user try: one failing user is reported and skipped, the
            # rest are still inserted and committed (as in the original).
            try:
                for day, day_data in mfp_user.data['Dates'].items():
                    if day == 'Items':
                        continue
                    if day_data:
                        for item, nutrients in day_data['Items'].items():
                            values = tuple(nutrients[key]
                                           for key in nutrient_keys)
                            cur.execute(
                                sql_entries,
                                (mfp_user.username, day, item) + values)
                    else:
                        # Day with no logged items: record the date only.
                        cur.execute(sql_no_entries, (mfp_user.username, day))
                # commit the changes for this user
                conn.commit()
            except (Exception, psycopg2.DatabaseError) as error:
                print(error)
    finally:
        # Close communication with the PostgreSQL database server; cursor
        # first (the original closed the connection before the cursor).
        cur.close()
        conn.close()
#!/usr/bin/python
import psycopg2
from db.config import config
import pdb
import sys
from db.cve_structure import create_cve

# Connection parameters loaded once from the config at import time.
params = config()


def connect():
    """Create a new database session and return the connection object.

    Exits the process with status 1 (after printing the error) if the
    connection cannot be established.
    """
    conn = None
    try:
        conn = psycopg2.connect(**params)
    except (Exception, psycopg2.DatabaseError) as error:
        print(error)
        sys.exit(1)
    return conn


def checkCVEExists(conn):
    """Return True if the public table 'cve_new' exists, else False.

    The original implicitly returned None on the missing-table path and
    never closed its cursor.
    """
    cur = conn.cursor()
    cur.execute(
        "SELECT EXISTS(SELECT 1 FROM information_schema.tables "
        "WHERE table_catalog=%s AND table_schema='public' AND table_name=%s)",
        (params['database'], 'cve_new'))
    exists = cur.fetchone()
    cur.close()
    return bool(exists[0])