def populate_aucresult_table(fl: pd.DataFrame, db: sq.connect):
    """Insert auction-result rows from *fl* into the `auction_results` table.

    Resolves auction names to IDs via the module-level ``auc_name2ID`` map
    and uses the module-level ``result_id`` counter as the primary key.
    Inserts are best-effort: a failed row is rolled back and skipped.

    :param fl: auction-results DataFrame (must contain the columns below)
    :param db: open database connection
    """
    global auc_name2ID, result_id
    with db.cursor() as cursor:
        auc_result = fl[[
            'Auction Name', 'Customer ID', 'Source Location ID',
            'Sink Location ID', 'Buy/Sell', 'ClassType', 'Award FTR MW',
            'Award FTR Price'
        ]]
        # Parameterized INSERT — the original %-formatted statement broke on
        # quotes in text fields and was injection-prone.
        sql = ('INSERT INTO `auction_results` '
               '(Auc_Res_ID, Auc_ID, C_ID, Sou_ID, Sin_ID, Buy_Sell, '
               'Class_Type, Award_FTR_MW, Award_FTR_Price) '
               'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)')
        for row in auc_result.itertuples(index=False):
            args = (result_id, auc_name2ID[row[0]], row[1], row[2], row[3],
                    row[4], row[5], float(row[6]), float(row[7]))
            try:
                cursor.execute(sql, args)
                # Advance the PK counter only after a successful insert;
                # the original incremented it even on failure, leaving gaps.
                result_id += 1
                db.commit()
            except Exception:
                # Typically a duplicate key — keep the best-effort semantics.
                db.rollback()
def send_data_db(cnx: pymysql.connect, line_list: list):
    """Store one Arduino reading ``[id, sensor_num, value]`` in `reading`.

    A leading value of 100 marks a bad packet: it is only logged and the
    module-level ``FLAG_RECONNECT`` is raised. On any MySQL error the
    connection is closed and ``FLAG_RECONNECT`` is raised so the caller
    re-establishes the link.

    :param cnx: open database connection
    :param line_list: [reading id, sensor number, value] from the Arduino
    """
    global FLAG_RECONNECT
    if line_list[0] == 100:
        logger.info('Bad data from Arduino: {}'.format(line_list))
        FLAG_RECONNECT = True
        return
    # `reading_id` instead of `id` — avoid shadowing the builtin.
    reading_id = line_list[0]
    sensor_num = line_list[1]
    value = line_list[2]
    cursor = None
    try:
        query = "INSERT INTO reading(id, sensor_num, value, time)" \
                "VALUES (%s, %s, %s, %s)"
        args = [reading_id, sensor_num, value, localtime()]
        cursor = cnx.cursor()
        cursor.execute(query, args)
        cnx.commit()
        cursor.close()
    except MySQLError as err:
        logger.error(err)
        # BUG FIX: if cnx.cursor() itself raised, `cursor` was unbound in the
        # original handler and cursor.close() raised NameError.
        if cursor is not None:
            cursor.close()
        cnx.close()
        FLAG_RECONNECT = True
        # 2013: 'Lost connection to MySQL server during query'
        if err.args[0] == 2013:
            logger.info('Flag: {}'.format(FLAG_RECONNECT))
def populate_lmp_table(db: sq.connect):
    """Insert day-ahead LMP rows from the module-level ``lmr`` DataFrame.

    Dates arrive as M/D/Y strings and are stored as Y-M-D. Rows whose
    primary key already exists are rolled back and skipped (best-effort).

    :param db: open database connection
    """
    global lmr
    with db.cursor() as cursor:
        data = lmr[[
            'Location ID', 'Hour Ending', 'Date', 'Locational Marginal Price',
            'Energy Component', 'Congestion Component',
            'Marginal Loss Component'
        ]]
        # Parameterized INSERT instead of %-formatting (safe quoting).
        sql = ('INSERT INTO `day_ahead_market` '
               '(L_ID, Hour_Ending, Date, LMP, Energy_Comp, Cog_Comp, ML_Comp) '
               'VALUES (%s, %s, %s, %s, %s, %s, %s)')
        for row in data.itertuples(index=False):
            m, d, y = row[2].split('/')
            date = '{}-{}-{}'.format(y, m, d)
            try:
                cursor.execute(sql, (row[0], row[1], date, row[3], row[4],
                                     row[5], row[6]))
                db.commit()
            except Exception:
                # Duplicate PK or malformed row — skip it.
                db.rollback()
def run_given_database_query(self, _db_name: str, _sql: str,
                             _conn: pymysql.connect) -> list:
    """Run given sql statement and return all fetched rows.

    :param _db_name: database name (kept for interface compatibility; unused)
    :param _sql: SQL statement to execute
    :param _conn: open database connection
    :return: list of result rows, or None when the query fails
    """
    try:
        with _conn.cursor() as _cursor:
            _cursor.execute(_sql)
            return _cursor.fetchall()
    except Exception:
        # Narrowed from a bare except (which also swallowed KeyboardInterrupt
        # / SystemExit); callers still get the silent-failure contract.
        _conn.rollback()
def create_database_utf8(self, _conn: pymysql.connect, _name: str):
    """Create a new database in utf-8 format

    :param _conn: database connection
    :param _name: database name — interpolated into the DDL, so it must come
        from a trusted source (identifiers cannot be bound as parameters)
    :return:
    """
    try:
        with _conn.cursor() as _cursor:
            _sql = (f"CREATE DATABASE IF NOT EXISTS `{_name}` "
                    f"CHARACTER SET utf8 COLLATE utf8_general_ci")
            _cursor.execute(_sql)
    except Exception:
        # Narrowed from a bare except.
        print(f'[Error]: Create database {_name} failed')
        _conn.rollback()
def insert_random_data_into_data2_table(self, _conn: pymysql.connect):
    """Insert data into data2 table from ./data2.txt (one statement per line).

    :param _conn: database connection
    """
    with open('./data2.txt', 'r') as f_in:
        _all_sql_statements = f_in.readlines()
    try:
        with _conn.cursor() as _cursor:
            for _sql in _all_sql_statements:
                _cursor.execute(_sql)
            # Single commit: the whole file is applied as one unit.
            _conn.commit()
    except Exception:
        # Narrowed from a bare except; any failed statement undoes the batch.
        _conn.rollback()
def populate_customer_table(fl: pd.DataFrame, db: sq.connect):
    """Insert distinct (Customer ID, Customer Name) pairs into `customer`.

    Inserts are best-effort: rows whose primary key already exists are
    rolled back and skipped.

    :param fl: auction-results DataFrame containing the customer columns
    :param db: open database connection
    """
    with db.cursor() as cursor:
        customer = fl[['Customer ID', 'Customer Name']].drop_duplicates()
        # Parameterized INSERT — the %-formatted original broke on customer
        # names containing quotes.
        sql = 'INSERT INTO `customer` (C_ID, C_Name) VALUES (%s, %s)'
        for row in customer.itertuples(index=False):
            try:
                cursor.execute(sql, (row[0], row[1]))
                db.commit()
            except Exception:
                # PK already exists — skip the duplicate.
                db.rollback()
def get_temperatures_from_to(cnx: connect, from_date: datetime,
                             to_date: datetime) -> list:
    """Return one temperature value per hour between the two dates.

    Picks the earliest reading of each (date, hour) bucket, then joins back
    to `reading` to fetch its value.

    :param cnx: open database connection
    :param from_date: inclusive lower bound
    :param to_date: inclusive upper bound
    :return: list of reading values
    """
    # Dates are bound as parameters instead of str.format — safe quoting.
    query = "SELECT value " \
            "FROM reading R " \
            "INNER JOIN (" \
            "SELECT DATE (time) t_date, MIN(time) t_time " \
            "FROM reading " \
            "GROUP BY DATE(time), HOUR(time) " \
            "HAVING t_date BETWEEN %s AND %s" \
            ") T ON T.t_time = R.time "
    cursor = cnx.cursor()
    try:
        cursor.execute(query, (from_date, to_date))
        return [r[0] for r in cursor]
    finally:
        # The original leaked the cursor on every call.
        cursor.close()
def create_data1_table(self, _conn: pymysql.connect,
                       _table_name: str = 'data1'):
    """ Create data1 table for testing

    :param _conn: database connection
    :param _table_name: default data1
    :return:
    """
    try:
        with _conn.cursor() as _cursor:
            _sql = (f'CREATE TABLE IF NOT EXISTS {_table_name}('
                    f'id TEXT, order_id TEXT, cost FLOAT)')
            _cursor.execute(_sql)
    except Exception:
        # Narrowed from a bare except.
        print(f'[Error]: Create table {_table_name} failed')
        _conn.rollback()
def insert_random_data_into_data_table(self, _conn: pymysql.connect):
    """Insert 100 random (name, age) rows into the `data` table.

    Names are 6 random lowercase letters; ages are uniform in [20, 50].

    :param _conn: database connection
    """
    try:
        with _conn.cursor() as _cursor:
            # Hoist loop invariants: the statement and the alphabet.
            _sql = "INSERT INTO `data` (`name`, `age`) VALUES (%s, %s)"
            _letters = string.ascii_lowercase
            for _ in range(100):
                _name = ''.join(random.choice(_letters) for _ in range(6))
                _age = random.randint(20, 50)
                _cursor.execute(_sql, [_name, _age])
            _conn.commit()
    except Exception:
        # Narrowed from a bare except.
        _conn.rollback()
def populate_auction_table(fl: pd.DataFrame, db: sq.connect):
    """Insert each distinct auction name into `auction` and record its ID.

    An auction name of exactly three space-separated words is treated as a
    monthly auction (period 30 days); anything else as annual (365 days).
    Updates the module-level ``auc_name2ID`` map and ``auction_id`` counter.

    :param fl: auction-results DataFrame with an 'Auction Name' column
    :param db: open database connection
    """
    global auc_name2ID, auction_id
    with db.cursor() as cursor:
        # Parameterized INSERT — auction names may contain quotes.
        sql = ('INSERT INTO `auction` (Auc_ID, Auc_Name, Period) '
               'VALUES (%s, %s, %s)')
        for name in fl['Auction Name'].unique():
            period = 30 if len(name.split(' ')) == 3 else 365
            try:
                cursor.execute(sql, (auction_id, name, period))
                # BUG FIX: register the mapping only once the row actually
                # exists — the original recorded it even when the insert
                # failed and was rolled back.
                auc_name2ID[name] = auction_id
                auction_id += 1
                db.commit()
            except Exception:
                db.rollback()
def create_data_table(self, _conn: pymysql.connect, _table_name: str = 'data'):
    """Create the data table

    :param _conn: Database connection
    :param _table_name: table name, default 'data'
    :return:
    """
    try:
        with _conn.cursor() as _cursor:
            _sql = (f'CREATE TABLE IF NOT EXISTS {_table_name}('
                    f'id INT AUTO_INCREMENT PRIMARY KEY, '
                    f'name TEXT, age INT)')
            _cursor.execute(_sql)
    except Exception:
        # Narrowed from a bare except.
        print(f'[Error]: Create table {_table_name} failed')
        _conn.rollback()
def MySql2Graph():
    """Load Nodes/Edges/POIs/Trees from MySQL and rebuild the graph maps.

    Returns (id2Node, id2Edge, id2Poi, id2Tree, graphIds), where graphIds
    lists the IDs of intersection nodes.
    """
    # Connect to database and initialize cursor
    PrintNow('Using {} ... '.format(mySqlDataBase), end = '')
    connection = MySqlConnect(user = '******', port = 3306, db = mySqlDataBase)
    cursor = connection.cursor()
    PrintNow('done')

    def load(label, table, build):
        # Fetch every row of *table*, mapping its first column to build(...).
        PrintNow('Loading {} ... '.format(label), end = '')
        cursor.execute('''SELECT * FROM {} ;'''.format(table))
        mapping = {key: build(key, rest) for key, *rest in cursor}
        PrintNow('found {:d}'.format(len(mapping)))
        return mapping

    id2Node = load('Nodes', 'Nodes', lambda i, rest: Node(i, *rest))
    id2Edge = load('Edges', 'Edges', lambda i, rest: Edge(i, *rest))
    id2Poi = load('POIs', 'POIs', lambda i, rest: POI(i, *rest))
    id2Tree = load('Trees', 'Trees', lambda i, rest: Tree(False, i, *rest))

    # Only intersection nodes participate in the routing graph.
    graphIds = [nodeId for nodeId, node in id2Node.items()
                if node.isIntersection]
    PrintNow('Finished loading MySQL database `{}`.'.format(mySqlDataBase))
    return id2Node, id2Edge, id2Poi, id2Tree, graphIds
def send_query(query: str, cnx: connect):
    """Execute *query* on *cnx*, commit, and update the connection flag.

    Lost-connection errors (codes 110 / 2013) clear ``Flag.connect_db`` so
    the caller reconnects; duplicate-entry errors (1062) are only logged;
    any other MySQL error re-marks the connection as alive.

    :param query: SQL statement to execute
    :param cnx: open database connection
    """
    try:
        cursor = cnx.cursor()
        cursor.execute(query)
        cnx.commit()
        cursor.close()
    except MySQLError as err:
        logger.info(type(err))
        logger.error(err)
        # (2013, 'Lost connection to MySQL server during query ([Errno 110] Connection timed out)')
        # BUG FIX: the original `err.args[0] == 110 or 2013` was always
        # truthy, so EVERY error was treated as a lost connection and the
        # elif/else branches were unreachable.
        if err.args[0] in (110, 2013):
            Flag.connect_db = False
            logger.info('Flag.connect_db: {}'.format(Flag.connect_db))
        # 1062, "Duplicate entry
        elif err.args[0] == 1062:
            logger.error(err)
        else:
            Flag.connect_db = True
def store_rating(conn: pymysql.connect, result, reply_count, thread_id):
    """Upsert one thread's rating summary into the `ratings` table.

    Stores the top five rating keys and their counts (missing slots are
    NULL), plus a has_results flag and the reply count.

    :param conn: open database connection
    :param result: mapping of rating -> count, insertion-ordered best first
    :param reply_count: number of replies in the thread
    :param thread_id: thread primary key
    """
    sql = """REPLACE INTO ratings (thread_id, reply_count, has_results, top1, top2, top3, top4, top5, count1, count2,count3, count4, count5 ) values (%s,%s,%s,%s,%s,%s,%s, %s, %s, %s, %s, %s, %s); """
    # Collapse the original five-fold copy-paste: take up to five keys and
    # pad both lists with None so the column count is always 13.
    keys = list(result.keys())[:5]
    tops = keys + [None] * (5 - len(keys))
    counts = [result.get(k) for k in keys] + [None] * (5 - len(keys))
    has_results = len(result) >= 1
    cur = conn.cursor()
    cur.execute(sql, [thread_id, reply_count, has_results] + tops + counts)
    conn.commit()
def get_hours_from_to(cnx: connect, from_date: datetime,
                      to_date: datetime) -> list:
    """Return the hour-of-day of one reading per hour between the two dates.

    Picks the earliest reading of each (date, hour) bucket, then joins back
    to `reading` to extract HOUR(time).

    :param cnx: open database connection
    :param from_date: inclusive lower bound
    :param to_date: inclusive upper bound
    :return: list of hour integers
    """
    # Dates are bound as parameters instead of str.format — safe quoting.
    query = "SELECT hour(time) " \
            "FROM reading R " \
            "INNER JOIN (" \
            "SELECT DATE (time) t_date, MIN(time) t_time " \
            "FROM reading " \
            "GROUP BY DATE(time), HOUR(time) " \
            "HAVING t_date BETWEEN %s AND %s" \
            ") T ON T.t_time = R.time "
    cursor = cnx.cursor()
    try:
        cursor.execute(query, (from_date, to_date))
        return [r[0] for r in cursor]
    finally:
        # The original leaked the cursor on every call.
        cursor.close()
def set_fk(db: sq.connect):
    """Add the foreign-key constraints linking the project's tables.

    Each ALTER is attempted independently and rolled back on failure, so
    re-running after a partial setup only adds the constraints still missing.

    :param db: open database connection
    """
    # (table, constraint name, column, referenced table, referenced column)
    constraints = [
        ('auction_results', 'auction_fk', 'Auc_ID', 'auction', 'Auc_ID'),
        ('auction_results', 'source_fk', 'Sou_ID', 'location', 'L_ID'),
        ('auction_results', 'sink_fk', 'Sin_ID', 'location', 'L_ID'),
        ('auction_results', 'sou_lmp_fk', 'Sou_ID', 'day_ahead_market', 'L_ID'),
        ('auction_results', 'sin_lmp_fk', 'Sin_ID', 'day_ahead_market', 'L_ID'),
        ('day_ahead_market', 'location_fk', 'L_ID', 'location', 'L_ID'),
    ]
    template = ('ALTER TABLE `proj`.`{table}` '
                'ADD CONSTRAINT `{name}` FOREIGN KEY (`{col}`) '
                'REFERENCES `proj`.`{ref_table}` (`{ref_col}`) '
                'ON DELETE NO ACTION ON UPDATE NO ACTION;')
    with db.cursor() as cursor:
        for table, name, col, ref_table, ref_col in constraints:
            try:
                cursor.execute(template.format(
                    table=table, name=name, col=col,
                    ref_table=ref_table, ref_col=ref_col))
                db.commit()
            except Exception:
                # Constraint already present (or referenced table missing).
                db.rollback()
def get_c_id(customer_name: str, db: sq.connect):
    """Look up a customer row by exact name.

    :param customer_name: C_Name to match
    :param db: open database connection
    :return: the first matching row, or None when no customer matches
    """
    with db.cursor() as cursor:
        # Parameterized query — the %-formatted original broke on names
        # containing quotes and was injection-prone.
        sql = 'SELECT * FROM customer WHERE C_Name = %s;'
        cursor.execute(sql, (customer_name,))
        return cursor.fetchone()
def CreateTables(id2Node, id2Edge, id2Poi, id2Tree, graphIds):
    """Rebuild the Nodes/Edges/POIs/Trees MySQL tables from in-memory data.

    Drops and recreates each table, then inserts every object; POIs and
    Trees without coordinates are skipped.
    NOTE(review): values are interpolated with str.format, so a text field
    containing a double quote would break its statement — tolerable only
    because this data is generated locally, not user-supplied.
    """
    # Helper
    def List2Str(l):
        # Comma-join the truthy elements; '' for an empty/None list.
        return l and ','.join(str(e) for e in l if e) or ''

    # Initialize
    connection = MySqlConnect(user = '******', port = 3306, db = mySqlDataBase)
    cursor = connection.cursor()

    # Hoist list -> set: the original did `node.id in graphIds` on a list
    # for every node, which is O(n^2) overall.
    graphIdSet = set(graphIds)

    # Nodes
    PrintNow('Nodes TABLE ... ', end = '')
    # IF EXISTS keeps a first-time run from crashing on the DROP.
    cursor.execute('''DROP TABLE IF EXISTS Nodes ;''')
    cursor.execute('''CREATE TABLE Nodes (id INT UNSIGNED NOT NULL PRIMARY KEY,
        isIntersection BOOLEAN, latitude DOUBLE NOT NULL,
        longitude DOUBLE NOT NULL, nodeIds TINYBLOB, edgeIds TINYBLOB,
        lengths TINYBLOB, poiIds TINYBLOB) ;''')
    for node in id2Node.values():
        cursor.execute('''INSERT INTO Nodes(id, isIntersection, latitude,
            longitude, nodeIds, edgeIds, lengths, poiIds)
            VALUES ({0.id:d}, {1:d}, {0.latitude:f}, {0.longitude:f},
            "{2:s}", "{3:s}", "{4:s}", "{5:s}") ;'''.format(
            node, bool(node.id in graphIdSet), List2Str(node.nodeIds),
            List2Str(node.edgeIds), List2Str(node.lengths),
            List2Str(node.poiIds)))
    connection.commit()
    PrintNow('inserted {:d} rows'.format(len(id2Node)))

    # Edges
    PrintNow('Edges TABLE ... ', end = '')
    cursor.execute('''DROP TABLE IF EXISTS Edges ;''')
    cursor.execute('''CREATE TABLE Edges (id INT UNSIGNED NOT NULL PRIMARY KEY,
        name TINYBLOB NOT NULL, nodeIds TINYBLOB NOT NULL,
        treeCount INT UNSIGNED NOT NULL) ;''')
    for edge in id2Edge.values():
        cursor.execute('''INSERT INTO Edges(id, name, nodeIds, treeCount)
            VALUES ({0.id:d}, "{1:s}", "{2:s}", {0.treeCount:d}) ;'''.format(
            edge, edge.name, List2Str(edge.nodeIds)))
    connection.commit()
    PrintNow('inserted {:d} rows'.format(len(id2Edge)))

    # POIs
    PrintNow('POIs TABLE ... ', end = '')
    cursor.execute('''DROP TABLE IF EXISTS POIs ;''')
    cursor.execute('''CREATE TABLE POIs (id BIGINT NOT NULL PRIMARY KEY,
        poiType TINYBLOB NOT NULL, name TINYBLOB NOT NULL,
        nodeIds TINYBLOB NOT NULL, offsets TINYBLOB NOT NULL,
        latitude DOUBLE NOT NULL, longitude DOUBLE NOT NULL,
        address TINYBLOB NOT NULL, city TINYBLOB NOT NULL,
        state TINYBLOB NOT NULL, imageUrl TINYBLOB NOT NULL,
        yelpUrl TINYBLOB NOT NULL) ;''')
    count = 0
    for poi in id2Poi.values():
        # Skip POIs without coordinates — the columns are NOT NULL.
        if (poi.latitude is None or poi.longitude is None):
            continue
        count += 1
        cursor.execute('''INSERT INTO POIs(id, poiType, name, nodeIds,
            offsets, latitude, longitude, address, city, state, imageUrl,
            yelpUrl) VALUES ({0.id:d}, "{0.poiType:s}", "{1:s}", "{2:s}",
            "{3:s}", {0.latitude:f}, {0.longitude:f}, "{4:s}", "{0.city:s}",
            "{0.state:s}", "{0.imageUrl:s}", "{0.yelpUrl:s}") ;'''.format(
            poi, poi.name, List2Str(poi.nodeIds), List2Str(poi.offsets),
            poi.address[0]))
    connection.commit()
    PrintNow('inserted {:d} rows'.format(count))

    # Trees
    PrintNow('Trees TABLE .. ', end = '')
    cursor.execute('''DROP TABLE IF EXISTS Trees ;''')
    cursor.execute('''CREATE TABLE Trees (id INT UNSIGNED NOT NULL PRIMARY KEY,
        variety TINYBLOB NOT NULL, latitude DOUBLE NOT NULL,
        longitude DOUBLE NOT NULL) ;''')
    count = 0
    for tree in id2Tree.values():
        if (tree.latitude is None or tree.longitude is None):
            continue
        count += 1
        cursor.execute('''INSERT INTO Trees(id, variety, latitude, longitude)
            VALUES ({0.id:d}, "{0.variety:s}", {0.latitude:f},
            {0.longitude:f}) ;'''.format(tree))
    connection.commit()
    PrintNow('inserted {:d} rows'.format(count))

    # Garbage (the dead `if False:` debug dump was removed)
    connection.close()
    return
def populate_location_table(fl: pd.DataFrame, db: sq.connect):
    """Fill the `location` table from the LMP file and the auction results.

    Parses the module-level ``lmr`` CSV (skipping its 4-line header, the
    first data row and the trailing footer row), then inserts every distinct
    location from the LMP data and from the auction results' source and sink
    columns — the latter two cover locations that appear only in the results.

    :param fl: auction-results DataFrame with source/sink location columns
    :param db: open database connection
    """
    global lmr
    with db.cursor() as cursor:
        lmr = pd.read_csv(lmr, skiprows=4)
        lmr.drop(lmr.tail(1).index, inplace=True)  # trailing footer row
        lmr.drop(0, inplace=True)

        # Parameterized INSERT shared by all three passes — the %-formatted
        # original broke on names containing quotes.
        sql = ('INSERT INTO `location` (L_ID, L_Name, L_Type) '
               'VALUES (%s, %s, %s)')

        def insert_locations(frame):
            # Best-effort insert of each distinct (id, name, type) row.
            for row in frame.drop_duplicates().itertuples(index=False):
                try:
                    cursor.execute(sql, (row[0], row[1], row[2]))
                    db.commit()
                except Exception:
                    db.rollback()  # L_ID already present — skip

        # insert location id, location name, location type into location table
        insert_locations(
            lmr[['Location ID', 'Location Name', 'Location Type']])
        # Cover locations appearing only in the auction results: source side…
        insert_locations(fl[[
            'Source Location ID', 'Source Location Name',
            'Source Location Type'
        ]])
        # …and sink side.
        insert_locations(fl[[
            'Sink Location ID', 'Sink Location Name', 'Sink Location Type'
        ]])
def get_all_threads(conn: pymysql.connect):
    """Fetch every row of the `threads` table.

    :param conn: open database connection
    :return: all thread rows
    """
    sql = """SELECT * FROM threads WHERE 1"""
    cursor = conn.cursor()
    cursor.execute(sql, [])
    rows = cursor.fetchall()
    return rows