import logging

from pg import DB, DatabaseError, IntegrityError

LOGGER = logging.getLogger(__name__)


def move_data(dbset):
    '''Move raw Bluetooth data to its permanent tables, then clean up.

    :param dbset: Dictionary of PostgreSQL connection parameters
    '''
    db = DB(**dbset)
    try:
        db.begin()
        query = db.query("SELECT bluetooth.move_raw_data();")
        if query.getresult()[0][0] != 1:
            db.rollback()
            raise DatabaseError(
                'bluetooth.move_raw_data did not complete successfully')
        query = db.query("TRUNCATE bluetooth.raw_data;")
        query = db.query("SELECT king_pilot.load_bt_data();")
        if query.getresult()[0][0] != 1:
            db.rollback()
            raise DatabaseError(
                'king_pilot.load_bt_data did not complete successfully')
        db.query(
            'DELETE FROM king_pilot.daily_raw_bt WHERE measured_timestamp < now()::DATE;'
        )
        db.commit()
    except DatabaseError as dberr:
        LOGGER.error(dberr)
        db.rollback()
    except IntegrityError:
        LOGGER.critical(
            'Moving data failed due to violation of a constraint. '
            'Data will have to be moved manually')
        db.rollback()
    finally:
        db.close()
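# Hedged usage sketch for move_data (not from the original module); the
# connection values below are placeholders, not the project's real settings.
dbset = {'dbname': 'bigdata', 'host': 'localhost',
         'user': 'bt_admin', 'passwd': 'secret'}
move_data(dbset)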
def update_configs(all_analyses, dbset):
    '''
    Syncs configs from blip server with database and returns configs to pull data from.

    :param all_analyses: List of blip configurations
    :param dbset: Dictionary to connect to PostgreSQL database
    '''
    db = DB(**dbset)
    db.begin()
    db.query('''TRUNCATE bluetooth.all_analyses_day_old;
                INSERT INTO bluetooth.all_analyses_day_old
                SELECT * FROM bluetooth.all_analyses;''')
    db.commit()
    analyses_pull_data = {}
    for report in all_analyses:
        report.outcomes = [outcome.__json__() for outcome in report.outcomes]
        report.routePoints = [route_point.__json__()
                              for route_point in report.routePoints]
        row = dict(device_class_set_name=report.deviceClassSetName,
                   analysis_id=report.id,
                   minimum_point_completed=db.encode_json(
                       report.minimumPointCompleted.__json__()),
                   outcomes=report.outcomes,
                   report_id=report.reportId,
                   report_name=report.reportName,
                   route_id=report.routeId,
                   route_name=report.routeName,
                   route_points=report.routePoints)
        # If the upsert fails, log the error and continue; don't add the
        # analysis to the analyses to pull.
        try:
            upserted = db.upsert('bluetooth.all_analyses', row,
                                 pull_data='included.pull_data')
            analyses_pull_data[upserted['analysis_id']] = {
                'pull_data': upserted['pull_data'],
                'report_name': upserted['report_name']
            }
        except IntegrityError as err:
            LOGGER.error(err)
    db.close()
    analyses_to_pull = {
        analysis_id: analysis
        for (analysis_id, analysis) in analyses_pull_data.items()
        if analysis['pull_data']
    }
    return analyses_to_pull
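# The db.upsert call above uses PyGreSQL's classic-interface upsert: on a
# primary-key conflict it issues INSERT ... ON CONFLICT DO UPDATE, and a string
# keyword such as 'included.pull_data' is an SQL expression that, judging by
# how pull_data is preserved above, refers to the row already in the table, so
# a manually set pull_data flag survives the sync. A minimal sketch against a
# hypothetical table:
db = DB(dbname='testdb')  # placeholder database
db.query('CREATE TABLE IF NOT EXISTS demo '
         '(id int PRIMARY KEY, flag bool DEFAULT TRUE)')
row = db.upsert('demo', dict(id=1), flag='included.flag')
print(row['flag'])  # the stored value is kept on conflict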
def db_input(tweet):
    '''Split a tweet into words and hashtags and upsert their co-occurrence
    counts into the words/hashtags/links tables.'''
    hashtags = []
    words = []
    for word in tweet.split(" "):
        if len(word) > 3 and special_match(word, plaintext):
            word = word.lower()
            if word[0] == "#":
                hashtags.append(word)
            else:
                words.append(word)
    # Upsert the word and the hashtag, then insert the link between them or
    # bump its weight if it already exists.
    insert_update = (
        "with w as (insert into words (word) values ('{}') "
        "on conflict (word) do update set word = words.word returning (wid)), "
        "h as (insert into hashtags (hashtag) values ('{}') "
        "on conflict (hashtag) do update set hashtag = hashtags.hashtag returning (hid)) "
        "insert into links (wid, hid, weight) "
        "values ((select * from w), (select * from h), 1) "
        "on conflict (wid, hid) do update set weight = links.weight + 1")
    db = DB(dbname=database, host=host, port=int(port),
            user=user, passwd=password)
    db.begin()
    for word in words:
        for hashtag in hashtags:
            db.query(insert_update.format(word, hashtag))
    db.commit()
    db.close()
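# Hedged usage sketch; the connection globals (database, host, port, user,
# password) and the special_match/plaintext filter are assumed to be defined
# elsewhere in the script. Because str.format splices the words straight into
# the SQL, this relies on special_match having already rejected anything that
# could break the quoting.
db_input("loving the new #postgres release with the #python driver")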
import configparser
import logging
import os
import pickle

import pandas as pd
from pg import DB, ProgrammingError


class vol_utils(object):
    def __init__(self):
        self.logger = logging.getLogger('volume_project.sql_utilities')
        self.db_connect()

    def db_connect(self):
        CONFIG = configparser.ConfigParser()
        CONFIG.read('db.cfg')
        dbset = CONFIG['DBSETTINGS']
        self.db = DB(dbname=dbset['database'], host=dbset['host'],
                     user=dbset['user'], passwd=dbset['password'])
        self.logger.info('Database connected.')

    def exec_file(self, filename):
        '''Execute the Python script in filename, searching the current
        directory tree if the path cannot be opened directly.'''
        try:
            f = filename
            with open(f) as script:
                code = script.read()
        except OSError:
            f = None
            for root_f, folders, files in os.walk('.'):
                if filename in files:
                    f = root_f + '/' + filename
                    break
            if f is None:
                self.logger.error('File %s not found!', filename)
                raise Exception('File %s not found!' % filename)
            with open(f) as script:
                code = script.read()
        self.logger.info('Running %s', f)
        exec(code)

    def execute_sql(self, filename):
        f = None
        try:
            f = open(filename)
        except OSError:
            for root_f, folders, files in os.walk('.'):
                if filename in files:
                    f = open(root_f + '/' + filename)
        if f is None:
            self.logger.error('File %s not found!', filename)
            raise Exception('File not found!')
        sql = f.read()
        reconnect = 0
        # Retry on SQL errors in case the connection dropped; give up after
        # five reconnection attempts.
        while True:
            try:
                self.db.query(sql)
                self.db.commit()
                return
            except ProgrammingError:
                self.logger.error('Error in SQL', exc_info=True)
                self.db_connect()
                reconnect += 1
                if reconnect > 5:
                    raise Exception('Check DB connection. Cannot connect')

    def get_sql_results(self, filename, columns,
                        replace_columns=None, parameters=None):
        '''
        Input:
            filename: path to a .sql file, or a query string starting with SELECT
            columns: a list of column names for the resulting dataframe
            replace_columns: a dictionary of {placeholder: replacement string}
            parameters: list of query parameter values
        Output:
            dataframe of results
        '''
        f = None
        try:
            f = open(filename)
        except OSError:
            for root_f, folders, files in os.walk('.'):
                if filename in files:
                    f = open(root_f + '/' + filename)
        if f is None:
            if filename[:6] == 'SELECT':
                # Also accepts SQL queries directly in string form
                sql = filename
            else:
                self.logger.error('File %s not found!', filename)
                raise Exception('File not found!')
        else:
            sql = f.read()
        if replace_columns is not None:
            for key, value in replace_columns.items():
                sql = sql.replace(key, str(value))
        reconnect = 0
        while True:
            try:
                if parameters is not None:
                    return pd.DataFrame(
                        self.db.query(sql, parameters).getresult(),
                        columns=columns)
                return pd.DataFrame(self.db.query(sql).getresult(),
                                    columns=columns)
            except ProgrammingError:
                self.logger.error('Error in SQL', exc_info=True)
                self.db_connect()
                reconnect += 1
                if reconnect > 5:
                    raise Exception('Check Error Message')

    def load_pkl(self, filename):
        f = None
        try:
            f = open(filename, "rb")
        except OSError:
            for root_f, folders, files in os.walk('.'):
                if filename in files:
                    f = open(root_f + '/' + filename, "rb")
        if f is None:
            self.logger.error('File %s not found!', filename)
            raise Exception('File not found!')
        return pickle.load(f)

    def truncatetable(self, tablename):
        reconnect = 0
        while True:
            try:
                self.db.truncate(tablename)
                self.db.commit()
                self.logger.info('%s truncated', tablename)
                return
            except ProgrammingError as pe:
                self.logger.error(pe)
                self.db_connect()
                reconnect += 1
                if reconnect > 5:
                    self.logger.error('Error in SQL', exc_info=True)
                    raise Exception('Check Error Message')

    def inserttable(self, tablename, content):
        reconnect = 0
        while True:
            try:
                self.db.inserttable(tablename, content)
                self.db.commit()
                self.logger.info('Inserted table: %s', tablename)
                break
            except ProgrammingError:
                self.db_connect()
                reconnect += 1
                if reconnect > 5:
                    self.logger.error('Error in SQL', exc_info=True)
                    raise Exception('Check Error Message')

    def __exit__(self):
        self.db.close()
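# Hedged usage sketch; assumes a db.cfg in the working directory with a
# [DBSETTINGS] section holding database/host/user/password. The table and
# column names below are placeholders.
utils = vol_utils()
df = utils.get_sql_results('SELECT centreline_id, volume FROM counts',
                           columns=['centreline_id', 'volume'])
print(df.head())
utils.__exit__()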
# db.query("ALTER TABLE "+config.get('summary_stats', 'shp_table_name')+" ADD column attributes TEXT") # # print "importing raster" # db.query("DROP TABLE IF EXISTS "+config.get('summary_stats', 'raster_table_name')) # p1 = subprocess.Popen(['/usr/bin/raster2pgsql', '-d', '-s', config.get('summary_stats', 'raster_file_srs'), '-t', '100x100', config.get('summary_stats', 'raster_file'), # config.get('summary_stats', 'raster_table_name')], stdout=subprocess.PIPE) # p2 = subprocess.Popen(['/usr/bin/psql', '-p', '5432', config.get('database', 'dbname'), '-U', config.get('database', 'user'), '-h', '127.0.0.1'], # stdin=p1.stdout, stdout=subprocess.PIPE, env=psql_env) # p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits. # output,err = p2.communicate() # # db.query('CREATE INDEX dem_st_convexhull_idx ON '+config.get('summary_stats', 'raster_table_name')+' ' # 'USING gist ((st_convexhull(rast)) public.gist_geometry_ops_2d)') print("updating attributes") q = db.query("SELECT count(*) as count_all from " + config.get('summary_stats', 'shp_table_name')) count_all = q.dictresult()[0]["count_all"] steps = count_all / 500 for s in range(0, steps): try: db.begin() db.query('select raster.updtae_attributes($1, $2, $3, $4)', (config.get('summary_stats', 'raster_table_name'), config.get('summary_stats', 'shp_table_name'), 500, s * 500)) db.commit() print("processed " + str((s * 500))) except Exception, e: print(str(e)) pass
src = cfg[args.src]

print("")
print("------------------------------------------------------------------------------------")
print("Source: " + str(args.src))
print("------------------------------------------------------------------------------------")
srcdb = DB(dbname=src["db"], host=src["host"], port=int(src["port"]),
           user=src["user"], passwd=src["password"])

for srv in args.dst:
    item = cfg[srv]
    print("")
    print("------------------------------------------------------------------------------------")
    print("Destination: " + str(srv))
    print("------------------------------------------------------------------------------------")
    dstdb = DB(dbname=item["db"], host=item["host"], port=int(item["port"]),
               user=item["user"], passwd=item["password"])
    for table in tables:
        # Copy each table inside one transaction: stage the source rows in a
        # temp table, lock the target, then run the per-table update and
        # insert statements held in the upd and insert dicts.
        dstdb.start()
        rows = srcdb.query('SELECT * FROM %s' % table).getresult()
        dstdb.query('CREATE TEMPORARY TABLE newvals ON COMMIT DROP AS TABLE %s WITH NO DATA' % table)
        dstdb.inserttable('newvals', rows)
        dstdb.query('LOCK TABLE %s IN EXCLUSIVE MODE' % table)
        print(upd.get(table))
        dstdb.query(upd.get(table))
        print(insert.get(table))
        dstdb.query(insert.get(table))
        dstdb.commit()
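# upd and insert above map each table name to a pre-built statement; a hedged
# sketch of what one pair might look like for a hypothetical users(id, name)
# table, following the stage-lock-update-insert pattern used in the loop:
upd = {'users': 'UPDATE users SET name = newvals.name '
                'FROM newvals WHERE users.id = newvals.id'}
insert = {'users': 'INSERT INTO users '
                   'SELECT newvals.* FROM newvals '
                   'LEFT JOIN users USING (id) WHERE users.id IS NULL'}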
import datetime
import time
from urllib import parse as urlparse

from pg import DB


class BotDB:
    def __init__(self, db_url):
        # Register the postgres:// scheme so urlparse splits out netloc parts.
        urlparse.uses_netloc.append("postgres")
        self.__db_url = db_url
        url = urlparse.urlparse(db_url)
        self.__db = DB(
            dbname=url.path[1:],
            user=url.username,
            passwd=url.password,
            host=url.hostname,
            port=url.port
        )

    def insertThesis(self, init_id, chat_id, user_id, body):
        ts = time.time()
        timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
        print("inserting thesis")
        print(init_id, chat_id, user_id, body, timestamp)
        self.__db.insert("theses", row={"init_id": init_id, "chat_id": chat_id,
                                        "user_id": user_id, "body": body})
        print("done")
        self.__db.commit()

    def getThesisByIds(self, init_id, chat_id):
        query = self.__db.query(
            "SELECT * FROM theses WHERE init_id = %d AND chat_id = %d;"
            % (init_id, chat_id))
        dict_res = query.dictresult()
        if len(dict_res) == 0:
            return False
        return dict_res[0]

    def getThesisByBody(self, body):
        # Parameterized so quotes in the body cannot break the query.
        query = self.__db.query("SELECT * FROM theses WHERE body = $1;", (body,))
        dict_res = query.dictresult()
        if len(dict_res) == 0:
            return False
        return dict_res[0]

    def getLastThesesByTime(self, chat_id, interval):
        query = self.__db.query(
            "SELECT * FROM theses WHERE chat_id = %s "
            "AND creation_time > current_timestamp - interval '%s';"
            % (chat_id, interval))
        dict_res = query.dictresult()
        if len(dict_res) == 0:
            return False
        return dict_res

    def getTodayTheses(self, chat_id):
        query = self.__db.query(
            "SELECT * FROM theses WHERE chat_id = %s "
            "AND creation_time > current_date;" % chat_id)
        dict_res = query.dictresult()
        if len(dict_res) == 0:
            return False
        return dict_res

    def insertUser(self, user_id, username, first_name, last_name):
        row = {"user_id": user_id}
        if username:
            row["username"] = username
        if first_name:
            row["first_name"] = first_name
        if last_name:
            row["last_name"] = last_name
        self.__db.insert('users', row=row)
        self.__db.commit()

    def getUserById(self, user_id):
        query = self.__db.query("SELECT * FROM users WHERE user_id = %d;" % user_id)
        dict_res = query.dictresult()
        if len(dict_res) == 0:
            return False
        return dict_res[0]

    def insertBotMessage(self, chat_id, message_id, owner_id):
        row = {"chat_id": chat_id, "message_id": message_id, "owner_id": owner_id}
        self.__db.insert('bot_messages', row=row)
        self.__db.commit()

    def getBotMessage(self, chat_id, message_id):
        query = self.__db.query(
            "SELECT * FROM bot_messages WHERE chat_id = %d AND message_id = %d;"
            % (chat_id, message_id))
        dict_res = query.dictresult()
        if len(dict_res) == 0:
            return False
        return dict_res[0]

    def close(self):
        self.__db.close()
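# Hedged usage sketch; the URL is a placeholder in the postgres:// form the
# constructor expects, and the ids are arbitrary example values.
bot_db = BotDB("postgres://bot:secret@localhost:5432/botdb")
bot_db.insertUser(42, "alice", "Alice", None)
print(bot_db.getUserById(42))
bot_db.close()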
# db.query("ALTER TABLE "+config.get('summary_stats', 'shp_table_name')+" ADD column attributes TEXT") # # print "importing raster" # db.query("DROP TABLE IF EXISTS "+config.get('summary_stats', 'raster_table_name')) # p1 = subprocess.Popen(['/usr/bin/raster2pgsql', '-d', '-s', config.get('summary_stats', 'raster_file_srs'), '-t', '100x100', config.get('summary_stats', 'raster_file'), # config.get('summary_stats', 'raster_table_name')], stdout=subprocess.PIPE) # p2 = subprocess.Popen(['/usr/bin/psql', '-p', '5432', config.get('database', 'dbname'), '-U', config.get('database', 'user'), '-h', '127.0.0.1'], # stdin=p1.stdout, stdout=subprocess.PIPE, env=psql_env) # p1.stdout.close() # Allow p1 to receive a SIGPIPE if p2 exits. # output,err = p2.communicate() # # db.query('CREATE INDEX dem_st_convexhull_idx ON '+config.get('summary_stats', 'raster_table_name')+' ' # 'USING gist ((st_convexhull(rast)) public.gist_geometry_ops_2d)') print("updating attributes") q = db.query("SELECT count(*) as count_all from "+config.get('summary_stats', 'shp_table_name')) count_all = q.dictresult()[0]["count_all"] steps = count_all/500 for s in range(0, steps): try: db.begin() db.query('select raster.updtae_attributes($1, $2, $3, $4)', (config.get('summary_stats', 'raster_table_name'), config.get('summary_stats', 'shp_table_name'), 500, s*500)) db.commit() print("processed "+str((s * 500))) except Exception, e: print(str(e)) pass