def insert(review_trend: CitySentimentResult):
    with current_app.app_context():
        insert_into = "INSERT INTO {} (".format(TABLE)
        values = "VALUES ("
        values_arr = []
        if review_trend.id is not None:
            insert_into += "{}, ".format(COLUMNS[0])
            values += "%s::uuid, "
            values_arr.append(review_trend.id)
        if review_trend.benchmark is not None:
            insert_into += "{}, ".format(COLUMNS[1])
            values += "%s::uuid, "
            values_arr.append(review_trend.benchmark.benchmark_id)
        if review_trend.stars is not None:
            insert_into += "{}, ".format(COLUMNS[2])
            values += "%s, "
            values_arr.append(review_trend.stars)
        if review_trend.sentiment is not None:
            insert_into += "{}, ".format(COLUMNS[3])
            values += "%s, "
            values_arr.append(review_trend.sentiment)
        # trim the trailing ", " from each clause and close them
        insert_into = insert_into[:-2] + ") "
        values = values[:-2] + ");"

        # execute and commit; bind the parameters once via execute() rather
        # than mogrify-then-execute, which would substitute them twice
        db = get_db()
        cur = db.cursor()
        cur.execute(insert_into + values, values_arr)
        db.commit()
def insert(sim: Sim1):
    with current_app.app_context():
        insert_into = "INSERT INTO {} (".format(TABLE)
        values = "VALUES ("
        values_arr = []
        if sim.id is not None:
            insert_into += "{}, ".format(COLUMNS[0])
            values += "%s::uuid, "
            values_arr.append(sim.id)
        if sim.benchmark is not None:
            insert_into += "{}, ".format(COLUMNS[1])
            values += "%s::uuid, "
            values_arr.append(sim.benchmark.benchmark_id)
        if sim.avg_ttas is not None:
            insert_into += "{}, ".format(COLUMNS[2])
            values += "%s, "
            values_arr.append(sim.avg_ttas)
        if sim.avg_tth is not None:
            insert_into += "{}, ".format(COLUMNS[3])
            values += "%s, "
            values_arr.append(sim.avg_tth)
        # trim the trailing ", " from each clause and close them
        insert_into = insert_into[:-2] + ") "
        values = values[:-2] + ");"

        # execute and commit; bind the parameters once via execute() rather
        # than mogrify-then-execute, which would substitute them twice
        db = get_db()
        cur = db.cursor()
        cur.execute(insert_into + values, values_arr)
        db.commit()
def total():
    with current_app.app_context():
        conn = get_db()
        cur = conn.cursor()
        query = "SELECT count(id) FROM {}".format(TABLE)
        cur.execute(query)
        return cur.fetchone()
def set_status(db_name, status, app):
    if status == 'DOWN' or status == 'UP':
        with app.app_context():
            conn = get_db()
            cur = conn.cursor()
            query = "UPDATE {} SET status = %s WHERE name = %s".format(TABLE)
            logging.debug("Executing query: %s with %s", query, (status, db_name))
            cur.execute(query, (status, db_name,))
            if cur.rowcount > 0:
                conn.commit()
            else:
                return None
    else:
        raise AttributeError('Database status may only be UP or DOWN')
def reset_processing_jobs():
    with current_app.app_context():
        conn = get_db()
        cur = conn.cursor()
        query = "UPDATE {} SET status = 'WAITING' WHERE status = 'PROCESSING'".format(TABLE)
        cur.execute(query)
        logging.debug("Executed query: %s", cur.query)
        conn.commit()
def is_job_being_processed():
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id FROM {} WHERE status = 'PROCESSING'".format(TABLE)
        cur.execute(query)
        return cur.rowcount > 0
def set_as(benchmark_id, status):
    if status not in ('WAITING', 'PROCESSING', 'COMPLETE'):
        raise Exception(
            'Status must be either WAITING, PROCESSING, or COMPLETE. '
            'Your input was "{}"'.format(status))
    with current_app.app_context():
        conn = get_db()
        cur = conn.cursor()
        query = "UPDATE {} SET status = %s WHERE id = %s".format(TABLE)
        cur.execute(query, (status, benchmark_id,))
        logging.debug("Executed query: %s", cur.query)
        conn.commit()
def insert_log(server_log: ServerLog):
    with current_app.app_context():
        conn = get_db()
        cur = conn.cursor()
        query = "INSERT INTO {} (captured_at, memory_perc) " \
                "VALUES (%s, %s) RETURNING id".format(TABLE)
        cur.execute(query, (server_log.captured_at, server_log.memory_perc,))
        server_log.log_id = cur.fetchone()[0]
        conn.commit()
def find_name(row_name):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, name, description, icon FROM {} WHERE name = %s".format(TABLE)
        cur.execute(query, (row_name,))
        if cur.rowcount > 0:
            result = dict(zip(COLUMNS, cur.fetchone()))
        else:
            return None
        return dataset_decoder(result)
def get_result(benchmark_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, benchmark_id, stars, sentiment " \
                "FROM {} WHERE benchmark_id = %s::uuid".format(TABLE)
        cur.execute(query, (benchmark_id,))
        if cur.rowcount > 0:
            result = dict(zip(COLUMNS, cur.fetchone()))
        else:
            return None
        return city_sentiment_decoder(result)
def insert_log(cpu_log: CPULog):
    with current_app.app_context():
        conn = get_db()
        cur = conn.cursor()
        query = "INSERT INTO {} (system_log_id, core_id, cpu_perc) " \
                "VALUES (%s, %s, %s) RETURNING id".format(TABLE)
        cur.execute(query, (cpu_log.system_log_id, cpu_log.core_id, cpu_log.cpu_perc,))
        cpu_log.log_id = cur.fetchone()[0]
        conn.commit()
def insert(benchmark):
    with current_app.app_context():
        insert_into = "INSERT INTO {} (".format(TABLE)
        values = "VALUES ("
        values_arr = []
        if benchmark.benchmark_id is not None:
            insert_into += "{}, ".format(COLUMNS[0])
            values += "%s::uuid, "
            values_arr.append(benchmark.benchmark_id)
        if benchmark.database is not None:
            insert_into += "{}, ".format(COLUMNS[1])
            values += "%s::uuid, "
            values_arr.append(benchmark.database)
        if benchmark.dataset is not None:
            insert_into += "{}, ".format(COLUMNS[2])
            values += "%s::uuid, "
            values_arr.append(benchmark.dataset)
        if benchmark.analysis is not None:
            insert_into += "{}, ".format(COLUMNS[3])
            values += "%s::uuid, "
            values_arr.append(benchmark.analysis)
        if benchmark.date_executed is not None:
            insert_into += "{}, ".format(COLUMNS[4])
            values += "%s, "
            values_arr.append(benchmark.date_executed)
        if benchmark.query_time is not None:
            insert_into += "{}, ".format(COLUMNS[5])
            values += "%s, "
            values_arr.append(benchmark.query_time)
        if benchmark.analysis_time is not None:
            insert_into += "{}, ".format(COLUMNS[6])
            values += "%s, "
            values_arr.append(benchmark.analysis_time)
        if benchmark.status is not None:
            insert_into += "{}, ".format(COLUMNS[7])
            values += "%s, "
            values_arr.append(benchmark.status)
        # trim the trailing ", " from each clause and close them
        insert_into = insert_into[:-2] + ") "
        values = values[:-2] + ");"

        # execute and commit immediately so the job scheduler sees the row;
        # mogrify is used only to log the fully-bound statement, and the
        # parameters are bound exactly once by execute()
        db = get_db()
        cur = db.cursor()
        sql = insert_into + values
        logging.debug("Executing query: %s", cur.mogrify(sql, values_arr))
        cur.execute(sql, values_arr)
        db.commit()
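# The insert() helpers in this module all build their column list and VALUES
# clause with the same append-and-trim pattern. Below is a minimal sketch of a
# shared builder, assuming the psycopg2-style %s placeholders used throughout;
# the name _build_insert and the (column, cast, value) triples are
# illustrative, not part of the existing code.
def _build_insert(table, fields):
    """fields: iterable of (column, cast, value) triples; None values are skipped."""
    columns, placeholders, params = [], [], []
    for column, cast, value in fields:
        if value is None:
            continue
        columns.append(column)
        placeholders.append("%s::{}".format(cast) if cast else "%s")
        params.append(value)
    sql = "INSERT INTO {} ({}) VALUES ({});".format(
        table, ", ".join(columns), ", ".join(placeholders))
    return sql, params

# e.g. sql, params = _build_insert(TABLE, [(COLUMNS[0], "uuid", sim.id),
#                                          (COLUMNS[2], None, sim.avg_ttas)])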
def query_log(system_log_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, system_log_id, core_id, cpu_perc " \
                "FROM {} WHERE system_log_id = %s ORDER BY core_id ASC".format(TABLE)
        cur.execute(query, (system_log_id,))
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [cpu_log_decoder(row) for row in rows]
def get_queries(analysis_id, database_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, analysis_id, database_id, query, language " \
                "FROM {} WHERE analysis_id = %s::uuid AND database_id = %s::uuid".format(TABLE)
        cur.execute(query, (analysis_id, database_id))
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [query_decoder(row) for row in rows]
def find(row_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, database_id, dataset_id, analysis_id, date_executed, query_time, " \
                "analysis_time, status FROM {} WHERE id = %s".format(TABLE)
        cur.execute(query, (row_id,))
        if cur.rowcount > 0:
            result = dict(zip(COLUMNS, cur.fetchone()))
        else:
            return None
        return benchmark_decoder(result)
def get_results(benchmark_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, benchmark_id, stars, length, cool, funny, useful, sentiment " \
                "FROM {} WHERE benchmark_id = %s::uuid".format(TABLE)
        cur.execute(query, (benchmark_id,))
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [review_trend_decoder(row) for row in rows]
def get_results(benchmark_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, benchmark_id, business, sentiment_average, star_average, total_reviews " \
                "FROM {} WHERE benchmark_id = %s::uuid".format(TABLE)
        cur.execute(query, (benchmark_id,))
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [kate_decoder(row) for row in rows]
def find(row_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, name, description, icon, status " \
                "FROM {} WHERE id = %s".format(TABLE)
        cur.execute(query, (row_id,))
        if cur.rowcount > 0:
            result = dict(zip(COLUMNS, cur.fetchone()))
        else:
            return None
        return database_decoder(result)
def get_results(benchmark_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, benchmark_id, avg_ttas, avg_tth " \
                "FROM {} WHERE benchmark_id = %s::uuid".format(TABLE)
        cur.execute(query, (benchmark_id,))
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [sim1_decoder(row) for row in rows]
def get_unstarted_jobs():
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, database_id, dataset_id, analysis_id, date_executed, query_time, " \
                "analysis_time, status FROM {} WHERE status = 'WAITING'".format(TABLE)
        cur.execute(query)
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [benchmark_decoder(row) for row in rows]
def update(benchmark: Benchmark):
    with current_app.app_context():
        update_stmt = "UPDATE {} SET ".format(TABLE)
        values_arr = []
        if benchmark.database is not None:
            update_stmt += "{} = %s::uuid, ".format(COLUMNS[1])
            values_arr.append(benchmark.database.database_id)
        if benchmark.dataset is not None:
            update_stmt += "{} = %s::uuid, ".format(COLUMNS[2])
            values_arr.append(benchmark.dataset.dataset_id)
        if benchmark.analysis is not None:
            update_stmt += "{} = %s::uuid, ".format(COLUMNS[3])
            values_arr.append(benchmark.analysis.analysis_id)
        if benchmark.date_executed is not None:
            update_stmt += "{} = %s, ".format(COLUMNS[4])
            values_arr.append(benchmark.date_executed)
        if benchmark.query_time is not None:
            update_stmt += "{} = %s, ".format(COLUMNS[5])
            values_arr.append(benchmark.query_time)
        if benchmark.analysis_time is not None:
            update_stmt += "{} = %s, ".format(COLUMNS[6])
            values_arr.append(benchmark.analysis_time)
        if benchmark.status is not None:
            update_stmt += "{} = %s, ".format(COLUMNS[7])
            values_arr.append(benchmark.status)
        # replace the trailing ", " with the WHERE clause
        update_stmt = update_stmt[:-2] + " WHERE id = %s::uuid"
        values_arr.append(benchmark.benchmark_id)

        # execute and commit immediately so the job scheduler sees the change;
        # parameters are bound exactly once by execute()
        db = get_db()
        cur = db.cursor()
        logging.debug("Executing query: %s with %s", update_stmt, values_arr)
        cur.execute(update_stmt, values_arr)
        db.commit()
def query_results(n=None):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, dataset_id, name, description " \
                "FROM {} ORDER BY name DESC".format(TABLE)
        if n is None:
            cur.execute(query)
        else:
            cur.execute(query + " LIMIT %s", (n,))
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [analysis_decoder(row) for row in rows]
def get_performance(analysis_id):
    with current_app.app_context():
        cur = get_db().cursor()
        query = 'SELECT databases.name, AVG(query_time) as avg, STDDEV_SAMP(query_time) as stddev ' \
                'FROM benchmarks ' \
                'JOIN databases ON database_id = databases.id ' \
                'WHERE benchmarks.analysis_id = %s ' \
                'GROUP BY databases.name ORDER BY databases.name'
        cur.execute(query, (analysis_id,))
        perf_headings = ['name', 'avg', 'stddev']
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(perf_headings, row)))
        else:
            return "No analysis results!"
        return rows
def paginate(page_size, page_number):
    with current_app.app_context():
        conn = get_db()
        cur = conn.cursor()
        query = "SELECT * FROM {} ORDER BY date_executed DESC, status ASC " \
                "LIMIT %s OFFSET %s".format(TABLE)
        try:
            offset = int(page_number) * int(page_size)
            cur.execute(query, (page_size, offset))
            logging.debug("Executed query: %s", cur.query)
        except Exception as e:
            logging.error("Error while querying for pagination: %s", e)
            return None
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [benchmark_decoder(row) for row in rows]
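# A hedged usage sketch for paginate(): page numbers are assumed to be
# zero-based, matching the offset arithmetic above (page 0 gives OFFSET 0).
# The variable names are illustrative.
#
#     first_page = paginate(page_size=20, page_number=0)   # rows 1-20, or None
#     second_page = paginate(page_size=20, page_number=1)  # rows 21-40, or None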
def query_results(n=None):
    with current_app.app_context():
        cur = get_db().cursor()
        query = "SELECT id, database_id, dataset_id, analysis_id, date_executed, query_time, " \
                "analysis_time, status FROM {} ORDER BY date_executed DESC".format(TABLE)
        if n is None:
            cur.execute(query)
        else:
            cur.execute(query + " LIMIT %s", (n,))
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [benchmark_decoder(row) for row in rows]
def insert(kate: KateResult):
    with current_app.app_context():
        insert_into = "INSERT INTO {} (".format(TABLE)
        values = "VALUES ("
        values_arr = []
        if kate.id is not None:
            insert_into += "{}, ".format(COLUMNS[0])
            values += "%s::uuid, "
            values_arr.append(kate.id)
        if kate.benchmark is not None:
            insert_into += "{}, ".format(COLUMNS[1])
            values += "%s::uuid, "
            values_arr.append(kate.benchmark.benchmark_id)
        if kate.business is not None:
            insert_into += "{}, ".format(COLUMNS[2])
            values += "%s, "
            values_arr.append(kate.business)
        if kate.sentiment_average is not None:
            insert_into += "{}, ".format(COLUMNS[3])
            values += "%s, "
            values_arr.append(kate.sentiment_average)
        if kate.star_average is not None:
            insert_into += "{}, ".format(COLUMNS[4])
            values += "%s, "
            values_arr.append(kate.star_average)
        if kate.total_reviews is not None:
            insert_into += "{}, ".format(COLUMNS[5])
            values += "%s, "
            values_arr.append(kate.total_reviews)
        # trim the trailing ", " from each clause and close them
        insert_into = insert_into[:-2] + ") "
        values = values[:-2] + ");"

        # execute and commit; bind the parameters once via execute() rather
        # than mogrify-then-execute, which would substitute them twice
        db = get_db()
        cur = db.cursor()
        cur.execute(insert_into + values, values_arr)
        db.commit()
def query_logs(from_date=None, to_date=None):
    where_condition = None
    sql_args = ()
    if from_date is not None and to_date is not None:
        where_condition = "WHERE captured_at >= %s AND captured_at <= %s"
        sql_args = (from_date, to_date,)
    elif from_date is not None:
        where_condition = "WHERE captured_at >= %s"
        sql_args = (from_date,)
    elif to_date is not None:
        where_condition = "WHERE captured_at <= %s"
        sql_args = (to_date,)
    with current_app.app_context():
        cur = get_db().cursor()
        if where_condition is not None:
            query = "SELECT id, captured_at, memory_perc FROM {} {} " \
                    "ORDER BY captured_at ASC".format(TABLE, where_condition)
        else:
            query = "SELECT id, captured_at, memory_perc FROM {} " \
                    "ORDER BY captured_at ASC".format(TABLE)
        cur.execute(query, sql_args)
        rows = []
        if cur.rowcount > 0:
            for row in cur.fetchall():
                rows.append(dict(zip(COLUMNS, row)))
        else:
            return None
        return [server_log_decoder(row) for row in rows]
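# A hedged usage sketch for query_logs(): both bounds are inclusive (>= / <=),
# so passing from_date and to_date returns a closed range on captured_at.
# The datetime values are illustrative.
#
#     from datetime import datetime
#     logs = query_logs(from_date=datetime(2020, 1, 1),
#                       to_date=datetime(2020, 1, 31))  # None if nothing matched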