def get_factor_value_by_subject_and_condition(console_subject, factor_name_list, filter_list, current_user):
    """Return the values of the requested factor columns for every row of the subject's dataset.

    :param console_subject: subject whose dataset query is built
    :param factor_name_list: names of the factor columns to extract
    :param filter_list: optional filters applied to the query before execution
    :param current_user: user context forwarded to the query builder
    :return: list of rows, each a list holding only the requested factors' values
    :raises KeyError: if none of the factor names can be resolved against the
        result-set columns (``cur.description``)
    """
    query = build_query_for_subject(console_subject, current_user)
    if filter_list:
        query = _filter(query, filter_list)
    conn = get_connection()
    cur = conn.cursor()
    sql = query.get_sql()
    cur.execute(sql)
    rows = cur.fetchall()
    # Map requested factor names to their column positions in the result set.
    index_list = __find_factor_index(cur.description, factor_name_list)
    if not index_list:
        raise KeyError("factor_name :{0} can't find in subject {1}".format(
            factor_name_list, console_subject.name))
    # Project each row down to only the requested factor columns.
    return [[rw[index] for index in index_list] for rw in rows]
def load_chart_dataset_temp(report):
    """Execute the chart query for *report* and return the raw result rows."""
    chart_query = build_query_for_subject_chart(report.reportId, report)
    sql_text = chart_query.get_sql()
    log.info("sql: {0}".format(sql_text))
    cursor = get_connection().cursor()
    cursor.execute(sql_text)
    dataset = cursor.fetchall()
    log.debug("sql result: {0}".format(dataset))
    return dataset
def load_slow_pipeline_status(top_n):
    """Load the *top_n* slowest pipeline runs from the monitor table.

    Each returned row is ``[complete_time + "ms", status, pipeline_name]``,
    where the stored pipeline id is resolved to its display name.

    :param top_n: maximum number of rows to return
    :return: list of formatted rows, slowest first
    :raises ValueError: if *top_n* is not convertible to an integer

    Fixes: *top_n* was interpolated into the SQL text unvalidated (injection
    vector for a non-numeric value), and the driver's row objects were mutated
    in place.
    """
    # Coerce to int so a non-numeric top_n cannot be injected into the SQL text.
    limit = int(top_n)
    sql = "SELECT complete_time,status,pipelineId FROM monitor_pipeline ORDER BY complete_time DESC LIMIT {0}".format(
        limit)
    cur = get_connection().cursor()
    cur.execute(sql)
    results = []
    for row in cur.fetchall():
        # Build a fresh row instead of mutating the driver's row object.
        results.append([
            str(row[0]) + "ms",                # duration rendered with unit
            row[1],                            # status, passed through
            load_pipeline_by_id(row[2]).name,  # id resolved to display name
        ])
    return results
def __load_chart_dataset(query, query_monitor=None):
    """Execute *query* and return its rows, optionally recording a timing
    summary onto *query_monitor*."""
    started_at = time.time()
    connection = get_connection()
    sql_text = query.get_sql()
    summary = build_query_summary(sql_text)
    log.info("sql: {0}".format(sql_text))
    cursor = connection.cursor()
    cursor.execute(sql_text)
    dataset = cursor.fetchall()
    log.debug("sql result: {0}".format(dataset))
    summary.resultSummary = build_result_summary(dataset, started_at)
    if query_monitor:
        # Attach this query's summary and the total wall-clock time.
        query_monitor.querySummaryList.append(summary)
        query_monitor.executionTime = time.time() - started_at
    return dataset
async def load_dataset_by_subject_id(subject_id, pagination: Pagination, current_user):
    """Load one page of a subject's dataset together with its total row count.

    :param subject_id: id of the console subject to load
    :param pagination: page/size information applied to the data query
    :param current_user: user context forwarded to the query builders
    :return: ``(rows, total_count)`` on success; implicitly ``None`` when the
        query fails (existing contract — the error is recorded on the monitor
        instead of being raised)

    The query monitor is always persisted, success or failure.

    Fix: removed a stray debug ``print(query_sql)`` and dead commented-out code.
    """
    console_subject = load_console_subject_by_id(subject_id, current_user)
    query_monitor: QueryMonitor = build_query_monitor(console_subject, query_type="dataset")
    try:
        start = time.time()
        # Count query: total rows, used for pagination metadata.
        count_query = build_count_query_for_subject(console_subject, current_user)
        count_sql = count_query.get_sql()
        query_count_summary = build_query_summary(count_sql)
        conn = get_connection()
        cur = conn.cursor()
        log.info("sql count:{0}".format(count_sql))
        cur.execute(count_sql)
        count_rows = cur.fetchone()
        log.info("sql result: {0}".format(count_rows))
        query_count_summary.resultSummary = build_result_summary(
            count_rows, start)
        query_monitor.querySummaryList.append(query_count_summary)

        # Page query: the requested slice of the dataset.
        query_start = time.time()
        query = build_query_for_subject(console_subject, current_user)
        query_sql = build_pagination(query.get_sql(), pagination)
        query_summary = build_query_summary(query_sql)
        log.info("sql:{0}".format(query_sql))
        cur = conn.cursor()
        cur.execute(query_sql)
        rows = cur.fetchall()
        log.debug("sql result: {0}".format(rows))
        query_summary.resultSummary = build_result_summary(rows, query_start)
        query_monitor.querySummaryList.append(query_summary)
        query_monitor.executionTime = time.time() - start
        return rows, count_rows[0]
    except Exception as e:
        # Deliberate catch-all: failures are recorded on the monitor rather
        # than propagated to the caller.
        log.exception(e)
        query_monitor.error = traceback.format_exc()
        query_monitor.success = False
    finally:
        await save_query_monitor_data(query_monitor)
def load_dataset_by_subject_id(subject_id, pagination: Pagination):
    """Fetch one page of the subject's dataset plus its total row count.

    Returns ``(rows, total_count)``.
    """
    # TODO report monitor
    subject = load_console_subject_by_id(subject_id)
    page_query = build_query_for_subject(subject)
    count_sql = build_count_query_for_subject(subject).get_sql()
    connection = get_connection()

    count_cursor = connection.cursor()
    log.info("sql count:{0}".format(count_sql))
    count_cursor.execute(count_sql)
    total_row = count_cursor.fetchone()
    log.info("sql result: {0}".format(total_row))

    page_sql = page_query.get_sql() + " " + build_pagination(pagination)
    log.info("sql:{0}".format(page_sql))
    page_cursor = connection.cursor()
    page_cursor.execute(page_sql)
    page_rows = page_cursor.fetchall()
    log.debug("sql result: {0}".format(page_rows))
    return page_rows, total_row[0]
def get_factor_value_by_subject_and_condition(console_subject, factor_name, filter_list):
    """Return the values of a single factor column across the subject's dataset.

    :param console_subject: subject whose dataset query is built
    :param factor_name: name of the factor column to extract
    :param filter_list: optional filters applied to the query before execution
    :return: list of the factor's values, one per result row
    :raises KeyError: if *factor_name* cannot be resolved against the
        result-set columns (``cur.description``)

    Fixes: removed commented-out debug ``print`` calls; replaced the manual
    append loop with a comprehension.
    """
    query = build_query_for_subject(console_subject)
    if filter_list:
        query = _filter(query, filter_list)
    conn = get_connection()
    cur = conn.cursor()
    sql = query.get_sql()
    cur.execute(sql)
    rows = cur.fetchall()
    # Resolve the factor name to its column position in the result set.
    index = __find_factor_index(cur.description, factor_name)
    if index is None:
        raise KeyError("factor_name :{0} can't find in subject {1}".format(factor_name, console_subject.name))
    return [rw[index] for rw in rows]
def build_query_for_consume(console_subject, indicators: List[Indicator], where_: Where, current_user):
    """Wrap the subject's dataset query in a CTE, apply optional indicator
    aggregation and filtering, execute it, and return the result rows."""
    alias = "consume_dataset"
    base_query = build_query_for_subject(console_subject, current_user)
    # Expose the dataset query as a named CTE and select from it.
    query = PrestoQuery.with_(base_query, alias).from_(AliasedQuery(alias))
    if indicators:
        select_terms, group_terms = build_indicators(indicators, alias)
        query = query.select(*select_terms).groupby(*group_terms)
    else:
        query = query.select("*")
    if where_:
        query = query.where(build_where(where_, alias))
    sql_text = query.get_sql()
    log.info("sql:{0}".format(sql_text))
    cursor = get_connection().cursor()
    cursor.execute(sql_text)
    return cursor.fetchall()