def test_join_with_with(self):
    """A WITH clause whose CTE is then JOINed to the main table."""
    cte = Query.from_(self.table_efg).select('fizz')
    cte_ref = AliasedQuery('an_alias')
    query = (
        Query.with_(cte, "an_alias")
        .from_(self.table_abc)
        .join(cte_ref)
        .on(cte_ref.fizz == self.table_abc.buzz)
        .select('*')
    )
    self.assertEqual(
        'WITH an_alias AS (SELECT "fizz" FROM "efg") '
        'SELECT * FROM "abc" JOIN an_alias ON "an_alias"."fizz"="abc"."buzz"',
        str(query))
def test_with_more_than_one(self):
    """Two chained WITH clauses; the second CTE selects from the first."""
    first = Query.from_(self.table_efg).select("fizz")
    second = Query.from_("a1").select("foo")
    a1 = AliasedQuery("a1", first)
    a2 = AliasedQuery("a2", second)
    query = (
        Query.with_(first, "a1")
        .with_(second, "a2")
        .from_("a1")
        .from_("a2")
        .select(a1.fizz, a2.foo)
    )
    self.assertEqual(
        'WITH a1 AS (SELECT "fizz" FROM "efg") ,a2 AS (SELECT "foo" FROM "a1") SELECT "a1"."fizz","a2"."foo" FROM "a1","a2"',
        str(query),
    )
def process_date_diff_args(
        self, args: str) -> Tuple[CustomFunction, CustomFunction]:
    """Parse the two comma-separated date_diff arguments into date expressions.

    Each argument is either the literal "now" (mapped to current_date()),
    a "topic.factor" reference (resolved to date(<field>) on the topic's
    table or space-filtered alias), or a plain value wrapped in date(...).

    Raises ValueError when args does not split into exactly two parts.
    """
    arg_list = args.split(",")
    if len(arg_list) != 2:
        raise ValueError("Date_diff have invalid args", arg_list)
    result = []
    for arg in arg_list:
        if arg == "now":
            # NOTE(review): no strip() here — "now" padded with spaces
            # (e.g. "a, now") falls into the else branch instead; confirm
            # inputs are always unpadded.
            date = current_date()
        else:
            date_fnc = CustomFunction("date", ["col1"])
            if "." in arg:
                # "topic.factor" form: resolve the topic's table (or its
                # space-filtered alias) and wrap the factor field in date().
                items = arg.split(".")
                topic_name = items[0].strip()
                topic = get_topic_by_name(topic_name, None)
                table = None
                if self.topic_space_filter:
                    if self.topic_space_filter(self.param.topicId):
                        # Space filter applies: reference the filtered CTE alias.
                        alias_ = self.topic_space_filter(
                            self.param.topicId)["alias"]
                        table = AliasedQuery(alias_)
                if table is None:
                    table = build_table_by_topic_id(topic.topicId)
                factor_name = items[1].strip()
                date = date_fnc(Field(factor_name, None, table))
            else:
                # Plain literal / column name — passed through date() as-is.
                date = date_fnc(arg)
        result.append(date)
    return tuple(result)
def build_indicators(indicators: List[ReportIndicator],
                     dataset_columns: List[Column],
                     dataset_query_alias):
    """Build select expressions (and group-by fields) for report indicators.

    Indicators whose arithmetic names a known aggregation are wrapped in
    the matching aggregate function; all others are selected bare and also
    added to the group-by list. Indicators referencing unknown column ids
    are skipped. Returns (select_terms, group_by_fields).
    """
    aggregators = {
        "sum": fn.Sum,
        "avg": fn.Avg,
        "max": fn.Max,
        "min": fn.Min,
        "count": fn.Count,
    }
    select_terms = []
    group_by_fields = []
    columns = convent_column_list_to_dict(dataset_columns)
    for indicator in indicators:
        column: Column = columns.get(indicator.columnId, None)
        if column is None:
            continue
        field = Field(column.alias, None, AliasedQuery(dataset_query_alias))
        aggregator = aggregators.get(indicator.arithmetic)
        if aggregator is not None:
            select_terms.append(aggregator(field))
        else:
            # Non-aggregated indicator: select it raw and group by it.
            select_terms.append(field)
            group_by_fields.append(field)
    return select_terms, group_by_fields
def test_select_from_with_returning(self):
    """A WITH clause whose body is an INSERT ... RETURNING, used as FROM."""
    insert_cte = PostgreSQLQuery.into(self.table_abc).insert(1).returning('*')
    query = (
        Query.with_(insert_cte, "an_alias")
        .from_(AliasedQuery("an_alias"))
        .select("*")
    )
    self.assertEqual(
        'WITH an_alias AS (INSERT INTO "abc" VALUES (1) RETURNING *) SELECT * FROM an_alias',
        str(query))
def test_insert_with_statement(self):
    """INSERT INTO ... fed by a SELECT over a WITH clause."""
    cte = Query().select(self.table_abc.id).from_(self.table_abc)
    cte_ref = AliasedQuery('sub_qs')
    query = (
        Query()
        .with_(cte, 'sub_qs')
        .into(self.table_abc)
        .select(cte_ref.id)
        .from_(cte_ref)
    )
    self.assertEqual(
        'WITH sub_qs AS (SELECT "id" FROM "abc") INSERT INTO "abc" SELECT "sub_qs"."id" FROM sub_qs',
        str(query)
    )
def test_with(self):
    """Basic WITH clause: select everything from the aliased sub-query."""
    cte = Query.from_(self.table_efg).select('fizz')
    query = (
        Query.with_(cte, "an_alias")
        .from_(AliasedQuery("an_alias"))
        .select('*')
    )
    self.assertEqual(
        'WITH an_alias AS (SELECT "fizz" FROM "efg") SELECT * FROM an_alias',
        str(query))
def test_with_column_recursive(self):
    """A self-referencing CTE with an explicit column list renders WITH RECURSIVE."""
    cte = (
        Query.from_(self.table_efg).select("fizz")
        .union(Query.from_("an_alias").select("fizz"))
    )
    query = (
        Query.with_(cte, "an_alias", Field("fizz"))
        .from_(AliasedQuery("an_alias"))
        .select("*")
    )
    self.assertEqual(
        'WITH RECURSIVE an_alias("fizz") AS ((SELECT "fizz" FROM "efg") UNION (SELECT "fizz" FROM "an_alias")) SELECT * FROM an_alias',
        str(query),
    )
def topic_handler_in_sub_query(self) -> Tuple[Field, ParameterValueType]:
    """Resolve the parameter's factor to a field on the aliased dataset query.

    Scans dataset_columns for the column whose columnId matches
    param.factorId and returns (field referencing the dataset query alias,
    the column parameter's parsed value type).
    """
    param = self.param
    for column in self.dataset_columns:
        if column.columnId == param.factorId:
            # Parse the column's own parameter only to learn its value type.
            parser_ = ConsoleParameterParser(column.parameter,
                                             self.topic_space_filter, None, [])
            parse_result = parser_.parse_parameter()
            table = AliasedQuery(self.dataset_query_alias)
            field = Field(column.alias, None, table)
            value_type = parse_result.value_type
            return field, value_type
    # NOTE(review): when no column matches param.factorId this falls through
    # and implicitly returns None, despite the Tuple annotation — confirm
    # callers handle that case.
def test_update_with_statement(self):
    """UPDATE ... FROM a CTE, with SET and WHERE referencing the alias."""
    table_efg = Table('efg')
    cte = Query.from_(table_efg).select("fizz")
    an_alias = AliasedQuery("an_alias")
    query = (
        Query.with_(cte, "an_alias")
        .update(self.table_abc)
        .from_(an_alias)
        .set(self.table_abc.lname, an_alias.long_name)
        .where(self.table_abc.comp.eq(an_alias.alias_comp))
    )
    self.assertEqual(
        'WITH an_alias AS (SELECT "fizz" FROM "efg") '
        'UPDATE "abc" SET "lname"="an_alias"."long_name" FROM an_alias '
        'WHERE "abc"."comp"="an_alias"."alias_comp"',
        str(query))
def get_query_by_user_filter(
        query: PostgreSQLQuery, filters: List[str],
        inhibit_filters: List[str]) -> PostgreSQLQuery:
    """Wrap *query* in a WITH clause and add one LIKE / NOT LIKE placeholder
    condition per filter entry.

    The filter values themselves are not embedded here: each entry only adds
    a ``%s`` Parameter placeholder on ``object_name``; the actual values are
    bound later by the DB driver when the query is executed.

    Returns the extended PostgreSQLQuery.
    """
    try:
        logging.debug('Получение запроса по критериям пользователя')
        t = Table('an_alias')
        test_query = (PostgreSQLQuery.with_(query, "an_alias").from_(
            AliasedQuery("an_alias")).select('*'))
        # Loop variables are unused by design: one placeholder per entry.
        # (Renamed from "filter", which shadowed the builtin.)
        for _ in filters:
            test_query = test_query.where(
                t.object_name.like(Parameter('%s')))
        for _ in inhibit_filters:
            test_query = test_query.where(
                t.object_name.not_like(Parameter('%s')))
    except Exception as e:
        # Bug fix: logging.error("Error is ", e) passed *e* as a %-format
        # argument with no placeholder in the message, so the exception text
        # was never rendered (logging raised an internal formatting error).
        logging.error("Error is %s", e)
        # NOTE(review): if the failure happened before test_query was bound,
        # the return below raises UnboundLocalError — confirm whether the
        # caller expects that or a None/raise instead.
    return test_query
def topic_handler_in_dataset(
        self) -> Tuple[Union[Field, CustomFunction], ParameterValueType]:
    """Resolve the parameter's (topicId, factorId) to a field on the topic's
    table — or on its space-filtered alias when a topic space filter applies.

    Returns (field, factor type).
    """
    topic_id = self.param.topicId
    topic = get_topic_by_id(topic_id)
    table = None
    # topic_space_filter is deliberately invoked twice (truthiness check,
    # then alias lookup), mirroring the established call pattern.
    if self.topic_space_filter and self.topic_space_filter(topic_id):
        table = AliasedQuery(self.topic_space_filter(topic_id)["alias"])
    if table is None:
        table = build_table_by_topic_id(topic_id)
    factor = get_factor(self.param.factorId, topic)
    return Field(factor.name, None, table), factor.type
def build_dimensions(dimensions: List[ReportDimension],
                     dataset_columns: List[Column],
                     dataset_query_alias):
    """Build select / group-by / order-by terms for report dimensions.

    Dimensions referencing unknown column ids are skipped. Each dimension
    field is wrapped in MAX() for the select list so it survives grouping
    (max acts as "first within the group"), while the bare field is used
    for GROUP BY and ORDER BY. Returns (selects, groupbys, orderbys).
    """
    select_terms = []
    group_by_terms = []
    order_by_terms = []
    columns = convent_column_list_to_dict(dataset_columns)
    for dimension in dimensions:
        column: Column = columns.get(dimension.columnId, None)
        if column is None:
            continue
        field = Field(column.alias, None, AliasedQuery(dataset_query_alias))
        # MAX keeps the dimension in the select expression under GROUP BY.
        select_terms.append(fn.Max(field))
        group_by_terms.append(field)
        order_by_terms.append(field)
    return select_terms, group_by_terms, order_by_terms
def __build_chart_query(report, console_subject, current_user):
    """Assemble the full chart query for *report* over the subject's dataset.

    The dataset query becomes a CTE ("chart_dataset"); indicator and
    dimension select/group-by terms are layered on, then optional
    truncation (order + limit), report filters and funnels are applied.
    Returns the composed PrestoQuery.
    """
    q = build_dataset_query_for_subject(console_subject, current_user)
    dataset_query_alias = "chart_dataset"
    chart_query = PrestoQuery.with_(q, dataset_query_alias).from_(
        AliasedQuery(dataset_query_alias))
    # Indicators first: aggregated measures plus any bare fields to group by.
    _indicator_selects, _indicator_in_group_by = build_indicators(
        report.indicators, console_subject.dataset.columns,
        dataset_query_alias)
    chart_query = chart_query.select(*_indicator_selects).groupby(
        *_indicator_in_group_by)
    # Then dimensions: MAX-wrapped selects plus group-by and order-by fields.
    _selects, _groupbys, _orderbys = build_dimensions(
        report.dimensions, console_subject.dataset.columns,
        dataset_query_alias)
    chart_query = chart_query.select(*_selects).groupby(*_groupbys)
    if report.chart.settings:
        # Truncation setting: {"type": "top"|"bottom"|..., "count": N}.
        truncation = report.chart.settings.get('truncation', None)
        if truncation:
            truncation_type = truncation.get('type')
            count = truncation.get('count', None)
            if truncation_type == "top":
                chart_query = chart_query.orderby(*_orderbys, order=Order.asc)
            elif truncation_type == "bottom":
                chart_query = chart_query.orderby(*_orderbys, order=Order.desc)
            else:
                # Unknown type: still order by dimensions, default direction.
                chart_query = chart_query.orderby(*_orderbys)
            if count:
                chart_query = chart_query.limit(count)
    if report.filters:
        # Filters may reference space-filtered topics, so resolve the filter
        # function before building the WHERE expression.
        topic_space_filter = get_topic_sub_query_with_space_filter(
            console_subject, current_user)
        chart_query = chart_query.where(
            build_report_where(report.filters, topic_space_filter,
                               dataset_query_alias,
                               console_subject.dataset.columns))
    if report.funnels:
        chart_query = chart_query.where(
            build_report_funnels(report.funnels, dataset_query_alias,
                                 console_subject.dataset.columns))
    return chart_query
def build_query_for_consume(console_subject, indicators: List[Indicator],
                            where_: Where, current_user):
    """Build and execute the consume query over the subject's dataset.

    The dataset query becomes a CTE ("consume_dataset"); indicator
    aggregations and an optional WHERE filter are layered on, then the SQL
    is executed and all rows are returned.
    """
    dataset_query = build_query_for_subject(console_subject, current_user)
    dataset_query_alias = "consume_dataset"
    consume_query = PrestoQuery.with_(dataset_query,
                                      dataset_query_alias).from_(
        AliasedQuery(dataset_query_alias))
    if indicators:
        # NOTE(review): called with 2 args, but the build_indicators visible
        # in this file takes (indicators, dataset_columns, alias) — confirm
        # this resolves to a different helper with a 2-arg signature.
        _select, _groupby = build_indicators(indicators, dataset_query_alias)
        consume_query = consume_query.select(*_select).groupby(*_groupby)
    else:
        consume_query = consume_query.select("*")
    if where_:
        filter_ = build_where(where_, dataset_query_alias)
        consume_query = consume_query.where(filter_)
    query_sql = consume_query.get_sql()
    log.info("sql:{0}".format(query_sql))
    # NOTE(review): cursor/connection are not closed here — presumably
    # get_connection() manages pooling/lifetime; verify.
    conn = get_connection()
    cur = conn.cursor()
    cur.execute(query_sql)
    rows = cur.fetchall()
    return rows
def build_dataset_query_for_subject(console_subject, current_user,
                                    for_count=False):
    """Build the dataset query for a console subject.

    Picks the base topic (first join's topic, or the first column's topic
    when there are no joins), wrapping space-filtered topics in WITH
    clauses, then applies each dataset join, the dataset filters, and
    finally either a COUNT(*) select (for_count=True) or the dataset's
    column select fields. Returns None when the subject has no dataset.
    """
    dataset = console_subject.dataset
    if dataset is None:
        return None
    topic_space_filter = get_topic_sub_query_with_space_filter(
        console_subject, current_user)
    if dataset.joins and len(dataset.joins) > 0:
        # Base table comes from the first join's primary topic.
        topic_id = dataset.joins[0].topicId
        topic_table = topic_space_filter(topic_id)
        if topic_table:
            # Space-filtered topic: register its query as a CTE and select
            # from the alias.
            q = PrestoQuery.with_(topic_table["query"],
                                  topic_table["alias"]).from_(
                topic_table["alias"])
        else:
            table = build_table_by_topic_id(topic_id)
            q = PrestoQuery.from_(table)
    else:
        # No joins: base table comes from the first column's parameter topic.
        topic_id = dataset.columns[0].parameter.topicId
        topic_table = topic_space_filter(topic_id)
        if topic_table:
            table = AliasedQuery(topic_table["alias"])
            q = PrestoQuery.with_(topic_table["query"],
                                  topic_table["alias"]).from_(table)
        else:
            table = build_table_by_topic_id(topic_id)
            q = PrestoQuery.from_(table)
    for join in dataset.joins:
        # Resolve the right-hand (secondary) side of the join.
        right_topic_id = join.secondaryTopicId
        right_topic = get_topic_by_id(right_topic_id)
        right_topic_table = topic_space_filter(right_topic_id)
        if right_topic_table:
            # Space-filtered: add its CTE to the query and join on the alias.
            q = q.with_(right_topic_table["query"],
                        right_topic_table["alias"])
            right_table = AliasedQuery(right_topic_table["alias"])
        else:
            right_table = build_table_by_topic_id(right_topic_id)
        # Resolve the left-hand (primary) side of the join.
        left_topic_id = join.topicId
        left_topic = get_topic_by_id(left_topic_id)
        left_topic_table = topic_space_filter(left_topic_id)
        if left_topic_table:
            left_table = AliasedQuery(left_topic_table["alias"])
        else:
            left_table = build_table_by_topic_id(left_topic_id)
        left_factor = get_factor(join.factorId, left_topic)
        left_field = Field(left_factor.name, None, left_table)
        right_factor = get_factor(join.secondaryFactorId, right_topic)
        right_field = Field(right_factor.name, None, right_table)
        # Map the join type string; unknown or empty values fall back to inner.
        if join.type == "inner" or join.type == "":
            join_type = JoinType.inner
        elif join.type == "left":
            join_type = JoinType.left
        elif join.type == "right":
            join_type = JoinType.right
        else:
            join_type = JoinType.inner
        q = q.join(right_table, join_type).on(left_field.eq(right_field))
    q = q.where(build_dataset_where(dataset.filters, topic_space_filter))
    if for_count:
        return q.select(fn.Count("*"))
    else:
        return q.select(
            *build_dataset_select_fields(dataset.columns, topic_space_filter))
def read_account_points(self, rv: int, id_component_type: int) -> List[AccountPoint]:
    """Read all account points.

    Builds a sub-query over AccountPoint joined to its tag, driver node,
    component type and diagnostic/serial-number nodes, exposes it as CTE
    "ap", then left-joins passport data and maps driver GUIDs to
    human-readable driver names.

    :param rv: NOTE(review) — unused; the sub-query filters on a.Rv > 0
        regardless. Confirm whether *rv* was meant to bound that filter.
    :param id_component_type: component type id used in the passport
        node left-join condition.
    :return: list of account points (empty on query failure; the error
        is logged and swallowed).
    """
    a = Table('AccountPoint', alias='a')
    t = Table('Tag', alias='t')
    n = Table('Node', alias='n')
    n2 = Table('Node', alias='n2')
    n3 = Table('Node', alias='n3')
    n4 = Table('Node', alias='n4')
    p = Table('ObjPassport', alias='p')
    c = Table('ComponentType', alias='c')
    # Bug fix: removed unused "s = Table('DataString', alias='c')" — it was
    # never referenced and its alias duplicated ComponentType's alias 'c'.
    sub_query = (
        Query.from_(a).join(
            t, how=JoinType.inner).on(a.Id == t.AccountId).join(
                n, how=JoinType.inner).on(a.DriverId == n.Id).join(
                    c, how=JoinType.inner).on(n.IdComponentType == c.Id).join(
                        n2, how=JoinType.inner).on(a.Id == n2.Id).join(
                            n3, how=JoinType.inner).
        on((n2.IdOwn == n3.IdOwn)
           & (n3.BrowseName == 'Diagnostic')).join(
               n4, how=JoinType.inner).on((n3.Id == n4.IdOwn) &
                                          (n4.BrowseName == 'SerialNumber')).
        where((a.Del == self._FALSE_)
              & (t.TagCategory == 'CounterAplus')
              & (t.TagName == 'Day1')
              & (a.Rv > 0)))
    if self.__needRvCast:
        # Some backends need Rv widened explicitly to BIGINT.
        sub_query = sub_query.select(a.Id, a.DisplayName, t.IdTagDef,
                                     a.DriverId, c.Guid,
                                     Cast(a.Rv, 'BIGINT', 'Rv'),
                                     n3.BrowseName,
                                     n4.Id.as_('serial_num'))
    else:
        sub_query = sub_query.select(a.Id, a.DisplayName, t.IdTagDef,
                                     a.DriverId, c.Guid, a.Rv,
                                     n3.BrowseName,
                                     n4.Id.as_('serial_num'))
    ap = AliasedQuery("ap")
    query = (Query.select(
        ap.Id, ap.DisplayName, ap.IdTagDef, ap.Rv, p.PostIndex, p.FlatS,
        p.Flat, p.Region, p.Country, ap.serial_num,
        # Map driver GUIDs to counter model names.
        Case().when(
            ap.Guid == 'est.by:Bus.GranDrvClientImpl', 'СС-301').when(
                ap.Guid == 'est.by:Bus.Gran101DrvClientImpl', 'СС-101').when(
                    ap.Guid == 'est.by:Bus.EmeraDrvClientImpl',
                    'СЕ-102').else_("UNKNOWN").as_('Driver')).with_(
                        sub_query, "ap").from_(ap).join(n, how=JoinType.left).
        on((n.IdOwn == ap.Id) &
           (n.IdComponentType == id_component_type)).join(
               p, how=JoinType.left).on(n.Id == p.Id))
    sql = query.get_sql()
    ret_val: List[AccountPoint] = []
    try:
        self._logger.debug(f'SQL: {sql}')
        result = self._engine_main.execute(sql)
        for row in result:
            data = AccountPoint(row['Id'], row['DisplayName'],
                                row['IdTagDef'], row['PostIndex'],
                                row['FlatS'], row['Flat'], row['Region'],
                                row['Country'], row['serial_num'],
                                row['Driver'], row['Rv'])
            ret_val.append(data)
    except Exception as e:
        # Best-effort read: log and return whatever was collected so far.
        self._logger.error(e)
    return ret_val