def __nonzero__(self):
    # True if the fact table has at least one row
    counter = self.db.query(SQL_SELECT + sql_count("*") + SQL_FROM + quote_column(self.sf.fact))[0][0]
    return bool(counter)

def __len__(self):
    # number of rows in the fact table
    counter = self.db.query(SQL_SELECT + sql_count("*") + SQL_FROM + quote_column(self.sf.fact))[0][0]
    return counter
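# Illustrative usage (a sketch only; `table` is a hypothetical instance of this
# container, not a name defined in this module):
#
#     if table:              # truthiness goes through __nonzero__ (__bool__ on Python 3)
#         print(len(table))  # __len__ returns the COUNT(*) of the fact table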
elif len(edge_names) > 1:
    # multi-column edge: one "d<edge>c<column>" alias per column in the edge value
    domain_names = ["d" + text(edge_index) + "c" + text(i) for i, _ in enumerate(edge_names)]
    query_edge.allowNulls = False
    domain_columns = [c for c in self.snowflake.columns if quote_column(c.es_column) in vals]
    if not domain_columns:
        domain_nested_path = "."
        Log.note("expecting a known column")
    else:
        domain_nested_path = domain_columns[0].nested_path
    domain_table = quote_column(concat_field(self.snowflake.fact_name, domain_nested_path[0]))
    limit = mo_math.min(query.limit, query_edge.domain.limit)

    # sub-query that enumerates the (limited) set of edge values, most frequent first
    domain = (
        SQL_SELECT
        + sql_list(sql_alias(g, n) for n, g in zip(domain_names, vals))
        + SQL_FROM
        + domain_table
        + nest_to_alias["."]
        + SQL_GROUPBY
        + sql_list(vals)
        + SQL_ORDERBY
        + sql_count(SQL_ONE)
        + SQL_DESC
        + SQL_LIMIT
        + text(limit)
    )
    where = None
    join_type = SQL_LEFT_JOIN if query_edge.allowNulls else SQL_INNER_JOIN
    # match each domain column to the corresponding edge value, treating NULL = NULL as a match
    on_clause = SQL_AND.join(
        sql_iso(
            sql_iso(quote_column(edge_alias, k) + SQL_IS_NULL + SQL_AND + v + SQL_IS_NULL)
            + SQL_OR
            + quote_column(edge_alias, k) + " = " + v
        )
        for k, v in zip(domain_names, vals)
    )
    null_on_clause = None
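# For reference, the `domain` sub-query built above, and the join back to it, expand
# to SQL of roughly this shape (illustrative names and spacing, not the exact output):
#
#     SELECT <val0> AS d0c0, <val1> AS d0c1
#     FROM <fact_table> __a__
#     GROUP BY <val0>, <val1>
#     ORDER BY COUNT(1) DESC
#     LIMIT <limit>
#
#     ... ON ((<edge_alias>.d0c0 IS NULL AND <val0> IS NULL) OR <edge_alias>.d0c0 = <val0>)
#        AND ((<edge_alias>.d0c1 IS NULL AND <val1> IS NULL) OR <edge_alias>.d0c1 = <val1>)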
    domain_columns = [
        c for c in self.sf.columns if quote_column(c.es_column) in vals
    ]
    if not domain_columns:
        domain_nested_path = "."
        Log.note("expecting a known column")
    else:
        domain_nested_path = domain_columns[0].nested_path
    domain_table = quote_column(concat_field(self.sf.fact, domain_nested_path[0]))
    limit = mo_math.min(query.limit, query_edge.domain.limit)
    domain = (
        SQL_SELECT
        + sql_list(sql_alias(g, n) for n, g in zip(domain_names, vals))
        + SQL_FROM
        + domain_table
        + nest_to_alias["."]
        + SQL_GROUPBY
        + sql_list(vals)
        + SQL_ORDERBY
        + sql_count(SQL_ONE)
        + SQL_DESC
        + SQL_LIMIT
        + text_type(limit)
    )
    where = None
    join_type = SQL_LEFT_JOIN if query_edge.allowNulls else SQL_INNER_JOIN
    on_clause = SQL_AND.join(
        sql_iso(
            sql_iso(join_column(edge_alias, k) + SQL_IS_NULL + SQL_AND + v + SQL_IS_NULL)
            + SQL_OR
            + join_column(edge_alias, k) + " = " + v
        )
        for k, v in zip(domain_names, vals)
    )
    null_on_clause = None
elif isinstance(query_edge.domain, DefaultDomain):
    domain_names = [
        quote_column("d" + text_type(edge_index) + "c" + text_type(i))
def _groupby_op(self, query, frum):
    base_table, path = tail_field(frum)
    schema = self.sf.tables[path].schema
    index_to_column = {}
    # map each nested path to a short table alias: ".": __a__, first nested table: __b__, ...
    nest_to_alias = {
        nested_path: "__" + unichr(ord('a') + i) + "__"
        for i, (nested_path, sub_table) in enumerate(self.sf.tables.items())
    }

    tables = []
    for n, a in nest_to_alias.items():
        if startswith_field(path, n):
            tables.append({"nest": n, "alias": a})
    tables = jx.sort(tables, {"value": {"length": "nest"}})

    # LEFT JOIN each deeper nested table onto its parent row
    from_sql = join_field([base_table] + split_field(tables[0].nest)) + " " + tables[0].alias
    previous = tables[0]
    for t in tables[1:]:
        from_sql += (
            SQL_LEFT_JOIN + quote_column(concat_field(base_table, t.nest)) + " " + t.alias
            + SQL_ON + join_column(t.alias, quoted_PARENT)
            + " = " + join_column(previous.alias, quoted_UID)
        )

    selects = []
    groupby = []
    # groupby expressions become both SELECT terms and GROUP BY terms
    for i, e in enumerate(query.groupby):
        for edge_sql in e.value.to_sql(schema):
            column_number = len(selects)
            sql_type, sql = edge_sql.sql.items()[0]
            if sql is SQL_NULL and e.value.var not in schema.keys():
                Log.error("No such column {{var}}", var=e.value.var)

            column_alias = _make_column_name(column_number)
            groupby.append(sql)
            selects.append(sql_alias(sql, column_alias))
            if edge_sql.nested_path == ".":
                select_name = edge_sql.name
            else:
                select_name = "."
            index_to_column[column_number] = ColumnMapping(
                is_edge=True,
                push_name=e.name,
                push_column_name=e.name.replace("\\.", "."),
                push_column=i,
                push_child=select_name,
                pull=get_column(column_number),
                sql=sql,
                column_alias=column_alias,
                type=sql_type_to_json_type[sql_type]
            )

    # aggregates for each simple select clause
    for i, select in enumerate(listwrap(query.select)):
        column_number = len(selects)
        sql_type, sql = select.value.to_sql(schema)[0].sql.items()[0]
        if sql == 'NULL' and select.value.var not in schema.keys():
            Log.error("No such column {{var}}", var=select.value.var)

        if select.value == "." and select.aggregate == "count":
            selects.append(sql_alias(sql_count(SQL_ONE), quote_column(select.name)))
        else:
            selects.append(sql_alias(sql_aggs[select.aggregate] + sql_iso(sql), quote_column(select.name)))

        index_to_column[column_number] = ColumnMapping(
            push_name=select.name,
            push_column_name=select.name,
            push_column=i + len(query.groupby),
            push_child=".",
            pull=get_column(column_number),
            sql=sql,
            column_alias=quote_column(select.name),
            type=sql_type_to_json_type[sql_type]
        )

    for w in query.window:
        selects.append(self._window_op(self, query, w))

    where = query.where.to_sql(schema)[0].sql.b

    command = (
        SQL_SELECT + (sql_list(selects)) +
        SQL_FROM + from_sql +
        SQL_WHERE + where +
        SQL_GROUPBY + sql_list(groupby)
    )

    if query.sort:
        # sort NULLs last, then by the sort expression itself
        command += SQL_ORDERBY + sql_list(
            sql_iso(sql[t]) + SQL_IS_NULL + "," + sql[t] + (" DESC" if s.sort == -1 else "")
            for s, sql in [(s, s.value.to_sql(schema)[0].sql) for s in query.sort]
            for t in "bns" if sql[t]
        )

    return command, index_to_column
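# Rough shape of the SQL that _groupby_op assembles (illustrative only; the real
# aliases and quoting come from nest_to_alias, _make_column_name, quoted_UID, etc.):
#
#     SELECT <edge_sql> AS <column_alias_0>, <agg>(<value_sql>) AS "<select_name>"
#     FROM <fact_table> __a__
#     LEFT JOIN <fact_table>.<nested_path> __b__ ON __b__.__parent__ = __a__.__id__
#     WHERE <where_sql>
#     GROUP BY <edge_sql>
#     ORDER BY (<sort_sql>) IS NULL, <sort_sql> DESC    -- only when query.sort is given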