def to_bq(self, schema, not_null=False, boolean=False):
    lhs = BQLang[self.lhs].to_bq(schema)[0].sql.n
    rhs = BQLang[self.rhs].to_bq(schema)[0].sql.n
    d = BQLang[self.default].to_bq(schema)[0].sql.n

    if lhs and rhs:
        if d == None:
            return wrap(
                [{"name": ".", "sql": {"n": sql_iso(lhs) + " / " + sql_iso(rhs)}}]
            )
        else:
            return wrap(
                [
                    {
                        "name": ".",
                        "sql": {
                            "n": sql_coalesce(
                                [sql_iso(lhs) + " / " + sql_iso(rhs), d]
                            )
                        },
                    }
                ]
            )
    else:
        return Null
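# --- Illustration (not part of the library) --------------------------------
# A minimal standalone sketch of the string composition used above, assuming
# sql_iso wraps a fragment in parentheses and sql_coalesce joins fragments
# into a single COALESCE(...) call; the helpers below are hypothetical
# stand-ins, not the library's implementations.

def _iso(frag):
    # wrap a SQL fragment in parentheses
    return "(" + frag + ")"

def _coalesce(frags):
    # join fragments into one COALESCE(...) expression
    return "COALESCE(" + ", ".join(frags) + ")"

if __name__ == "__main__":
    lhs, rhs, default = "a.revenue", "a.orders", "0"
    print(_coalesce([_iso(lhs) + " / " + _iso(rhs), default]))
    # -> COALESCE((a.revenue) / (a.orders), 0)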
def to_bq(self, schema, not_null=False, boolean=False):
    value = BQLang[self.term].to_bq(schema, not_null=True)
    acc = []
    for c in value:
        for t, v in c.sql.items():
            if t == "s":
                acc.append("CAST(" + v + " as FLOAT)")
            else:
                acc.append(v)

    if not acc:
        return wrap([])
    elif len(acc) == 1:
        return wrap([{"name": ".", "sql": {"n": acc[0]}}])
    else:
        return wrap([{"name": ".", "sql": {"n": sql_coalesce(acc)}}])
def to_sql(self, schema, not_null=False, boolean=False):
    acc = {"b": [], "s": [], "n": [], "0": []}
    for term in self.terms:
        for t, v in SQLang[term].to_sql(schema)[0].sql.items():
            acc[t].append(v)

    output = {}
    for t, terms in acc.items():
        if not terms:
            continue
        elif len(terms) == 1:
            output[t] = terms[0]
        else:
            output[t] = sql_coalesce(terms)
    return wrap([{"name": ".", "sql": output}])
def to_bq(self, schema, not_null=False, boolean=False):
    test = BQLang[self.term].missing().to_bq(schema, boolean=True)[0].sql.b
    value = BQLang[self.term].to_bq(schema, not_null=True)[0].sql
    acc = []
    for t, v in value.items():
        if t == "b":
            acc.append(
                SQL_CASE
                + SQL_WHEN + sql_iso(test) + SQL_THEN + SQL_NULL
                + SQL_WHEN + sql_iso(v) + SQL_THEN + "'true'"
                + SQL_ELSE + "'false'"
                + SQL_END
            )
        elif t == "s":
            acc.append(v)
        else:
            acc.append(
                "RTRIM(RTRIM(CAST"
                + sql_iso(v + " as TEXT")
                + ", " + quote_value("0")
                + "), " + quote_value(".")
                + ")"
            )

    if not acc:
        return wrap([{}])
    elif len(acc) == 1:
        return wrap([{"name": ".", "sql": {"s": acc[0]}}])
    else:
        return wrap([{"name": ".", "sql": {"s": sql_coalesce(acc)}}])
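# --- Illustration (not part of the library) --------------------------------
# Standalone sketch of the number-to-string trick above: cast to text, strip
# trailing zeros, then strip a dangling decimal point. The helper and column
# names are hypothetical; the real code composes the same SQL via sql_iso
# and quote_value.

def _trim_number_sql(expr):
    # build RTRIM(RTRIM(CAST(<expr> as TEXT), '0'), '.')
    return "RTRIM(RTRIM(CAST(" + expr + " as TEXT), '0'), '.')"

if __name__ == "__main__":
    print(_trim_number_sql("a.price"))
    # -> RTRIM(RTRIM(CAST(a.price as TEXT), '0'), '.')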
            index_to_column[column_number] = ColumnMapping(
                push_name=s.name,
                push_column_name=s.name,
                push_column=si,
                push_child=name,
                pull=get_column(column_number),
                sql=full_sql,
                column_alias=_make_column_name(column_number),
                type="number"
            )
        else:
            # STANDARD AGGREGATES
            for details in SQLang[s.value].partial_eval().to_sql(schema):
                for sql_type, sql in details.sql.items():
                    column_number = len(outer_selects)
                    sql = sql_aggs[s.aggregate] + sql_iso(sql)
                    if s.default != None:
                        sql = sql_coalesce([sql, quote_value(s.default)])
                    outer_selects.append(
                        sql_alias(sql, _make_column_name(column_number))
                    )
                    index_to_column[column_number] = ColumnMapping(
                        push_name=s.name,
                        push_column_name=s.name,
                        push_column=si,
                        push_child=".",  # join_field(split_field(details.name)[1::]),
                        pull=get_column(column_number),
                        sql=sql,
                        column_alias=_make_column_name(column_number),
                        type=sql_type_to_json_type[sql_type]
                    )

    for w in query.window:
        outer_selects.append(self._window_op(self, query, w))
def _groupby_op(self, query, schema):
    base_table = schema.snowflake.fact_name
    path = schema.nested_path
    # base_table, path = tail_field(frum)
    # schema = self.snowflake.tables[path].schema

    index_to_column = {}
    nest_to_alias = {
        nested_path: "__" + unichr(ord('a') + i) + "__"
        for i, nested_path in enumerate(self.schema.snowflake.query_paths)
    }
    tables = []
    for n, a in nest_to_alias.items():
        if startswith_field(path, n):
            tables.append({"nest": n, "alias": a})
    tables = jx.sort(tables, {"value": {"length": "nest"}})

    from_sql = join_field(
        [base_table] + split_field(tables[0].nest)
    ) + " " + tables[0].alias
    previous = tables[0]
    for t in tables[1::]:
        from_sql += (
            SQL_LEFT_JOIN
            + quote_column(concat_field(base_table, t.nest)) + " " + t.alias
            + SQL_ON
            + quote_column(t.alias, PARENT)
            + SQL_EQ
            + quote_column(previous.alias, UID)
        )

    selects = []
    groupby = []
    for i, e in enumerate(query.groupby):
        for edge_sql in SQLang[e.value].to_sql(schema):
            column_number = len(selects)
            sql_type, sql = edge_sql.sql.items()[0]
            if sql is SQL_NULL and not e.value.var in schema.keys():
                Log.error("No such column {{var}}", var=e.value.var)

            column_alias = _make_column_name(column_number)
            groupby.append(sql)
            selects.append(sql_alias(sql, column_alias))
            if edge_sql.nested_path == ".":
                select_name = edge_sql.name
            else:
                select_name = "."
            index_to_column[column_number] = ColumnMapping(
                is_edge=True,
                push_name=e.name,
                push_column_name=e.name.replace("\\.", "."),
                push_column=i,
                push_child=select_name,
                pull=get_column(column_number),
                sql=sql,
                column_alias=column_alias,
                type=sql_type_to_json_type[sql_type]
            )

    for i, select in enumerate(listwrap(query.select)):
        column_number = len(selects)
        sql_type, sql = SQLang[select.value].to_sql(schema)[0].sql.items()[0]
        if sql == 'NULL' and not select.value.var in schema.keys():
            Log.error("No such column {{var}}", var=select.value.var)

        # AGGREGATE
        if select.value == "." and select.aggregate == "count":
            sql = sql_count(SQL_ONE)
        else:
            sql = sql_call(sql_aggs[select.aggregate], sql)

        if select.default != None:
            sql = sql_coalesce([sql, quote_value(select.default)])

        selects.append(sql_alias(sql, select.name))
        index_to_column[column_number] = ColumnMapping(
            push_name=select.name,
            push_column_name=select.name,
            push_column=i + len(query.groupby),
            push_child=".",
            pull=get_column(column_number),
            sql=sql,
            column_alias=quote_column(select.name),
            type=sql_type_to_json_type[sql_type]
        )

    for w in query.window:
        selects.append(self._window_op(self, query, w))

    where = SQLang[query.where].to_sql(schema)[0].sql.b

    command = (
        SQL_SELECT + (sql_list(selects))
        + SQL_FROM + from_sql
        + SQL_WHERE + where
        + SQL_GROUPBY + sql_list(groupby)
    )

    if query.sort:
        command += SQL_ORDERBY + sql_list(
            sql_iso(sql[t]) + SQL_IS_NULL + "," + sql[t] + (" DESC" if s.sort == -1 else "")
            for s, sql in [(s, SQLang[s.value].to_sql(schema)[0].sql) for s in query.sort]
            for t in "bns"
            if sql[t]
        )

    return command, index_to_column
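# --- Illustration (not part of the library) --------------------------------
# Compact sketch of the statement shape _groupby_op assembles. Table names,
# aliases, and the join columns (__parent__/__id__) are illustrative
# assumptions, not the library's actual constants.

def _sketch_groupby_sql(selects, from_sql, where, groupby, orderby=None):
    # mirror the SELECT ... FROM ... WHERE ... GROUP BY [ORDER BY] ordering above
    sql = (
        "SELECT " + ", ".join(selects)
        + " FROM " + from_sql
        + " WHERE " + where
        + " GROUP BY " + ", ".join(groupby)
    )
    if orderby:
        sql += " ORDER BY " + ", ".join(orderby)
    return sql

if __name__ == "__main__":
    print(_sketch_groupby_sql(
        selects=["(__a__.product) AS __g0__", "COUNT(1) AS n"],
        from_sql="sales __a__ LEFT JOIN sales.items __b__ ON __b__.__parent__ = __a__.__id__",
        where="(__a__.active)",
        groupby=["(__a__.product)"],
    ))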