def to_sql(self, schema, not_null=False, boolean=False):
    """
    Render a NULL-safe equality test between lhs and rhs as boolean SQL.

    Each side decomposes into per-type SQL (type codes "b", "s", "n", "j");
    the per-type comparisons are OR'd into a single "b" (boolean) result.
    Returns the standard wrapped [{"name": ".", "sql": {...}}] shape, or
    FALSE.to_sql(schema) when no type is present on either side.
    """
    left = self.lhs.to_sql(schema)
    right = self.rhs.to_sql(schema)
    if len(left) != len(right):
        Log.error("lhs and rhs have different dimensionality!?")

    clauses = []
    for lt, rt in zip(left, right):
        for code in "bsnj":
            lv = lt.sql[code]
            rv = rt.sql[code]
            if lv == None and rv == None:
                # neither side has this type; nothing to compare
                continue
            if lv == None:
                # equal only when the populated side is NULL
                clauses.append(sql_iso(rv) + " IS " + SQL_NULL)
            elif rv == None:
                clauses.append(sql_iso(lv) + " IS " + SQL_NULL)
            else:
                # NULL-safe: equal values, or both NULL
                clauses.append(
                    "("
                    + sql_iso(lv)
                    + " = "
                    + sql_iso(rv)
                    + " OR ("
                    + sql_iso(lv)
                    + " IS"
                    + SQL_NULL
                    + SQL_AND
                    + "("
                    + rv
                    + ") IS NULL))"
                )

    if not clauses:
        return FALSE.to_sql(schema)
    return wrap([{"name": ".", "sql": {"b": SQL_OR.join(clauses)}}])
def _build_list_sql(self, db, first, batch_size):
    """
    Build the SELECT that fetches the next batch of extract rows.

    first - the key values of the last row already extracted (or falsy for
            the first batch); used to build a keyset-pagination WHERE clause
    batch_size - LIMIT for this batch
    """
    # TODO: ENSURE THE LAST COLUMN IS THE id
    fields = self._extract.field
    types = self._extract.type

    if first:
        # OR over prefixes: for each prefix length i+1, all leading columns
        # are equal and column i is strictly beyond the last-seen value
        # (ineq supplies the comparison operator per position)
        prefix_terms = []
        for i in range(len(fields)):
            comparisons = []
            for e, (f, v, t) in enumerate(zip(fields[0:i + 1:], first, types[0:i + 1:])):
                value = Date(v) if t == "time" else v
                comparisons.append(
                    quote_column(f) + ineq(i, e, len(fields)) + db.quote_value(value)
                )
            prefix_terms.append(sql_iso(sql_and(comparisons)))
        where = SQL_OR.join(prefix_terms)
    else:
        where = SQL_TRUE

    # time columns are cast to DATETIME(6); others selected as-is
    selects = []
    for t, f in zip(types, fields):
        if t == "time":
            selects.append("CAST" + sql_iso(sql_alias(quote_column(f), SQL("DATETIME(6)"))))
        else:
            selects.append(quote_column(f))

    return (
        SQL_SELECT
        + sql_list(selects)
        + SQL_FROM
        + self.settings.snowflake.fact_table
        + SQL_WHERE
        + where
        + SQL_ORDERBY
        + sql_list(quote_column(f) for f in fields)
        + SQL_LIMIT
        + db.quote_value(batch_size)
    )
def to_sql(self, schema, not_null=False, boolean=False):
    """
    Render this inequality (op is one of "lt", "lte", "gt", "gte") as
    boolean SQL over the typed decompositions of lhs and rhs.

    Single-type fast path: a direct comparison. Otherwise, ORs together
    per-type comparisons, using the type codes' own ordering ("b" < "n" < "s")
    to decide cross-type results by existence alone.
    """
    lhs = self.lhs.to_sql(schema, not_null=True)[0].sql
    rhs = self.rhs.to_sql(schema, not_null=True)[0].sql
    lhs_exists = self.lhs.exists().to_sql(schema)[0].sql
    rhs_exists = self.rhs.exists().to_sql(schema)[0].sql

    if len(lhs) == 1 and len(rhs) == 1:
        # both sides have exactly one type: compare them directly
        # NOTE(review): .values()[0] assumes a mo-dots Data (list-returning
        # .values()); a plain Python 3 dict view is not subscriptable — confirm
        return wrap([{"name": ".", "sql": {
            "b": sql_iso(lhs.values()[0]) + " " + InequalityOp.operators[self.op] + " " + sql_iso(rhs.values()[0])
        }}])

    ors = []
    for l in "bns":  # boolean, number, string
        ll = lhs[l]
        if not ll:
            continue
        for r in "bns":
            rr = rhs[r]
            if not rr:
                continue
            elif r == l:
                # same type on both sides: both must exist AND satisfy the comparison
                ors.append(
                    sql_iso(lhs_exists[l]) + SQL_AND + sql_iso(rhs_exists[r]) + SQL_AND + sql_iso(lhs[l]) + " " +
                    InequalityOp.operators[self.op] + " " + sql_iso(rhs[r])
                )
            elif (l > r and self.op in ["gte", "gt"]) or (l < r and self.op in ["lte", "lt"]):
                # cross-type: comparison decided by the ordering of the type
                # codes themselves; mere existence of both sides suffices
                ors.append(
                    sql_iso(lhs_exists[l]) + SQL_AND + sql_iso(rhs_exists[r])
                )
    sql = sql_iso(SQL_OR.join(sql_iso(o) for o in ors))
    return wrap([{"name": ".", "sql": {"b": sql}}])
def to_sql(self, schema, not_null=False, boolean=False):
    """
    Render this OR expression: each term is converted to boolean SQL,
    parenthesized, and joined with OR into a single "b" result.
    """
    parts = []
    for term in self.terms:
        term_sql = term.to_sql(schema, boolean=True)[0].sql.b
        parts.append(sql_iso(term_sql))
    return wrap([{"name": ".", "sql": {"b": SQL_OR.join(parts)}}])
def to_sql(self, schema, not_null=False, boolean=False):
    """
    Render a membership test (value IN superset) as boolean SQL.

    The superset must be a Literal; its JSON is decoded and each member
    becomes an equality test, OR'd together. An empty superset is FALSE.

    BUG FIX: the non-empty branch previously returned the bare joined SQL
    string, while the empty branch (and every sibling to_sql in this file)
    returns the wrapped [{"name": ".", "sql": {...}}] shape; both branches
    now return the wrapped form so callers can uniformly index [0].sql.
    """
    if not isinstance(self.superset, Literal):
        Log.error("Not supported")
    j_value = json2value(self.superset.json)
    if not j_value:
        # empty superset: nothing is a member
        return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])

    # NOTE(review): to_sql returns a wrapped list; concatenating it directly
    # with "==" looks suspect — presumably var should be var[0].sql of the
    # appropriate type, as the newer IN implementation does. TODO confirm.
    var = self.value.to_sql(schema)
    sql = SQL_OR.join(sql_iso(var + "==" + quote_value(v)) for v in j_value)
    return wrap([{"name": ".", "sql": {"b": sql}}])
def to_sql(self, schema, not_null=False, boolean=False):
    """
    Render an "exists" test: TRUE when any primitive-typed ("b", "n", "s")
    SQL for the field IS NOT NULL; FALSE when no primitive type is present.
    """
    field_sql = self.field.to_sql(schema)[0].sql
    tests = [
        sql_iso(v + SQL_IS_NOT_NULL)
        for t, v in field_sql.items()
        if t in "bns"
    ]
    if not tests:
        return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])
    return wrap([{"name": ".", "sql": {"b": SQL_OR.join(tests)}}])
def to_sql(self, schema, not_null=False, boolean=False):
    """
    Render a membership test (value IN superset) as boolean SQL.

    The superset must be a Literal; its decoded members form one SQL IN
    list, tested against each typed SQL of the value and OR'd together.
    An empty superset renders as FALSE.
    """
    if not is_op(self.superset, Literal):
        Log.error("Not supported")
    members = json2value(self.superset.json)
    if not members:
        return wrap([{"name": ".", "sql": {"b": SQL_FALSE}}])

    value_sql = SQLang[self.value].to_sql(schema)
    tests = []
    for _, v in value_sql[0].sql.items():
        tests.append(sql_iso(ConcatSQL((v, SQL_IN, quote_list(members)))))
    return wrap([{"name": ".", "sql": {"b": SQL_OR.join(tests)}}])
def to_sql(self, schema, not_null=False, boolean=False):
    """
    Render equality between lhs and rhs as a boolean SQLScript.

    Both sides decompose into per-type SQL (type codes "b", "s", "n", "j");
    per-type comparisons are OR'd. ZERO (checked by identity) marks a side
    already simplified to the literal zero; comparing against ZERO emits the
    other side's SQL directly — presumably relying on it being boolean in
    that case. TODO(review): confirm the ZERO-branch semantics.
    """
    lhs = SQLang[self.lhs].to_sql(schema)
    rhs = SQLang[self.rhs].to_sql(schema)
    acc = []
    if len(lhs) != len(rhs):
        Log.error("lhs and rhs have different dimensionality!?")
    for l, r in zip(lhs, rhs):
        for t in "bsnj":  # boolean, string, number, json
            if l.sql[t] == None:
                if r.sql[t] == None:
                    # neither side has this type; nothing to compare
                    pass
                else:
                    # equal only when the populated side is NULL
                    acc.append(sql_iso(r.sql[t]) + SQL_IS_NULL)
            elif l.sql[t] is ZERO:
                if r.sql[t] == None:
                    acc.append(SQL_FALSE)
                elif r.sql[t] is ZERO:
                    # ZERO == ZERO should have been folded upstream
                    Log.error(
                        "Expecting expression to have been simplified already"
                    )
                else:
                    # comparison against ZERO: emit the other side as-is
                    acc.append(r.sql[t])
            else:
                if r.sql[t] == None:
                    acc.append(sql_iso(l.sql[t]) + SQL_IS_NULL)
                elif r.sql[t] is ZERO:
                    acc.append(l.sql[t])
                else:
                    acc.append(
                        sql_iso(l.sql[t]) + " = " + sql_iso(r.sql[t]))
    if not acc:
        return FALSE.to_sql(schema)
    else:
        return SQLScript(expr=SQL_OR.join(acc), frum=self, data_type=BOOLEAN, miss=FALSE, schema=schema)
if query_edge.value: domain = SQL_UNION_ALL.join( SQL_SELECT + sql_alias(quote_value(coalesce(p.dataIndex, i)), quote_column("rownum")) + SQL_COMMA + sql_alias(quote_value(p.value), domain_name) for i, p in enumerate(query_edge.domain.partitions)) if query_edge.allowNulls: domain += (SQL_UNION_ALL + SQL_SELECT + sql_alias( quote_value(len(query_edge.domain.partitions)), quote_column("rownum")) + SQL_COMMA + sql_alias(SQL_NULL, domain_name)) where = None join_type = SQL_LEFT_JOIN if query_edge.allowNulls else SQL_INNER_JOIN on_clause = (SQL_OR.join( join_column(edge_alias, k) + " = " + v for k, v in zip(domain_names, vals)) + SQL_OR + sql_iso( join_column(edge_alias, domain_name) + SQL_IS_NULL + SQL_AND + SQL_AND.join(v + SQL_IS_NULL for v in vals))) null_on_clause = None else: domain = SQL_UNION_ALL.join( SQL_SELECT + sql_alias(quote_value(pp), domain_name) for pp, p in enumerate(query_edge.domain.partitions)) where = None join_type = SQL_LEFT_JOIN if query_edge.allowNulls else SQL_INNER_JOIN on_clause = SQL_AND.join( join_column(edge_alias, k) + " = " + sql
SQL_SELECT + sql_alias(quote_value(coalesce(p.dataIndex, i)), quote_column("rownum")) + SQL_COMMA + sql_alias(quote_value(p.value), domain_name) for i, p in enumerate(query_edge.domain.partitions) ) if query_edge.allowNulls: domain += ( SQL_UNION_ALL + SQL_SELECT + sql_alias(quote_value(len(query_edge.domain.partitions)), quote_column("rownum")) + SQL_COMMA + sql_alias(SQL_NULL, domain_name) ) where = None join_type = SQL_LEFT_JOIN if query_edge.allowNulls else SQL_INNER_JOIN on_clause = ( SQL_OR.join( quote_column(edge_alias, k) + " = " + v for k, v in zip(domain_names, vals) ) + SQL_OR + sql_iso( quote_column(edge_alias, domain_name) + SQL_IS_NULL + SQL_AND + SQL_AND.join(v + SQL_IS_NULL for v in vals) ) ) null_on_clause = None else: domain = SQL_UNION_ALL.join( SQL_SELECT + sql_alias(quote_value(pp), domain_name) for pp, p in enumerate(query_edge.domain.partitions) ) where = None join_type = SQL_LEFT_JOIN if query_edge.allowNulls else SQL_INNER_JOIN on_clause = SQL_AND.join(
def _esfilter2sqlwhere(db, esfilter):
    """
    CONVERT ElasticSearch FILTER TO SQL FILTER
    db - REQUIRED TO PROPERLY QUOTE VALUES AND COLUMN NAMES
    """
    esfilter = wrap(esfilter)

    if esfilter is True:
        return SQL_TRUE
    elif esfilter["and"]:
        # recurse via the public (un-prefixed) entry point
        return sql_iso(SQL_AND.join([esfilter2sqlwhere(db, a) for a in esfilter["and"]]))
    elif esfilter["or"]:
        return sql_iso(SQL_OR.join([esfilter2sqlwhere(db, a) for a in esfilter["or"]]))
    elif esfilter["not"]:
        return SQL_NOT + sql_iso(esfilter2sqlwhere(db, esfilter["not"]))
    elif esfilter.term:
        # AND of column = value for every term pair
        return sql_iso(SQL_AND.join([
            quote_column(col) + SQL("=") + quote_value(val)
            for col, val in esfilter.term.items()
        ]))
    elif esfilter.terms:
        # membership test; returns on the first (only) column
        for col, v in esfilter.terms.items():
            if len(v) == 0:
                # NOTE(review): plain string, not SQL_FALSE like elsewhere — confirm intended
                return "FALSE"
            try:
                # all-integer value lists get packed into range filters
                int_list = convert.value2intlist(v)
                has_null = False
                for vv in v:
                    if vv == None:
                        has_null = True
                        break
                if int_list:
                    # NOTE(review): `filter` shadows the builtin
                    filter = int_list_packer(col, int_list)
                    if has_null:
                        # NULL members match via "missing" OR the packed filter
                        return esfilter2sqlwhere(db, {"or": [{"missing": col}, filter]})
                    elif 'terms' in filter and set(filter['terms'].get(col, []))==set(int_list):
                        # packing did not help; emit a plain IN list
                        return quote_column(col) + " in " + quote_list(int_list)
                    else:
                        return esfilter2sqlwhere(db, filter)
                else:
                    if has_null:
                        return esfilter2sqlwhere(db, {"missing": col})
                    else:
                        # NOTE(review): lowercase "false" here vs "FALSE" above — confirm intended
                        return "false"
            except Exception as e:
                # deliberate best-effort: packing failures fall through to the
                # generic IN list below (exception wrapped but not logged)
                e = Except.wrap(e)
                pass
            return quote_column(col) + " in " + quote_list(v)
    elif esfilter.script:
        # script is assumed to already be a SQL expression
        return sql_iso(esfilter.script)
    elif esfilter.range:
        name2sign = {
            "gt": SQL(">"),
            "gte": SQL(">="),
            "lte": SQL("<="),
            "lt": SQL("<")
        }

        def single(col, r):
            # render one column's range spec; both ES-style ("gte") and
            # operator-style (">=") keys are accepted for the bounds
            min = coalesce(r["gte"], r[">="])
            max = coalesce(r["lte"], r["<="])
            if min != None and max != None:
                # SPECIAL CASE (BETWEEN)
                sql = quote_column(col) + SQL(" BETWEEN ") + quote_value(min) + SQL_AND + quote_value(max)
            else:
                sql = SQL_AND.join(
                    quote_column(col) + name2sign[sign] + quote_value(value)
                    for sign, value in r.items()
                )
            return sql

        terms = [single(col, ranges) for col, ranges in esfilter.range.items()]
        if len(terms) == 1:
            output = terms[0]
        else:
            output = sql_iso(SQL_AND.join(terms))
        return output
    elif esfilter.missing:
        # accept both {"missing": "col"} and {"missing": {"field": "col"}}
        if isinstance(esfilter.missing, text_type):
            return sql_iso(quote_column(esfilter.missing) + SQL_IS_NULL)
        else:
            return sql_iso(quote_column(esfilter.missing.field) + SQL_IS_NULL)
    elif esfilter.exists:
        if isinstance(esfilter.exists, text_type):
            return sql_iso(quote_column(esfilter.exists) + SQL_IS_NOT_NULL)
        else:
            return sql_iso(quote_column(esfilter.exists.field) + SQL_IS_NOT_NULL)
    elif esfilter.match_all:
        return SQL_TRUE
    elif esfilter.instr:
        # substring match via MySQL instr(); AND over all column/value pairs
        return sql_iso(SQL_AND.join(["instr" + sql_iso(quote_column(col) + ", " + quote_value(val)) + ">0" for col, val in esfilter.instr.items()]))
    else:
        Log.error("Can not convert esfilter to SQL: {{esfilter}}", esfilter=esfilter)