def sql_query(query, schema=None):
    """
    VERY BASIC QUERY EXPRESSION TO SQL
    :param query: jx-expression
    :param schema: schema used to compile expressions to BigQuery SQL
    :return: SQL
    """
    from jx_base import jx_expression
    from jx_bigquery.expressions._utils import BQLang

    query = wrap(query)
    parts = [SQL_SELECT]

    # SELECT clause: "*" when nothing specific was requested,
    # otherwise each normalized select becomes "expr AS name"
    if not query.select:
        parts.append(SQL_STAR)
    else:
        selects = _normalize_select(query.select, query["from"], schema)
        aliased = [
            sql_alias(BQLang[jx_expression(s.value)].to_bq(schema), escape_name(s.name))
            for s in selects
        ]
        parts.append(JoinSQL(SQL_COMMA, aliased))

    parts.append(SQL_FROM)
    parts.append(quote_column(ApiName(*split_field(query["from"]))))

    if query.where:
        parts.append(SQL_WHERE)
        parts.append(BQLang[jx_expression(query.where)].to_bq(schema))

    if query.sort:
        ordering = _normalize_sort(query.sort)
        parts.append(SQL_ORDERBY)
        directed = [
            ConcatSQL(
                BQLang[jx_expression(s.value)].to_bq(schema),
                SQL_DESC if s.sort == -1 else SQL_ASC,
            )
            for s in ordering
        ]
        parts.append(JoinSQL(SQL_COMMA, directed))

    if query.limit:
        parts.append(SQL_LIMIT)
        parts.append(BQLang[jx_expression(query.limit)].to_bq(schema))

    return ConcatSQL(*parts)
def test_add(self):
    # NOTE(review): `jx` itself is not referenced below — presumably the
    # import has registration side effects; keeping it as-is.
    from jx_python import jx

    # {"add": [1, 2]} evaluates to 3; partial evaluation should fold
    # the expression down to the Python literal "3.0"
    addition = jx_expression({"add": [1, 2]})
    self.assertEqual(addition(), 3)
    self.assertEqual(addition.partial_eval(Python).to_python(), "3.0")
def sql_update(table, command):
    """Compile a jx update command into an UPDATE ... SET ... [WHERE ...] statement."""
    # the in-function import below rebinds SQLang at module scope
    global SQLang

    command = wrap(command)
    clauses = [
        SQL_UPDATE,
        quote_column(table),
        SQL_SET,
        sql_list(sql_eq(**{k: v}) for k, v in command['set'].items()),
    ]
    if command.where:
        clauses.append(SQL_WHERE)
        if command.where.eq:
            # simple equality filter: emit name=value pairs directly
            clauses.append(sql_eq(**command.where.eq))
        else:
            # general filter: compile the jx-expression to SQL
            from jx_sqlite.expressions._utils import SQLang

            clauses.append(SQLang[jx_expression(command.where)].to_sql[0].b)
    return ConcatSQL(*clauses)
def sql_query(command):
    """
    VERY BASIC QUERY EXPRESSION TO SQL
    :param command: jx-expression
    :return: SQL
    """
    command = wrap(command)
    clauses = [SQL_SELECT]

    # SELECT clause: quoted column list, or "*" when nothing was requested
    if command.select:
        clauses.append(
            JoinSQL(SQL_COMMA, map(quote_column, listwrap(command.select))))
    else:
        clauses.append(SQL_STAR)

    clauses.append(SQL_FROM)
    clauses.append(quote_column(command["from"]))

    if command.where:
        clauses.append(SQL_WHERE)
        if command.where.eq:
            # simple equality filter: emit name=value pairs directly
            clauses.append(sql_eq(**command.where.eq))
        else:
            # general filter: compile the jx-expression to SQL
            from jx_sqlite.expressions import SQLang

            clauses.append(SQLang[jx_expression(command.where)].to_sql[0].b)

    # accept either "orderby" or "sort" to specify ordering
    ordering = coalesce(command.orderby, command.sort)
    if ordering:
        clauses.append(SQL_ORDERBY)
        clauses.append(JoinSQL(SQL_COMMA, map(quote_column, listwrap(ordering))))

    if command.limit:
        clauses.append(SQL_LIMIT)
        clauses.append(
            JoinSQL(SQL_COMMA, map(quote_value, listwrap(command.limit))))

    return ConcatSQL(*clauses)
def main():
    """
    Entry point: scan the performance-series summary table and report
    groups of series (deviant, modal, outliers, skewed, ok, noisy, ...)
    sorted by their deviance statistics.  Driven entirely by the
    module-level `config` object.
    """
    # analysis window: everything from SCATTER_RANGE ago until today
    since = Date.today() - Duration(SCATTER_RANGE)

    # guard against pointing the analysis at the wrong database
    if config.database.host not in listwrap(
            config.analysis.expected_database_host):
        Log.error("Expecting database to be one of {{expected}}",
                  expected=config.analysis.expected_database_host)
    if not config.analysis.interesting:
        Log.alert(
            "Expecting config file to have `analysis.interesting` with a json expression. All series are included."
        )

    # SETUP DESTINATION
    deviant_summary = bigquery.Dataset(
        config.deviant_summary).get_or_create_table(
            read_only=True, kwargs=config.deviant_summary)

    if config.args.id:
        # EXIT EARLY AFTER WE GOT THE SPECIFIC IDS
        # few enough ids to inspect individually -> enable charting
        if len(config.args.id) < 4:
            step_detector.SHOW_CHARTS = True
        for signature_hash in config.args.id:
            process(
                signature_hash,
                since=since,
                source=config.database,
                deviant_summary=deviant_summary,
                show=True,
            )
        return

    # DOWNLOAD
    if config.args.download:
        # GET INTERESTING SERIES
        # compile the config's "interesting" jx-expression into a BQ filter
        where_clause = BQLang[jx_expression(
            config.analysis.interesting)].to_bq(deviant_summary.schema)
        # GET ALL KNOWN SERIES
        # keep only the newest row per id (rank 1 by last_updated)
        docs = list(
            deviant_summary.sql_query(f"""
                SELECT * EXCEPT (_rank, values)
                FROM (
                    SELECT *, row_number() over (partition by id order by last_updated desc) as _rank
                    FROM {quote_column(deviant_summary.full_name)}
                ) a
                WHERE _rank=1 and {sql_iso(where_clause)}
                LIMIT {quote_value(DOWNLOAD_LIMIT)}
            """))
        if len(docs) == DOWNLOAD_LIMIT:
            Log.warning("Not all signatures downloaded")
        File(config.args.download).write(list2tab(docs, separator=","))

    # DEVIANT
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort={"value": {"abs": "overall_dev_score"}, "sort": "desc"},
        limit=config.args.deviant,
        show_old=False,
        show_distribution=True,
    )
    # MODAL
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort="overall_dev_score",
        limit=config.args.modal,
        where={"eq": {"overall_dev_status": "MODAL"}},
        show_distribution=True,
    )
    # OUTLIERS
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort={"value": "overall_dev_score", "sort": "desc"},
        limit=config.args.outliers,
        where={"eq": {"overall_dev_status": "OUTLIERS"}},
        show_distribution=True,
    )
    # SKEWED
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort={"value": {"abs": "overall_dev_score"}, "sort": "desc"},
        limit=config.args.skewed,
        where={"eq": {"overall_dev_status": "SKEWED"}},
        show_distribution=True,
    )
    # OK
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort={"value": {"abs": "overall_dev_score"}, "sort": "desc"},
        limit=config.args.ok,
        where={"eq": {"overall_dev_status": "OK"}},
        show_distribution=True,
    )
    # NOISE
    # noisiest series among those with enough data points to matter
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort={"value": {"abs": "relative_noise"}, "sort": "desc"},
        where={"gte": {"num_pushes": 30}},
        limit=config.args.noise,
    )
    # EXTRA
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort={"value": {"abs": "max_extra_diff"}, "sort": "desc"},
        where={"lte": {"num_new_segments": 7}},
        limit=config.args.extra,
    )
    # MISSING
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort={"value": {"abs": "max_missing_diff"}, "sort": "desc"},
        where={"lte": {"num_old_segments": 6}},
        limit=config.args.missing,
    )
    # PATHOLOGICAL
    show_sorted(
        config=config,
        since=since,
        source=config.database,
        deviant_summary=deviant_summary,
        sort={"value": "num_segments", "sort": "desc"},
        limit=config.args.pathological,
    )