def update(self, column, column_value, primaryKey_value):
    """Set a single column on the row whose primary key matches.

    Executes ``UPDATE <table> SET <column> = %s WHERE <pk> = %s`` via
    ``self._execute`` and increments the internal statement counter.
    """
    query = sql.SQL("UPDATE {} SET {} = {} WHERE {} = {}").format(
        sql.Identifier(self.table),
        sql.Identifier(column),
        sql.Placeholder(),
        sql.Identifier(self.primarykey),
        sql.Placeholder(),
    )
    params = (column_value, primaryKey_value)
    self._execute(query, params)
    self._counter += 1
def update(*args, **kwargs):
    """Update one column of an existing row using a dynamic SQL request.

    Old way:
    >> cur = db.cursor()
    >> cur.execute("UPDATE users SET email= %s WHERE user_id= %s)", ('foo@bar', user_id))
    >> db.commit()
    >> db.rollback()

    New way:
    >> db.update('users', email='foo@bar', user_id=user_id)

    Table name is always the FIRST args.  The FIRST kwarg is the column
    being SET and the SECOND kwarg is the WHERE column -- this relies on
    keyword-argument insertion order (guaranteed since Python 3.7).
    Any additional kwargs end up in ``values`` but are not referenced by
    the generated query.
    """
    table = args[0]
    l_kwargs_keys = list(kwargs.keys())
    values = dict()
    # Named placeholders reuse the kwarg names, so `values` can be passed
    # directly as the execute() parameter mapping.
    query = sql.SQL("UPDATE {} SET {} = {} WHERE {} = {}")\
        .format(sql.Identifier(table),
                sql.Identifier(str(l_kwargs_keys[0])),
                sql.Placeholder(name=str(l_kwargs_keys[0])),
                sql.Identifier(str(l_kwargs_keys[1])),
                sql.Placeholder(name=str(l_kwargs_keys[1])))
    for x in l_kwargs_keys:
        values.update({str(x): kwargs[x]})
    # NOTE(review): opens and closes a fresh connection per call via the
    # db module's dunder helpers -- presumably commit happens in
    # db.__close__; verify against the db module.
    conn = db.__conn__()
    db.__cursor__(conn).execute(query, values)
    db.__close__(conn)
def query_conditional(column, comparison, values):
    """Build a parameterized WHERE conditional for *column*.

    ``comparison`` (case-insensitive) must appear either in
    COMPARISON_TYPES (multi-value form, one placeholder per value, e.g.
    IN) or in COMPARISON_TYPES_EXTENDED (range form taking exactly two
    values, e.g. BETWEEN).

    Returns a ``(query, values)`` pair ready for ``cursor.execute``.
    Raises ValueError for an unsupported comparison or for a range
    comparison without exactly two values.
    """
    comparison = comparison.upper()
    if comparison in COMPARISON_TYPES:
        # e.g. (col IN (%s, %s, %s)) -- one placeholder per value.
        query = sql.SQL("({col} {comp} ({vals}))").format(
            col=query_column(column),
            comp=sql.SQL(comparison),
            vals=sql.SQL(', ').join(sql.Placeholder() * len(values)))
    elif comparison in COMPARISON_TYPES_EXTENDED:
        if len(values) == 2:
            # e.g. (col BETWEEN %s AND %s)
            query = sql.SQL("({col} {comp} {val1} AND {val2})").format(
                col=query_column(column),
                comp=sql.SQL(comparison),
                val1=sql.Placeholder(),
                val2=sql.Placeholder())
        else:
            raise ValueError(
                "Exactly 2 values required for BETWEEN conditional.")
    else:
        raise ValueError(
            "Unsupported WHERE conditional.\n "
            "(\"{}\" is not a valid comparison type.)".format(comparison))
    return query, values
def find(self, scheme, table, query=None, columns='*', sort=None, skip=0,
         limit=10):
    """Yield rows of ``scheme.table`` matching ``query``.

    Parameters
    ----------
    scheme : str
        Schema name (quoted as an identifier).
    table : str
        Table name (quoted as an identifier).
    query : dict, optional
        Filter passed to ``self.__where``, which returns the WHERE
        fragment plus its named placeholder values.
    columns : str or iterable of str
        ``'*'`` for all columns, otherwise column names to select.
    sort : list of (column, direction), optional
        ORDER BY terms.  The direction is interpolated as raw SQL, so
        it must come from trusted code ('ASC'/'DESC').
    skip, limit : int
        Pagination: OFFSET is ``skip * limit`` (page-based), LIMIT is
        ``limit``.

    Yields
    ------
    tuple
        One database row at a time.
    """
    # Fix: avoid mutable default arguments ({} and []) shared across
    # calls; also removed leftover debug print() statements.
    if query is None:
        query = {}
    if sort is None:
        sort = []
    where, values = self.__where(query)
    cmd = sql.SQL('SELECT {} FROM {}.{} WHERE {}').format(
        columns == '*' and sql.SQL(columns)
        or sql.SQL(', ').join(map(sql.Identifier, columns)),
        sql.Identifier(scheme), sql.Identifier(table), where)
    if sort:
        order = sql.SQL('{} {}').format(sql.Identifier(sort[0][0]),
                                        sql.SQL(sort[0][1]))
        for item in sort[1:]:
            order = sql.SQL('{}, {} {}').format(order,
                                                sql.Identifier(item[0]),
                                                sql.SQL(item[1]))
        cmd = sql.SQL('{} ORDER BY {}').format(cmd, order)
    cmd = sql.SQL('{} LIMIT {} OFFSET {}').format(
        cmd, sql.Placeholder('_limit'), sql.Placeholder('_skip'))
    values['_skip'] = skip * limit
    values['_limit'] = limit
    with Conn(self.__handle) as conn:
        with conn.cursor() as cur:
            cur.execute(cmd, values)
            for row in cur:
                yield row
def test_eq(self):
    """Placeholder equality: same name compares equal; a different
    name, a bare placeholder, a plain string and a Literal do not."""
    named = sql.Placeholder('foo')
    self.assertEqual(named, sql.Placeholder('foo'))
    self.assertNotEqual(named, sql.Placeholder('bar'))
    self.assertNotEqual(named, 'foo')
    self.assertEqual(sql.Placeholder(), sql.Placeholder())
    self.assertNotEqual(named, sql.Placeholder())
    self.assertNotEqual(named, sql.Literal('foo'))
def test_eq(self):
    """Check Placeholder equality/inequality semantics.

    Fix: the deprecated ``assert_`` alias (removed in Python 3.12) is
    replaced by ``assertTrue``; the ``==``/``!=`` operators are kept so
    both dunder paths stay exercised.
    """
    self.assertTrue(sql.Placeholder("foo") == sql.Placeholder("foo"))
    self.assertTrue(sql.Placeholder("foo") != sql.Placeholder("bar"))
    self.assertTrue(sql.Placeholder("foo") != "foo")
    self.assertTrue(sql.Placeholder() == sql.Placeholder())
    self.assertTrue(sql.Placeholder("foo") != sql.Placeholder())
    self.assertTrue(sql.Placeholder("foo") != sql.Literal("foo"))
def test_eq(self):
    """Placeholder equality semantics, exercised through the ``==`` and
    ``!=`` operators directly."""
    ph_foo = sql.Placeholder('foo')
    self.assertTrue(ph_foo == sql.Placeholder('foo'))
    self.assertTrue(ph_foo != sql.Placeholder('bar'))
    self.assertTrue(ph_foo != 'foo')
    self.assertTrue(sql.Placeholder() == sql.Placeholder())
    self.assertTrue(ph_foo != sql.Placeholder())
    self.assertTrue(ph_foo != sql.Literal('foo'))
def get_earned(self, person, start_time, end_time):
    """Sum ``total_points`` from chore.history for *person* in a window.

    Args:
        person: Value matched against the ``name`` column.
        start_time, end_time: Inclusive BETWEEN bounds for ``datetime``.

    Returns:
        The summed points, 0 when no rows match (SUM over zero rows is
        NULL), or None when the connection or query fails (failures are
        logged, never raised).
    """
    self.log('__function__: Starting....', level='DEBUG')
    conn = self.connect()
    if not conn:
        # Fix: the old code set cur = None and fell into the except
        # clause with a misleading AttributeError; bail out explicitly.
        self.error('get_earned: could not connect to database',
                   level='ERROR')
        return None
    cur = conn.cursor()
    self.log('__function__: Connection created', level='DEBUG')
    try:
        qry_str = sql.SQL(
            "SELECT SUM (total_points) FROM {} WHERE name = {} AND datetime BETWEEN {} AND {}"
        ).format(sql.Identifier('chore', 'history'), sql.Placeholder(),
                 sql.Placeholder(), sql.Placeholder())
        cur.execute(qry_str, (person, start_time, end_time))
        row = cur.fetchone()
        # SUM over an empty set yields NULL; report that as 0 points.
        if row[0] is None:
            return 0
        return row[0]
    except (Exception, psycopg2.DatabaseError) as error:
        self.error(error, level='ERROR')
    finally:
        if conn is not None:
            conn.close()
            self.log('__function__: Database connection closed.',
                     level='DEBUG')
def generateSqlCheckRelationsBusiness2People(queryData: dict) -> dict:
    """Build one EXISTS-check SELECT per business/people relation.

    Fix: the return annotation said ``-> str`` but the function returns
    a dict.

    Args:
        queryData: Must contain a 'relations' list of dicts with
            'businessId'/'peopleId', plus the pre-built identifiers
            'bidIdentifier', 'pidIdentifier', 'dbTableSchemaIdentifier'
            and 'dbTableNameIdentifier' used by the templates.
            NOTE(review): this dict is mutated in place (placeholder and
            condition keys are added) -- callers should not rely on it
            staying unchanged.

    Returns:
        dict with 'sql' (list of sql.SQL fragments, chained with
        UNION ALL) and 'data' (placeholder-name -> value mapping).

    Raises:
        Exception: When 'relations' is missing or not a list.
    """
    if "relations" in queryData and isinstance(queryData["relations"], list):
        sqlQueryList = []
        dbTableValues = {}
        relationIndex = 0
        for relation in queryData["relations"]:
            relationIndex += 1
            # Unique placeholder names per relation so every value can
            # live in one shared parameter dict.
            bidColumnPlaceholder = 'col_bid_{}'.format(relationIndex)
            pidColumnPlaceholder = 'col_pid_{}'.format(relationIndex)
            dbTableValues[bidColumnPlaceholder] = relation['businessId']
            dbTableValues[pidColumnPlaceholder] = relation['peopleId']
            queryData['businessIdPlaceholder'] = sql.Placeholder(
                bidColumnPlaceholder)
            queryData['peopleIdPlaceholder'] = sql.Placeholder(
                pidColumnPlaceholder)
            queryData['dbTableCondition'] = sql.SQL("""
            WHERE {bidIdentifier} LIKE {businessIdPlaceholder}
            AND {pidIdentifier} LIKE {peopleIdPlaceholder}""").format(
                **queryData)
            # The first SELECT has no prefix; subsequent ones are glued
            # together with UNION ALL.
            sqlPrefix = ' UNION ALL ' if relationIndex > 1 else ''
            querySql = sql.SQL(sqlPrefix + """SELECT EXISTS (SELECT 1 FROM
            {dbTableSchemaIdentifier}.{dbTableNameIdentifier}
            {dbTableCondition})""").format(**queryData)
            sqlQueryList.append(querySql)
        return {'sql': sqlQueryList, 'data': dbTableValues}
    raise Exception('Bad query.')
async def updateEntry(self, database, updates, conditions):
    """Update entries of a table.

    Parameters
    ----------
    database : str
        The name of the table whose entries are updated
    updates : dict
        Column name(s) and values the matching entry/ies will be
        updated to have
    conditions : dict
        Column name(s) and values the entry/ies to be updated must have
    """
    set_clause = sql.SQL(',').join(
        sql.Composed([sql.Identifier(col), sql.SQL(" = "), sql.Placeholder()])
        for col in updates)
    where_clause = sql.SQL(' AND ').join(
        sql.Composed([sql.Identifier(col), sql.SQL(" = "), sql.Placeholder()])
        for col in conditions)
    command = sql.SQL('''
        UPDATE {table}
        SET {updateValues}
        WHERE {updateConditions}
        ''').format(table=sql.Identifier(database),
                    updateValues=set_clause,
                    updateConditions=where_clause)
    # Positional placeholders: SET values first, then WHERE values.
    params = list(updates.values()) + list(conditions.values())
    self.cursor.execute(command, params)
    self.connection.commit()
def update_detail(self, obj, cursor=None):
    """Update a 'details' row located by detail_id, or by the
    (queue_id, proxy_id) pair when no detail_id is present.

    Args:
        obj: Object exposing ``to_dict()``; every key becomes a SET
            column with a matching named placeholder.
        cursor: Optional open cursor; when omitted the statement goes
            through ``self.do_query``.

    Raises:
        Exception: If neither 'detail_id' nor both 'queue_id' and
            'proxy_id' are available to locate the row.
    """
    table_name = sql.Identifier('details')
    obj_dict = obj.to_dict()
    # Default lookup: by primary detail_id.
    where_sql = sql.SQL("{0}={1}").format(sql.Identifier('detail_id'),
                                          sql.Placeholder('detail_id'))
    if 'detail_id' not in obj_dict:
        if 'queue_id' not in obj_dict or 'proxy_id' not in obj_dict:
            raise Exception(
                "cannot update detail without a detail id, queue id, or proxy id"
            )
        # Fallback lookup: by the (queue_id, proxy_id) pair.
        where_sql = sql.SQL("{0}={1} AND {2}={3}").format(
            sql.Identifier('queue_id'), sql.Placeholder('queue_id'),
            sql.Identifier('proxy_id'), sql.Placeholder('proxy_id'))
    # NOTE(review): the key columns used in WHERE also appear in the SET
    # list -- they are rewritten to their current values, which is
    # redundant but harmless.
    set_sql = sql.SQL(', ').join([
        sql.SQL("{0}={1}").format(sql.Identifier(k), sql.Placeholder(k))
        for k in obj_dict.keys()
    ])
    update = sql.SQL('UPDATE {0} SET {1} WHERE {2}').format(
        table_name, set_sql, where_sql)
    if cursor is not None:
        cursor.execute(update, obj.to_dict())
    else:
        self.do_query(update, obj.to_dict())
def build_coordinate_filters(conn, dna_or_aa, coordinate_mode, coordinate_ranges):
    """Build SQL filter fragments for SNV queries over coordinate ranges.

    Picks the SNV source table (dna_snp / gene_aa_snp / protein_aa_snp)
    from *dna_or_aa* and *coordinate_mode*, extends the selected column
    list accordingly, and builds position filters with named
    placeholders.

    Returns:
        (snv_cols, snv_filter, snv_table, pos_filter_injections) where
        pos_filter_injections maps the generated placeholder names to
        the range bounds.  NOTE(review): the 'selected_gene' /
        'selected_protein' placeholder values must be supplied by the
        caller at execute time -- confirm against callers.
    """
    snv_cols = ["snp_str", "snv_name", "color", "pos", "ref", "alt"]
    snv_filter = []
    if dna_or_aa == constants["DNA_OR_AA"]["DNA"]:
        snv_table = "dna_snp"
    elif dna_or_aa == constants["DNA_OR_AA"]["AA"]:
        snv_cols.append("nt_pos")
        if coordinate_mode == constants["COORDINATE_MODES"]["COORD_GENE"]:
            snv_table = "gene_aa_snp"
            snv_cols.append("gene")
            snv_filter.append(
                sql.SQL('snp_data."gene" = {gene}').format(
                    gene=sql.Placeholder("selected_gene")
                )
            )
        elif coordinate_mode == constants["COORDINATE_MODES"]["COORD_PROTEIN"]:
            snv_table = "protein_aa_snp"
            snv_cols.append("protein")
            snv_filter.append(
                sql.SQL('snp_data."protein" = {protein}').format(
                    protein=sql.Placeholder("selected_protein")
                )
            )
    snv_filter = sql.SQL(" AND ").join(snv_filter)
    pos_filter = []
    pos_filter_injections = {}
    for i in range(len(coordinate_ranges)):
        # AA modes filter on the underlying nucleotide position column.
        pos_column = "pos" if dna_or_aa == constants["DNA_OR_AA"]["DNA"] else "nt_pos"
        pos_filter.append(
            sql.SQL(
                """
                (snp_data.{pos_column} >= {start} AND snp_data.{pos_column} <= {end})
                """
            ).format(
                pos_column=sql.Identifier(pos_column),
                start=sql.Placeholder("range_{}_start".format(i)),
                end=sql.Placeholder("range_{}_end".format(i)),
            )
        )
        # Build injection map
        pos_filter_injections["range_{}_start".format(i)] = coordinate_ranges[i][0]
        pos_filter_injections["range_{}_end".format(i)] = coordinate_ranges[i][1]
    # Ranges are alternatives: a position may fall in any of them.
    pos_filter = sql.SQL(" OR ").join(pos_filter)
    # Compose final WHERE expression
    snv_filter = [snv_filter]
    # Only combine the snv_filter and pos_filter if the snv_filter exists
    if snv_filter[0].as_string(conn):
        snv_filter.append(sql.SQL(" AND "))
    snv_filter.append(pos_filter)
    snv_filter = sql.Composed(snv_filter)
    return snv_cols, snv_filter, snv_table, pos_filter_injections
def get_totals(self, person, column, start_time, end_time):
    """Sum *column* over chore.history rows requiring *person* in a window.

    Parameters
    ----------
    person : str
        Name tested against the ``required_person`` array column with
        the Postgres ``@>`` (contains) operator.
    column : str
        Column to SUM (quoted as an identifier).
    start_time, end_time
        Inclusive BETWEEN bounds for ``datetime``.

    Returns
    -------
    The summed value, 0 when no rows match, or None when the connection
    or query fails (failures are logged, never raised).
    """
    self.log('__function__: Starting....', level='DEBUG')
    conn = self.connect()
    if not conn:
        # Fix: the old code continued with cur = None and died with a
        # swallowed AttributeError; bail out explicitly instead.
        self.error('get_totals: could not connect to database',
                   level='ERROR')
        return None
    cur = conn.cursor()
    self.log('__function__: Connection created', level='DEBUG')
    self.log(start_time, level='DEBUG')
    self.log(end_time, level='DEBUG')
    # Postgres array literal for the @> containment test.  Fix: the old
    # code only handled three hard-coded names and raised
    # UnboundLocalError for anybody else; this form generalizes while
    # producing the identical literal for those names.
    person_name = '{%s}' % person
    try:
        qry_str = sql.SQL(
            "SELECT SUM ({}) FROM {} WHERE required_person @> {} AND datetime BETWEEN {} AND {}"
        ).format(sql.Identifier(column), sql.Identifier('chore', 'history'),
                 sql.Placeholder(), sql.Placeholder(), sql.Placeholder())
        cur.execute(qry_str, (person_name, start_time, end_time))
        row = cur.fetchone()
        # SUM over an empty set yields NULL; report that as 0.
        if row[0] is None:
            return 0
        return row[0]
    except (Exception, psycopg2.DatabaseError) as error:
        self.error(error, level='ERROR')
    finally:
        if conn is not None:
            conn.close()
            self.log('__function__: Database connection closed.',
                     level='DEBUG')
def update_stmt(db_conn, table, params, where=None):
    """SQL Update statement creator and execution.

    Args:
        db_conn: Open database connection; committed on success.
        table (str): Name of the table to update
        params (dict): A dict containing column name + value to insert
        where (list of tuples or tuple, optional): (column, value)
            pairs for the WHERE clause, ANDed together. Defaults to
            None (updates every row).
    """
    base_str = pgsql.SQL("UPDATE {} SET {} {}")
    data = []
    stmts = []
    table = pgsql.Identifier(table)
    for key, val in params.items():
        stmts.append(
            pgsql.SQL("{} = {}").format(pgsql.Identifier(key),
                                        pgsql.Placeholder()))
        data.append(val)
    set_clause = pgsql.SQL(", ").join(stmts)
    if where:
        where_clause = pgsql.SQL("WHERE {}")
        stmts.clear()
        if isinstance(where, list):
            for w in where:
                stmts.append(
                    pgsql.SQL("{} = {}").format(pgsql.Identifier(w[0]),
                                                pgsql.Placeholder()))
                data.append(w[1])
            # Fix: multiple WHERE conditions were joined with ", ",
            # which is invalid SQL; conditions must be ANDed.
            stmts = pgsql.SQL(" AND ").join(stmts)
        else:
            stmts = pgsql.SQL("{} = {}").format(pgsql.Identifier(where[0]),
                                                pgsql.Placeholder())
            data.append(where[1])
        where_clause = where_clause.format(stmts)
    else:
        where_clause = pgsql.SQL('')
    base_str = base_str.format(table, set_clause, where_clause)
    cursor = None
    try:
        cursor = db_conn.cursor()
        cursor.execute(base_str, tuple(data))
        db_conn.commit()
    except pg.Error as err:
        # Fix: mogrify's return value was discarded (and cursor could
        # be unbound if cursor() itself failed) -- show the rendered
        # statement when we have a cursor to render it with.
        if cursor is not None:
            base_str = cursor.mogrify(base_str, tuple(data))
        print(base_str, data)
        print(err)
def entry(cursor, table, entry_updater):
    """Update the row whose ``id`` matches ``entry_updater['id']``.

    Every key of *entry_updater* becomes a ``col = %(col)s`` assignment,
    so the dict itself doubles as the execute() parameter mapping.
    """
    assignments = sql.SQL(', ').join(
        sql.SQL(' = ').join([sql.Identifier(field), sql.Placeholder(field)])
        for field in entry_updater
    )
    statement = sql.SQL("UPDATE {} SET {} WHERE id = {}").format(
        sql.Identifier(table),
        assignments,
        sql.Placeholder('id'),
    )
    cursor.execute(statement, entry_updater)
def insert(*args, **kwargs):
    """Insert a new row into the db object using a dynamic SQL request.

    Simple insert (positional args only):
    >> db.insert('users', 'first_name, last_name, email',
    ..           'foo', 'bar', 'foo@bar')

    Conditional insert (skipped when a row with the guard column value
    already exists), guard given as a kwarg:
    >> db.insert('users', 'first_name, last_name, email',
    ..           'Thomas', 'Barbot', 'foo@bar', email='foo@bar')

    Table name is always the FIRST args; the second is a comma-separated
    column list; the remaining positional args are the values in the
    same order.  The first kwarg, if any, is the NOT EXISTS guard
    column.
    """
    table = args[0]
    # Fix: normalise the column names once.  The old code stripped the
    # names for the Identifier/Placeholder parts but used the raw
    # (possibly space-padded) names as keys of `values`, so a column
    # string like "a, b" failed at execute() with a missing-placeholder
    # error.
    columns = [a.strip() for a in args[1].split(",")]
    values = dict(zip(columns, args[2:]))
    if not kwargs:
        query = sql.SQL("INSERT INTO {} ({}) VALUES({})").format(
            sql.Identifier(table),
            sql.SQL(",").join(map(sql.Identifier, columns)),
            sql.SQL(",").join(sql.Placeholder(name=c) for c in columns))
    else:
        guard_col = str(next(iter(kwargs)))
        query = sql.SQL(
            "INSERT INTO {} ({}) SELECT {} WHERE NOT EXISTS "
            "(SELECT * FROM {} WHERE {} = {})").format(
                sql.Identifier(table),
                sql.SQL(",").join(map(sql.Identifier, columns)),
                sql.SQL(",").join(sql.Placeholder(name=c) for c in columns),
                sql.Identifier(table),
                sql.Identifier(guard_col),
                sql.Placeholder(name=guard_col))
        values[guard_col] = kwargs[guard_col]
    conn = db.__conn__()
    db.__cursor__(conn).execute(query, values)
    db.__close__(conn)
def update_multiple_columns(self, columns, columns_value, primaryKey_value):
    """Update several columns of the row identified by the primary key.

    Uses the multi-column form ``SET (a, b) = (%s, %s)`` with one
    positional placeholder per value, then bumps the statement counter.
    """
    stmt = sql.SQL("UPDATE {} SET ({}) = ({}) WHERE {} = {}").format(
        sql.Identifier(self.table),
        sql.SQL(',').join(map(sql.Identifier, columns)),
        sql.SQL(', ').join(sql.Placeholder() * len(columns_value)),
        sql.Identifier(self.primarykey),
        sql.Placeholder(),
    )
    # Column values first, primary key last -- matching placeholder order.
    params = (*columns_value, primaryKey_value)
    self._execute(stmt, params)
    self._counter += 1
def not_between(node, eval_type, given_variables):
    """Process the not_between operator.

    :param node: Formula node
    :param eval_type: Type of evaluation
    :param given_variables: Dictionary of var/values
    :return: Boolean result, SQL query, or text result
    """
    if eval_type == EVAL_EXP:
        # Python evaluation: a null value is never "not between".
        varvalue = get_value(node, given_variables)
        if value_is_null(varvalue):
            return False
        if node['type'] not in ('integer', 'double', 'datetime'):
            raise Exception(
                ugettext(
                    'Evaluation error: Type {0} not allowed '
                    + 'with operator BETWEEN',
                ).format(node['type']),
            )
        lower = GET_CONSTANT[node['type']](node['value'][0])
        upper = GET_CONSTANT[node['type']](node['value'][1])
        return not (lower <= varvalue <= upper)
    if eval_type == EVAL_SQL:
        # SQL evaluation: NULL fields are excluded explicitly.
        query = sql.SQL(
            '({0} NOT BETWEEN {1} AND {2}) AND ({0} is not null)',
        ).format(
            OnTaskDBIdentifier(node['field']),
            sql.Placeholder(),
            sql.Placeholder(),
        )
        return query, [str(number) for number in node['value']]
    # Text evaluation
    return '{0} < {1} or {0} > {2} or {0} is empty'.format(
        node['field'],
        str(node['value'][0]),
        str(node['value'][1]),
    )
def update_detail(self, obj, cursor=None):
    """Persist changes to a 'details' row keyed by its ``detail_id``.

    Args:
        obj: Object exposing ``to_dict()``; must include 'detail_id'.
            Every other key becomes a SET column with a matching named
            placeholder.
        cursor: Optional open cursor; when omitted the statement goes
            through ``self.do_query``.

    Raises:
        Exception: If the object carries no 'detail_id'.
    """
    table_name = sql.Identifier('details')
    obj_dict = obj.to_dict()
    if 'detail_id' not in obj_dict:
        # Fix: corrected the typo in the message ("wtihout").
        raise Exception("attempting to update a detail without a detail id")
    # The key column is only used in WHERE, never in the SET list.
    del obj_dict['detail_id']
    where_sql = sql.SQL("{0}={1}").format(sql.Identifier('detail_id'),
                                          sql.Placeholder('detail_id'))
    set_sql = sql.SQL(', ').join([
        sql.SQL("{0}={1}").format(sql.Identifier(k), sql.Placeholder(k))
        for k in obj_dict.keys()
    ])
    update = sql.SQL('UPDATE {0} SET {1} WHERE {2}').format(
        table_name, set_sql, where_sql)
    # Execute with the full dict (including detail_id) so the WHERE
    # placeholder resolves.
    if cursor is not None:
        cursor.execute(update, obj.to_dict())
    else:
        self.do_query(update, obj.to_dict())
def not_ends_with(node, eval_type, given_variables):
    """Process the not_ends_width operator.

    :param node: Formula node
    :param eval_type: Type of evaluation
    :param given_variables: Dictionary of var/values
    :return: Boolean result, SQL query, or text result
    """
    constant = GET_CONSTANT.get(node['type'])(node['value'])
    if eval_type == EVAL_EXP:
        # Python evaluation: a null value never satisfies the operator.
        varvalue = get_value(node, given_variables)
        if value_is_null(varvalue):
            return False
        return not varvalue.endswith(constant)
    if eval_type == EVAL_SQL:
        # SQL evaluation: NULL fields count as "does not end with".
        query = sql.SQL('({0} NOT LIKE {1}) OR ({0} is null)').format(
            OnTaskDBIdentifier(node['field']),
            sql.Placeholder(),
        )
        return query, ['%' + node['value']]
    # Text evaluation
    return '{0} does not end with {1}'.format(node['field'], constant)
def contains(node, eval_type, given_variables):
    """Process the contains operator.

    :param node: Formula node
    :param eval_type: Type of evaluation
    :param given_variables: Dictionary of var/values
    :return: Boolean result, SQL query, or text result
    """
    constant = GET_CONSTANT.get(node['type'])(node['value'])
    if eval_type == EVAL_EXP:
        # Python evaluation: a null value never "contains" anything.
        varvalue = get_value(node, given_variables)
        if value_is_null(varvalue):
            return False
        return varvalue.find(constant) != -1
    if eval_type == EVAL_SQL:
        # SQL evaluation: substring match, NULLs excluded explicitly.
        query = sql.SQL('({0} LIKE {1}) AND ({0} is not null)').format(
            OnTaskDBIdentifier(node['field']),
            sql.Placeholder(),
        )
        return query, ['%' + node['value'] + '%']
    # Text evaluation
    return '{0} contains {1}'.format(node['field'], constant)
def upsert_weather_data(self, table_name: str = 'weather', **kwargs):
    """Insert or update one weather row keyed on (date, countrycode, gid).

    Every kwarg becomes an inserted column; on conflict, each non-key
    column is overwritten with the incoming value.
    """
    composite_key = ['date', 'countrycode', 'gid']
    self.check_if_gid_exists(kwargs)
    assignments = [
        sql.Composed([sql.Identifier(col), sql.SQL("="), sql.Placeholder(col)])
        for col in kwargs.keys() if col not in composite_key
    ]
    sql_query = sql.SQL(
        """INSERT INTO {table_name} ({insert_keys}) VALUES ({insert_data})
        ON CONFLICT (date, countrycode, gid) DO UPDATE SET {update_data}
        RETURNING * """).format(
        table_name=sql.Identifier(table_name),
        insert_keys=sql.SQL(",").join(
            sql.Identifier(col) for col in kwargs.keys()),
        insert_data=sql.SQL(",").join(
            sql.Placeholder(col) for col in kwargs.keys()),
        update_data=sql.SQL(",").join(assignments))
    self.execute(sql_query, kwargs)
    logger.debug("Updating {} table with data: {}".format(
        table_name, list(kwargs.values())))
def upsert_table_data(self, table_name: str, data_keys: List, **kwargs):
    """Insert or update one row of *table_name*.

    Conflicts are detected on the composite key (date, country,
    countrycode, COALESCEd adm_area_1..3, source); on conflict only the
    columns listed in *data_keys* are overwritten with the incoming
    values.  All kwargs become inserted columns with named placeholders.
    """
    self.check_if_gid_exists(kwargs)
    sql_query = sql.SQL(
        """INSERT INTO {table_name} ({insert_keys}) VALUES ({insert_data})
        ON CONFLICT (date, country, countrycode, COALESCE(adm_area_1, ''),
        COALESCE(adm_area_2, ''), COALESCE(adm_area_3, ''), source)
        DO UPDATE SET {update_data}
        RETURNING *""").format(
        table_name=sql.Identifier(table_name),
        insert_keys=sql.SQL(",").join(
            map(sql.Identifier, kwargs.keys())),
        insert_data=sql.SQL(",").join(
            map(sql.Placeholder, kwargs.keys())),
        # Only the data columns (not key columns) get refreshed.
        update_data=sql.SQL(",").join(
            sql.Composed(
                [sql.Identifier(k), sql.SQL("="), sql.Placeholder(k)])
            for k in kwargs.keys() if k in data_keys))
    self.execute(sql_query, kwargs)
    logger.debug("Updating {} table with data: {}".format(
        table_name, list(kwargs.values())))
def seed_table(self, connection):
    """Load ``self.data_csv`` (when configured) into this table.

    Each CSV row becomes one INSERT; empty cells are stored as NULL.
    The whole load is committed as a single transaction.
    """
    if not self.data_csv:
        return
    insert_stmt = sql.SQL(
        "INSERT INTO {table} ({columns}) VALUES({placeholders});")
    with connection.cursor() as cursor, \
            open(self.data_csv) as csv_file:
        for record in csv.DictReader(csv_file):
            fields, cells = zip(*record.items())
            # Empty strings in the CSV mean SQL NULL.
            cells = [cell if cell != '' else None for cell in cells]
            cursor.execute(
                insert_stmt.format(
                    table=sql.Identifier(self.name),
                    columns=sql.SQL(', ').join(map(sql.Identifier, fields)),
                    placeholders=sql.SQL(', ').join(
                        sql.Placeholder() * len(fields)),
                ),
                cells)
    connection.commit()
def update_weather(self, conn, source_map, records):
    """Bulk-upsert weather *records* into the weather table.

    Each record is tagged with its numeric source id, records are
    grouped into batches sharing the same field set, and each batch is
    written with psycopg2's ``execute_values`` using the class-level
    statement templates.  An optional cleanup statement runs last.
    """
    # Resolve each record's source name to its database id.
    for r in records:
        r['source_id'] = source_map[r['source']]
    # make_batches groups records by the tuple of fields they carry, so
    # one VALUES template fits every record in a batch.
    for fields, records in self.make_batches(records).items():
        logger.info(
            "Exporting %d records with fields %s",
            len(records), tuple(fields))
        stmt = self.UPDATE_WEATHER_STMT.format(
            weather_table=sql.Identifier(self.WEATHER_TABLE),
            constraint=sql.Identifier(f'{self.WEATHER_TABLE}_key'),
            fields=sql.SQL(', ').join(
                sql.Identifier(f) for f in fields),
            conflict_updates=sql.SQL(', ').join(
                sql.SQL(self.UPDATE_WEATHER_CONFLICT_UPDATE).format(
                    field=sql.Identifier(f),
                    weather_table=sql.Identifier(self.WEATHER_TABLE))
                for f in fields),
        )
        # Per-row template: timestamp and source_id first, then one
        # named placeholder per batch field.
        template = sql.SQL(
            "(%(timestamp)s, %(source_id)s, {values})"
        ).format(
            values=sql.SQL(', ').join(
                sql.Placeholder(f) for f in fields),
        )
        with conn.cursor() as cur:
            execute_values(cur, stmt, records, template, page_size=1000)
    # Optional post-load maintenance statement, when configured.
    if self.UPDATE_WEATHER_CLEANUP:
        with conn.cursor() as cur:
            cur.execute(self.UPDATE_WEATHER_CLEANUP)
def insert_data(table, columns, row, types, conn):
    """Insert one *row* into *table*, coercing empty cells by column type.

    Args:
        table (str): Target table name.  NOTE(review): interpolated into
            the SQL text unescaped, so it must come from trusted code --
            consider sql.Identifier if the table name can be external.
        columns (list): Column names, parallel to *row*.
        row (iterable): Raw cell values.
        types (dict): Column name -> type name ('text', 'timestamp', ...).
        conn: Open psycopg2 connection; committed on success.
    """
    insert_sql = sql.SQL("insert into " + table + " ({}) values ({})").format(
        sql.SQL(', ').join(map(sql.Identifier, columns)),
        sql.SQL(', ').join(sql.Placeholder() * len(columns)))
    values = [x for x in row]
    # Fixes: identity comparison ('is None' instead of '== None'),
    # enumerate() instead of a manual index counter, and the unused
    # numberType list removed.
    for i, column in enumerate(columns):
        column_type = types.get(column)
        if values[i] is None or values[i] == '':
            # Empty text stays ''; any other empty cell becomes NULL.
            values[i] = '' if column_type == 'text' else None
        elif column_type == 'timestamp':
            # Strip trailing timezone junk such as "GMT-07:00 (PDT)".
            values[i] = re.sub('GMT-.*$', '', values[i])
    cursor = conn.cursor()
    cursor.execute(insert_sql, values)
    conn.commit()
def select_sequences(conn, cur, req):
    """Create a temp table of metadata rows matching the request filters.

    Filters applied: location ids, a [start_date, end_date] window on
    "collection_date", and arbitrary field/value lists from
    req['selected_metadata_fields'].  Returns the temp table's name;
    the table is dropped automatically at commit (ON COMMIT DROP).
    NOTE(review): assumes req always carries location_ids and
    selected_metadata_fields -- a missing key would raise here.
    """
    location_ids = req.get("location_ids", None)
    # Flatten {group: [ids]} into one combined id list.
    all_location_ids = sum(location_ids.values(), [])
    start_date = pd.to_datetime(req.get("start_date", None))
    end_date = pd.to_datetime(req.get("end_date", None))
    # Metadata filters will come in the form of a JSON object
    # of { metadata_field: [metadata_values] }
    selected_metadata_fields = req.get("selected_metadata_fields", None)
    metadata_filters = []
    # Build dictionary of metadata value tuples to inject
    metadata_vals = {}
    for md_key, md_vals in selected_metadata_fields.items():
        # Don't process if the list of metadata values is empty
        if not md_vals:
            continue
        metadata_filters.append(
            sql.SQL("{field} IN {vals}").format(
                field=sql.Identifier(md_key), vals=sql.Placeholder(md_key)
            )
        )
        # IN needs a tuple parameter; values are compared as strings.
        metadata_vals[md_key] = tuple([str(val) for val in md_vals])
    metadata_filters = sql.SQL(" AND ").join(metadata_filters)
    # Append a trailing AND so the fragment composes with the date
    # clause below; skip entirely when there are no metadata filters.
    if metadata_filters.as_string(conn):
        metadata_filters = sql.Composed([metadata_filters, sql.SQL(" AND ")])
    # Random table name to avoid collisions between concurrent queries.
    temp_table_name = "query_" + uuid.uuid4().hex
    cur.execute(
        sql.SQL(
            """
            CREATE TEMP TABLE {temp_table_name}
            ON COMMIT DROP
            AS (
                SELECT m.*
                FROM "metadata" m
                WHERE {metadata_filters}
                    "collection_date" >= %(start_date)s AND
                    "collection_date" <= %(end_date)s AND
                    "location_id" IN %(location_ids)s
            );
            """
        ).format(
            temp_table_name=sql.Identifier(temp_table_name),
            metadata_filters=metadata_filters,
        ),
        {
            "start_date": start_date,
            "end_date": end_date,
            "location_ids": tuple(all_location_ids),
            # Inject metadata value filters
            **metadata_vals,
        },
    )
    return temp_table_name
def save(self, inst): """Stores `inst` in current storage.""" # Save to metadata table if not self.table_exists(inst.meta.uri): self.table_create(inst.meta, inst.dimensions.values()) colnames = ['uuid', 'uri', 'meta', 'dims' ] + [p.name for p in inst.meta['properties']] q = sql.SQL('INSERT INTO {0} ({1}) VALUES ({2});').format( sql.Identifier(inst.meta.uri), sql.SQL(', ').join(map(sql.Identifier, colnames)), (sql.Placeholder() * len(colnames)).join(', ')) values = [ inst.uuid, inst.uri, inst.meta.uri, list(inst.dimensions.values()), ] + [ dlite.standardise(v, asdict=False) for v in inst.properties.values() ] try: self.cur.execute(q, values) except psycopg2.IntegrityError: self.conn.rollback() # Instance already in database return # Save to uuidtable if not self.table_exists('uuidtable'): self.uuidtable_create() q = sql.SQL('INSERT INTO uuidtable (uuid, meta) VALUES (%s, %s);') self.cur.execute(q, [inst.uuid, inst.meta.uri]) self.conn.commit()
def not_equal(node, eval_type, given_variables):
    """Process the not equal operator.

    :param node: Formula node
    :param eval_type: Type of evaluation
    :param given_variables: Dictionary of var/values
    :return: Boolean result, SQL query, or text result
    """
    constant = GET_CONSTANT.get(node['type'])(node['value'])
    if eval_type == EVAL_EXP:
        # Python evaluation: a null value is never "not equal".
        varvalue = get_value(node, given_variables)
        if value_is_null(varvalue):
            return False
        return varvalue != constant
    if eval_type == EVAL_SQL:
        # SQL evaluation: NULL fields are excluded by the OR clause.
        query = sql.SQL('({0} != {1}) OR ({0} is null)').format(
            OnTaskDBIdentifier(node['field']),
            sql.Placeholder(),
        )
        return query, [str(constant)]
    # Text evaluation
    return '{0} ≠ {1} and not empty'.format(
        node['field'],
        constant,
    )
async def findEntries(self, database, searchTerms, columns):
    """Retrieve selected columns of rows matching all search terms.

    Parameters
    ----------
    database : str
        The name of the table the entries are retrieved from (also
        used to qualify the column identifiers).
    searchTerms : dict
        Column name(s) and values every returned entry must contain,
        ANDed together.
    columns : list
        The column name(s) of the entry/ies that is/are returned.

    Returns
    -------
    list
        Tuples containing the information of every matching entry.
    """
    selected = sql.SQL(', ').join(
        sql.Identifier(database, col) for col in columns)
    predicate = sql.SQL(' AND ').join(
        sql.Composed([
            sql.Identifier(database, field),
            sql.SQL(" = "),
            sql.Placeholder(),
        ])
        for field in searchTerms)
    command = sql.SQL('''
        SELECT {columns}
        FROM {table}
        WHERE {conditions}
        ''').format(columns=selected,
                    table=sql.Identifier(database),
                    conditions=predicate)
    self.cursor.execute(command, list(searchTerms.values()))
    return self.cursor.fetchall()