def adapt_bits(bits):
    """psycopg2 adapter function for ``bitstring.Bits``.

    Encode SQL parameters from ``bitstring.Bits`` instances to SQL strings.
    """
    if bits.length % 4 == 0:
        return ext.AsIs("X'%s'" % (bits.hex,))
    return ext.AsIs("B'%s'" % (bits.bin,))
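# A minimal usage sketch, not part of the original source: the adapter above
# only takes effect once it is registered with psycopg2. ``bitstring.Bits``
# and ``register_adapter`` exist as shown; the table and column names in the
# commented query are assumptions.
from bitstring import Bits
from psycopg2 import extensions as ext

ext.register_adapter(Bits, adapt_bits)
# cursor.execute("INSERT INTO flags (mask) VALUES (%s)", (Bits('0b101'),))
# -> the parameter is rendered as X'...' when the bit length is a multiple
#    of four, otherwise as B'...'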
def order_attr(self, col):
    """
    Select the column value from the ordering_order table for this
    specific scene

    :param col: column to select on
    :return: value
    """
    sql = ('SELECT %s '
           'FROM ordering_scene JOIN ordering_order '
           'ON ordering_order.id = ordering_scene.order_id '
           'WHERE ordering_scene.id = %s')

    log_sql = ''
    try:
        with db_instance() as db:
            log_sql = db.cursor.mogrify(sql, (db_extns.AsIs(col), self.id))
            db.select(sql, (db_extns.AsIs(col), self.id))
            ret = db[0][col]
    except DBConnectException as e:
        logger.critical('Error retrieving order_attr: {}\n'
                        'sql: {} \n'.format(e.message, log_sql))
        raise SceneException(e)
    except KeyError as e:
        logger.critical('Error order_attr returned no results\n'
                        'sql: {}'.format(log_sql))
        raise SceneException('Key Error: {}'.format(e.message))

    return ret
def update(self, att, val):
    """
    Update a specified column value for this Scene object with a new value

    :param att: column to update
    :param val: new value
    :return: updated value from self
    """
    sql = 'update ordering_scene set %s = %s where id = %s'

    log_sql = ''
    try:
        with db_instance() as db:
            log_sql = db.cursor.mogrify(sql, (db_extns.AsIs(att), val,
                                              self.id))
            logger.info("\n*** Updating scene: \n" + log_sql + '\n***\n')
            db.execute(sql, (db_extns.AsIs(att), val, self.id))
            db.commit()
    except DBConnectException as e:
        raise SceneException('Error updating scene: {}\nSQL: {}'.format(
            e.message, log_sql))

    self.__setattr__(att, val)
    return self.__getattribute__(att)
def update(self, att, val):
    """
    Update a specified column value for this Order object

    :param att: column to update
    :param val: new value
    :return: updated value from self
    """
    sql = 'update ordering_order set %s = %s where id = %s'

    log_sql = ''
    try:
        with db_instance() as db:
            log_sql = db.cursor.mogrify(sql, (db_extns.AsIs(att), val,
                                              self.id))
            logger.info(log_sql)
            db.execute(sql, (db_extns.AsIs(att), val, self.id))
            db.commit()
    except DBConnectException as e:
        logger.debug('Error updating order: {}\nSQL: {}'.format(
            e.message, log_sql))

    self.__setattr__(att, val)
    return self.__getattribute__(att)
def get_5_question(cursor, limit, order_by, order_direction):
    query = """
        SELECT * FROM question
        ORDER BY %(order_by)s %(order_direction)s
        LIMIT %(limit)s
        """
    cursor.execute(
        query, {
            "order_by": extensions.AsIs(order_by),
            "order_direction": extensions.AsIs(order_direction),
            "limit": limit
        })
    return cursor.fetchall()
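# Hedged sketch, not from the original source: AsIs splices order_by and
# order_direction into the SQL verbatim, so callers should validate them
# first. The allowed column names below are assumptions for illustration.
ALLOWED_ORDER_COLUMNS = {"id", "title", "submission_time"}
ALLOWED_DIRECTIONS = {"ASC", "DESC"}


def get_5_question_checked(cursor, limit, order_by, order_direction):
    if order_by not in ALLOWED_ORDER_COLUMNS:
        raise ValueError("unexpected ORDER BY column: %r" % order_by)
    if order_direction.upper() not in ALLOWED_DIRECTIONS:
        raise ValueError("unexpected sort direction: %r" % order_direction)
    return get_5_question(cursor, limit, order_by, order_direction.upper())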
def get(cls, col_name, scene_name, orderid):
    """
    Retrieve a value for a particular column based on the long name of
    the order

    :param col_name: column value to retrieve
    :param scene_name: scene/collection id
    :param orderid: long name for the related order, [email protected]
    :return: column value
    """
    sql = ('select %s '
           'from ordering_scene '
           'join ordering_order '
           'on ordering_order.id = ordering_scene.order_id '
           'where ordering_scene.name = %s '
           'and ordering_order.orderid = %s')

    if '.' in col_name:
        _, col = col_name.split('.')
    else:
        col = col_name

    log_sql = ''
    try:
        with db_instance() as db:
            log_sql = (db.cursor
                       .mogrify(sql, (db_extns.AsIs(col_name),
                                      scene_name, orderid)))
            logger.info(log_sql)
            db.select(sql, (db_extns.AsIs(col_name), scene_name, orderid))
            ret = db[0][col]
    except DBConnectException as e:
        logger.debug('Error scene get\n'
                     'msg: {0}\n'
                     'sql: {1}'.format(e.message, log_sql))
        raise SceneException(e.message)
    except KeyError as e:
        logger.debug('Scene.get returned no results\n'
                     'sql: {}'.format(log_sql))
        raise SceneException('Key Error: {}'.format(e.message))

    return ret
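# Illustrative call, not from the original source; the scene name and order
# id are made up. A qualified column name such as 'ordering_scene.status'
# is split on '.' so the bare column can be read back from the result row.
# status = Scene.get('ordering_scene.status',
#                    'LC08_L1TP_029030_20161008_20170220_01_T1',
#                    'some_user@host_101015')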
def save(self):
    """
    Upsert self to the database
    """
    sql = ('INSERT INTO ordering_order %s VALUES %s '
           'ON CONFLICT (orderid) '
           'DO UPDATE '
           'SET %s = %s')

    attr_tup = ('orderid', 'status', 'order_source', 'product_options',
                'product_opts', 'order_type', 'initial_email_sent',
                'completion_email_sent', 'note', 'completion_date',
                'order_date', 'user_id', 'ee_order_id', 'email', 'priority')

    vals = tuple(self.__getattribute__(v)
                 if v != 'product_opts'
                 else json.dumps(self.__getattribute__(v))
                 for v in attr_tup)

    cols = '({})'.format(','.join(attr_tup))

    log_sql = ''
    try:
        with db_instance() as db:
            log_sql = db.cursor.mogrify(
                sql, (db_extns.AsIs(cols), vals, db_extns.AsIs(cols), vals))
            db.execute(
                sql, (db_extns.AsIs(cols), vals, db_extns.AsIs(cols), vals))
            db.commit()
            logger.info('Saved updates to order id: {}\n'
                        'order.id: {}\nsql: {}\nargs: {}'.format(
                            self.orderid, self.id, log_sql,
                            zip(attr_tup, vals)))
    except DBConnectException as e:
        logger.debug('Error saving order: {}\nsql: {}'.format(
            e.message, log_sql))
        raise OrderException(e)

    new = Order.find(self.id)
    for att in attr_tup:
        self.__setattr__(att, new.__getattribute__(att))
def save(self):
    """
    Save the current configuration of the scene object to the DB
    """
    sql = 'UPDATE ordering_scene SET %s = %s WHERE id = %s'

    attr_tup = ('status', 'cksum_download_url', 'log_file_contents',
                'processing_location', 'retry_after', 'job_name', 'note',
                'retry_count', 'sensor_type', 'product_dload_url',
                'tram_order_id', 'completion_date', 'ee_unit_id',
                'retry_limit', 'cksum_distro_location',
                'product_distro_location', 'reported_orphan', 'orphaned',
                'failed_lta_status_update', 'download_size',
                'status_modified')

    vals = tuple(self.__getattribute__(v) for v in attr_tup)
    cols = '({})'.format(','.join(attr_tup))

    log_sql = ''
    try:
        with db_instance() as db:
            log_sql = db.cursor.mogrify(
                sql, (db_extns.AsIs(cols), vals, self.id))
            db.execute(sql, (db_extns.AsIs(cols), vals, self.id))
            db.commit()
            logger.info('\n*** Saved updates to scene id: {}, name:{}\n'
                        'sql: {}\n args: {}\n***'.format(
                            self.id, self.name, log_sql,
                            zip(attr_tup, vals)))
    except DBConnectException as e:
        logger.critical("Error saving scene: {}\n"
                        "sql: {}".format(e.message, log_sql))
        raise SceneException(e)

    new = Scene.where({'id': self.id})[0]
    for att in attr_tup:
        self.__setattr__(att, new.__getattribute__(att))
def bulk_update(cls, ids=None, updates=None):
    """
    Update a list of scenes with new values for the given attributes

    :param ids: ids of scenes to update
    :param updates: attributes to update
    :return: True
    """
    if not isinstance(ids, (list, tuple)):
        raise TypeError('Scene.bulk_update ids should be a list')
    if not isinstance(updates, dict):
        raise TypeError('Scene.bulk_update updates should be a dict')

    sql = 'UPDATE ordering_scene SET %s = %s WHERE id in %s'

    fields = '({})'.format(','.join(updates.keys()))
    vals = tuple(updates.values())
    ids = tuple(ids)

    if ",)" in sql:
        sql = sql.replace(",)", ")")

    log_sql = ''
    try:
        with db_instance() as db:
            log_sql = db.cursor.mogrify(sql, (db_extns.AsIs(fields),
                                              vals, ids))
            logger.info('\n*** Bulk Updating scenes: \n' + log_sql +
                        '\n***\n')
            db.execute(sql, (db_extns.AsIs(fields), vals, ids))
            db.commit()
    except DBConnectException as e:
        logger.critical('Error scene bulk_update: {}\nSQL: {}'.format(
            e.message, log_sql))
        raise SceneException(e)

    return True
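# Illustrative call, not from the original source; the ids and the status
# value are made up. ``fields`` above becomes AsIs('(status)') and the ids
# tuple feeds the ``WHERE id in %s`` clause.
# Scene.bulk_update(ids=[101, 102, 103], updates={'status': 'retry'})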
def adapt_wkt(value):
    return pgext.AsIs("ST_GeomFromText('%s')" % value.wkt)
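# Hedged sketch, not part of the original source: registering adapt_wkt so
# geometry objects are adapted automatically. shapely is an assumption; any
# object exposing a ``.wkt`` attribute works with the adapter above.
from psycopg2 import extensions as pgext
from shapely.geometry import Point

pgext.register_adapter(Point, adapt_wkt)
# cursor.execute("INSERT INTO places (geom) VALUES (%s)", (Point(1.0, 2.0),))
# -> the parameter is rendered as ST_GeomFromText('POINT (1 2)')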
def to_int(x):
    """psycopg2 interpolation to int"""
    return extensions.AsIs('%d' % int(x))
def to_float(x):
    """psycopg2 interpolation to float"""
    return extensions.AsIs('%f' % float(x))
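# Hedged usage sketch, not from the original source: these helpers coerce a
# value and then have psycopg2 splice it in verbatim, so the interpolated
# text is guaranteed to be a bare numeric literal rather than a quoted
# string. The table name and variables below are assumptions.
# cursor.execute("SELECT * FROM events OFFSET %s LIMIT %s",
#                (to_int(offset), to_int(page_size)))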
import logging

_logger = logging.getLogger(__name__)

import re

from psycopg2.pool import ThreadedConnectionPool

from .sqlblock import BaseSQLBlock
from .dtable import dsequence

import psycopg2.extensions as _pgext

# _pgext.register_type(_pgext.UNICODE)
# _pgext.register_type(_pgext.UNICODEARRAY)

# register postgresql type dsequence
_pgext.register_adapter(dsequence, lambda seq: _pgext.AsIs(repr(seq.value)))


class jsonb:
    pass


class PostgreSQLBlock(BaseSQLBlock):

    _conn_pools = {}

    def __init__(self, dsn='DEFAULT', autocommit=False, record_type=None):
        super(PostgreSQLBlock, self).__init__('postgres', dsn, autocommit,
                                              record_type)

    @classmethod
    def set_dsn(cls, **kwargs):
def _unary(self, operator):
    # This is a total hack, but you need to combine a raw empty space with
    # the current node, in reverse order, with the connector being the
    # unary operator you want to apply.
    return self._combine(ext.AsIs(''), operator, True)
tables_df = pd.read_sql('''
    SELECT tablename
    FROM pg_catalog.pg_tables
    WHERE tableowner=%(user)s
    ''',
    tm351_cleanup_conn,
    params={'user': DB_USER_CLEANUP})

# And DROP those tables. Use CASCADE to get rid of any
# dependencies
c = tm351_cleanup_conn.cursor()
for t in list(tables_df['tablename'].values):
    print("DROPping table {}".format(t))
    c.execute("DROP TABLE IF EXISTS %(tablename)s CASCADE;",
              {'tablename': pge.AsIs(t)})
c.close()
tm351_cleanup_conn.commit()

# Now we need to populate the tables in the tm351_hospital schema
c = tm351_cleanup_conn.cursor()

# (Re-)create the schema:
print("Recreating the tm351_hospital schema")
c.execute("DROP SCHEMA IF EXISTS tm351_hospital CASCADE;")
c.execute("CREATE SCHEMA tm351_hospital;")

print("Populating the tm351_hospital schema")
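# Alternative worth noting, not in the original source: psycopg2 >= 2.7
# provides psycopg2.sql, which quotes identifiers properly instead of
# pasting them in raw with AsIs. A sketch of the same DROP loop using it,
# assuming the connection and tables_df from above:
from psycopg2 import sql

cur = tm351_cleanup_conn.cursor()
for t in list(tables_df['tablename'].values):
    cur.execute(sql.SQL("DROP TABLE IF EXISTS {} CASCADE;")
                .format(sql.Identifier(t)))
cur.close()
tm351_cleanup_conn.commit()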