def flush(self): query = """ DELETE FROM cache; """ with self.connect() as cursor: cursor.execute(query) logger.debug('Flushed PostgreSQL cache tables')
def initialize_schema(self):
    """Create PostgreSQL tables, and run necessary schema migrations.

    .. note::

        Relies on JSONB fields, available in recent versions of PostgreSQL.
    """
    version = self._get_installed_version()
    if not version:
        # Fresh install: check database settings, then create the full schema.
        self._check_database_encoding()
        self._check_database_timezone()
        self._execute_sql_file('schema.sql')
        logger.info('Created PostgreSQL storage tables '
                    '(version %s).' % self.schema_version)
        return

    logger.debug('Detected PostgreSQL schema version %s.' % version)
    migrations = [(v, v + 1) for v in range(version, self.schema_version)]
    if not migrations:
        logger.info('Schema is up-to-date.')

    for migration in migrations:
        # Check order of migrations.
        expected = migration[0]
        current = self._get_installed_version()
        error_msg = "Expected version %s. Found version %s."
        assert expected == current, error_msg % (expected, current)

        logger.info('Migrate schema from version %s to %s.' % migration)
        filepath = 'migration_%03d_%03d.sql' % migration
        self._execute_sql_file(os.path.join('migrations', filepath))

    logger.info('Schema migration done.')
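# The sketch below is illustrative only (not part of the backend above): it
# shows how the (version, version + 1) pairs computed in initialize_schema()
# translate into migration filenames. The installed version (3) and target
# schema_version (6) are assumed values.
installed_version, target_version = 3, 6
migrations = [(v, v + 1) for v in range(installed_version, target_version)]
filenames = ['migration_%03d_%03d.sql' % pair for pair in migrations]
print(migrations)  # [(3, 4), (4, 5), (5, 6)]
print(filenames)   # ['migration_003_004.sql', 'migration_004_005.sql',
                   #  'migration_005_006.sql']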
@contextlib.contextmanager
def connect(self, readonly=False):
    """Connect to the database and instantiate a cursor.

    On exiting the context manager, a COMMIT is performed on the current
    transaction if everything went well. Otherwise, the transaction is
    rolled back and everything is cleaned up.

    If the database could not be reached, a 503 error is raised.
    """
    conn = None
    cursor = None
    try:
        conn = self.pool.getconn()
        conn.autocommit = readonly
        options = dict(cursor_factory=psycopg2.extras.DictCursor)
        cursor = conn.cursor(**options)
        # Start context
        yield cursor
        # End context
        if not readonly:
            conn.commit()
    except psycopg2.Error as e:
        if cursor:
            logger.debug(cursor.query)
        logger.error(e)
        if conn and not conn.closed:
            conn.rollback()
        raise exceptions.BackendError(original=e)
    finally:
        if cursor:
            cursor.close()
        if conn and not conn.closed:
            self.pool.putconn(conn, close=self._always_close)
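# Usage sketch for the connect() context manager above. The helper name
# count_records, the `backend` instance, and the table name are illustrative
# assumptions, not taken from the original code.
def count_records(backend):
    # readonly=True enables autocommit, so no COMMIT is issued on exit.
    with backend.connect(readonly=True) as cursor:
        cursor.execute("SELECT COUNT(*) AS total FROM records;")
        row = cursor.fetchone()
    # DictCursor rows can be accessed by column name.
    return row['total']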
def flush(self): query = """ DELETE FROM user_principals; DELETE FROM access_control_entries; """ with self.connect() as cursor: cursor.execute(query) logger.debug('Flushed PostgreSQL permission tables')
def flush(self): query = """ DELETE FROM cache; """ # Since called outside request (e.g. tests), force commit. with self.client.connect(force_commit=True) as conn: conn.execute(query) logger.debug('Flushed PostgreSQL cache tables')
def flush(self): query = """ DELETE FROM user_principals; DELETE FROM access_control_entries; """ # Since called outside request (e.g. tests), force commit. with self.client.connect(force_commit=True) as conn: conn.execute(query) logger.debug('Flushed PostgreSQL permission tables')
def flush(self, auth=None):
    """Delete records from tables without destroying schema.

    Mainly used in test suites.
    """
    query = """
    DELETE FROM deleted;
    DELETE FROM records;
    DELETE FROM metadata;
    """
    with self.client.connect(force_commit=True) as conn:
        conn.execute(query)
    logger.debug('Flushed PostgreSQL storage tables')
def flush(self): """Delete records from tables without destroying schema. Mainly used in tests suites. """ query = """ DELETE FROM deleted; DELETE FROM records; DELETE FROM metadata; """ with self.connect() as cursor: cursor.execute(query) logger.debug('Flushed PostgreSQL storage tables')
def flush(self, auth=None):
    """Delete records from tables without destroying schema.

    Mainly used in test suites.
    """
    query = """
    DELETE FROM deleted;
    DELETE FROM records;
    DELETE FROM timestamps;
    DELETE FROM metadata;
    """
    with self.client.connect(force_commit=True) as conn:
        conn.execute(query)
    logger.debug('Flushed PostgreSQL storage tables')
def wrapped(*args, **kwargs):
    try:
        return func(*args, **kwargs)
    except RequestException as e:
        status_code = body = None
        if e.response is not None:
            status_code = e.response.status_code
            try:
                # Prefer the JSON error payload; fall back to raw content.
                body = e.response.json()
            except ValueError:
                body = e.response.content

        if status_code == 404:
            record_id = '?'
            raise exceptions.RecordNotFoundError(record_id)

        logger.debug(body)
        raise exceptions.BackendError(original=e)
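# Hedged sketch of the enclosing decorator implied by the closure above:
# `wrapped` captures `func`, so it must be returned by an outer function. The
# decorator name wrap_http_error is a hypothetical illustration, not taken
# from the source.
import functools

def wrap_http_error(func):
    """Translate requests errors into backend exceptions (see body above)."""
    @functools.wraps(func)  # preserve the decorated function's name/docstring
    def wrapped(*args, **kwargs):
        # Error-translating body as shown in the preceding snippet.
        return func(*args, **kwargs)
    return wrapped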
def initialize_schema(self):
    """Create PostgreSQL tables, and run necessary schema migrations.

    .. note::

        Relies on JSONB fields, available in recent versions of PostgreSQL.
    """
    version = self._get_installed_version()
    if not version:
        # Fresh install: check database settings, then create the full schema.
        self._check_database_encoding()
        self._check_database_timezone()
        self._execute_sql_file('schema.sql')
        logger.info('Created PostgreSQL storage tables '
                    '(version %s).' % self.schema_version)
        return

    logger.debug('Detected PostgreSQL schema version %s.' % version)
    migrations = [(v, v + 1) for v in range(version, self.schema_version)]
    if not migrations:
        logger.info('Schema is up-to-date.')

    for migration in migrations:
        # Check order of migrations.
        expected = migration[0]
        current = self._get_installed_version()
        error_msg = "Expected version %s. Found version %s."
        if expected != current:
            raise AssertionError(error_msg % (expected, current))

        logger.info('Migrate schema from version %s to %s.' % migration)
        filepath = 'migration_%03d_%03d.sql' % migration
        self._execute_sql_file(os.path.join('migrations', filepath))

    logger.info('Schema migration done.')