def setUp(self):
    '''Create a fresh database (dropping the old if necessary).

    If PgTestSetup._reset_db is False and the (template, dbname) pair is
    unchanged since the last run, the existing database is kept and only
    its sequences are reset; otherwise the database is dropped and
    re-cloned from the template.
    '''
    # This is now done globally in test.py
    #installFakeConnect()
    # A change of template or target name invalidates the cached database.
    if (self.template, self.dbname) != PgTestSetup._last_db:
        PgTestSetup._reset_db = True
    if not PgTestSetup._reset_db:
        # The database doesn't need to be reset. We reset the sequences
        # anyway (because they might have been incremented even if
        # nothing was committed), making sure not to disturb the
        # 'committed' flag, and we're done.
        con = self.superuser_connection()
        cur = con.cursor()
        if self.reset_sequences_sql is None:
            resetSequences(cur)
        else:
            cur.execute(self.reset_sequences_sql)
        con.commit()
        con.close()
        ConnectionWrapper.committed = False
        ConnectionWrapper.dirty = False
        return
    self.dropDb()
    # Take out an external lock on the template to avoid causing
    # contention and impeding other processes (pg performs poorly
    # when performing concurrent create db from a single template).
    pid = os.getpid()
    start = time.time()
    # NOTE(review): the acquisition loop below retries for up to 30
    # seconds (see the 30.0 deadline); an older comment here said 10.
    debug = False
    if debug:
        sys.stderr.write('%0.2f starting %s\n' % (
            start, pid, ))
    l = None
    lockname = '/tmp/lp.createdb.%s' % (self.template, )
    # Wait for the external lock. Most LP tests use the
    # DatabaseLayer which does a double-indirect: it clones the
    # launchpad_ftest_template into a per-test runner template, so
    # we don't have much template contention.
    # However there are a few tests in LP which do use template1 and
    # will contend a lot. Cloning template1 takes 0.2s on a modern
    # machine, so even a modest 8-way server will trivially backlog
    # on db cloning.
    # The 30 second time is enough to deal with the backlog on the
    # known template1 using tests.
    while time.time() - start < 30.0:
        try:
            if debug:
                sys.stderr.write('taking %s\n' % (pid, ))
            l = WriteLock(lockname)
            if debug:
                sys.stderr.write('%0.2f taken %s\n' % (
                    time.time(), pid, ))
            break
        except LockContention:
            if debug:
                sys.stderr.write('blocked %s\n' % (pid, ))
            # Random sleep to de-synchronise contending processes.
            time.sleep(random.random())
    if l is None:
        # Deadline expired without ever acquiring the lock.
        raise LockContention(lockname)
    try:
        # The clone may be delayed if gc has not disconnected other
        # processes which have done a recent clone. So provide a spin
        # with an exponential backoff.
        attempts = 10
        for counter in range(0, attempts):
            if debug:
                sys.stderr.write("%0.2f connecting %s %s\n" % (
                    time.time(), pid, self.template))
            con = self.superuser_connection(self.template)
            try:
                # Isolation level 0 is psycopg2 autocommit; CREATE
                # DATABASE cannot run inside a transaction block.
                con.set_isolation_level(0)
                cur = con.cursor()
                try:
                    _start = time.time()
                    try:
                        # NOTE(review): dbname/template are interpolated
                        # directly into the SQL. Acceptable only because
                        # they come from the test harness, not from
                        # untrusted input.
                        cur.execute("CREATE DATABASE %s TEMPLATE=%s "
                                    "ENCODING='UNICODE'" % (
                                        self.dbname, self.template))
                        # Try to ensure our cleanup gets invoked, even in
                        # the face of adversity such as the test suite
                        # aborting badly.
                        atexit.register(self.dropDb)
                        if debug:
                            sys.stderr.write("create db in %0.2fs\n" % (
                                time.time() - _start))
                        break
                    except psycopg2.DatabaseError as x:
                        if counter == attempts - 1:
                            # Out of retries; propagate the failure.
                            raise
                        x = str(x)
                        # Only "template still in use" errors are
                        # retryable; anything else is a real failure.
                        if 'being accessed by other users' not in x:
                            raise
                finally:
                    cur.close()
            finally:
                con.close()
            # Exponential backoff with jitter before the next attempt.
            duration = (2**counter) * random.random()
            if debug:
                sys.stderr.write(
                    '%0.2f busy:sleeping (%d retries) %s %s %s\n' % (
                        time.time(), counter, pid, self.template,
                        duration))
            # Let the server wrap up whatever was blocking the copy
            # of the template.
            time.sleep(duration)
        end = time.time()
        if debug:
            sys.stderr.write(
                '%0.2f (%0.2f) completed (%d retries) %s %s\n' % (
                    end, end - start, counter, pid, self.template))
    finally:
        l.unlock()
        if debug:
            sys.stderr.write('released %s\n' % (pid, ))
    # Fresh clone: mark the connection-tracking flags clean and remember
    # which (template, dbname) pair this database was built from so the
    # next setUp can decide whether a reset is needed.
    ConnectionWrapper.committed = False
    ConnectionWrapper.dirty = False
    PgTestSetup._last_db = (self.template, self.dbname)
    PgTestSetup._reset_db = False
def setUp(self):
    '''Create a fresh database (dropping the old if necessary).

    If PgTestSetup._reset_db is False and the (template, dbname) pair is
    unchanged since the last run, the existing database is kept and only
    its sequences are reset; otherwise the database is dropped and
    re-cloned from the template.
    '''
    # This is now done globally in test.py
    #installFakeConnect()
    # A change of template or target name invalidates the cached database.
    if (self.template, self.dbname) != PgTestSetup._last_db:
        PgTestSetup._reset_db = True
    if not PgTestSetup._reset_db:
        # The database doesn't need to be reset. We reset the sequences
        # anyway (because they might have been incremented even if
        # nothing was committed), making sure not to disturb the
        # 'committed' flag, and we're done.
        con = self.superuser_connection()
        cur = con.cursor()
        if self.reset_sequences_sql is None:
            resetSequences(cur)
        else:
            cur.execute(self.reset_sequences_sql)
        con.commit()
        con.close()
        ConnectionWrapper.committed = False
        ConnectionWrapper.dirty = False
        return
    self.dropDb()
    # Take out an external lock on the template to avoid causing
    # contention and impeding other processes (pg performs poorly
    # when performing concurrent create db from a single template).
    pid = os.getpid()
    start = time.time()
    # NOTE(review): the acquisition loop below retries for up to 30
    # seconds (see the 30.0 deadline); an older comment here said 10.
    debug = False
    if debug:
        sys.stderr.write('%0.2f starting %s\n' % (start, pid,))
    l = None
    lockname = '/tmp/lp.createdb.%s' % (self.template,)
    # Wait for the external lock. Most LP tests use the
    # DatabaseLayer which does a double-indirect: it clones the
    # launchpad_ftest_template into a per-test runner template, so
    # we don't have much template contention.
    # However there are a few tests in LP which do use template1 and
    # will contend a lot. Cloning template1 takes 0.2s on a modern
    # machine, so even a modest 8-way server will trivially backlog
    # on db cloning.
    # The 30 second time is enough to deal with the backlog on the
    # known template1 using tests.
    while time.time() - start < 30.0:
        try:
            if debug:
                sys.stderr.write('taking %s\n' % (pid,))
            l = WriteLock(lockname)
            if debug:
                sys.stderr.write('%0.2f taken %s\n' % (time.time(), pid,))
            break
        except LockContention:
            if debug:
                sys.stderr.write('blocked %s\n' % (pid,))
            # Random sleep to de-synchronise contending processes.
            time.sleep(random.random())
    if l is None:
        # Deadline expired without ever acquiring the lock.
        raise LockContention(lockname)
    try:
        # The clone may be delayed if gc has not disconnected other
        # processes which have done a recent clone. So provide a spin
        # with an exponential backoff.
        attempts = 10
        for counter in range(0, attempts):
            if debug:
                sys.stderr.write(
                    "%0.2f connecting %s %s\n" % (
                        time.time(), pid, self.template))
            con = self.superuser_connection(self.template)
            try:
                # Isolation level 0 is psycopg2 autocommit; CREATE
                # DATABASE cannot run inside a transaction block.
                con.set_isolation_level(0)
                cur = con.cursor()
                try:
                    _start = time.time()
                    try:
                        # NOTE(review): dbname/template are interpolated
                        # directly into the SQL. Acceptable only because
                        # they come from the test harness, not from
                        # untrusted input.
                        cur.execute(
                            "CREATE DATABASE %s TEMPLATE=%s "
                            "ENCODING='UNICODE'" % (
                                self.dbname, self.template))
                        # Try to ensure our cleanup gets invoked, even in
                        # the face of adversity such as the test suite
                        # aborting badly.
                        atexit.register(self.dropDb)
                        if debug:
                            sys.stderr.write(
                                "create db in %0.2fs\n" % (
                                    time.time() - _start))
                        break
                    except psycopg2.DatabaseError as x:
                        if counter == attempts - 1:
                            # Out of retries; propagate the failure.
                            raise
                        x = str(x)
                        # Only "template still in use" errors are
                        # retryable; anything else is a real failure.
                        if 'being accessed by other users' not in x:
                            raise
                finally:
                    cur.close()
            finally:
                con.close()
            # Exponential backoff with jitter before the next attempt.
            duration = (2 ** counter) * random.random()
            if debug:
                sys.stderr.write(
                    '%0.2f busy:sleeping (%d retries) %s %s %s\n' % (
                        time.time(), counter, pid, self.template,
                        duration))
            # Let the server wrap up whatever was blocking the copy
            # of the template.
            time.sleep(duration)
        end = time.time()
        if debug:
            sys.stderr.write(
                '%0.2f (%0.2f) completed (%d retries) %s %s\n'
                % (end, end - start, counter, pid, self.template))
    finally:
        l.unlock()
        if debug:
            sys.stderr.write('released %s\n' % (pid,))
    # Fresh clone: mark the connection-tracking flags clean and remember
    # which (template, dbname) pair this database was built from so the
    # next setUp can decide whether a reset is needed.
    ConnectionWrapper.committed = False
    ConnectionWrapper.dirty = False
    PgTestSetup._last_db = (self.template, self.dbname)
    PgTestSetup._reset_db = False
The sampledata does not update the current values of all the sequences used to populate the primary keys (this was removed to aid in merging changes to the sampledata). This script resets all of these sequences to the correct value based on the maximum value currently found in the corresponding table. """ __metaclass__ = type import _pythonpath from optparse import OptionParser from lp.services.database.postgresql import resetSequences from lp.services.database.sqlbase import connect from lp.services.scripts import db_options if __name__ == '__main__': parser = OptionParser() db_options(parser) (options, args) = parser.parse_args() if args: parser.error("Too many options given") if not options.dbname: parser.error("Required option --dbname not given") con = connect() resetSequences(con.cursor()) con.commit()
""" The sampledata does not update the current values of all the sequences used to populate the primary keys (this was removed to aid in merging changes to the sampledata). This script resets all of these sequences to the correct value based on the maximum value currently found in the corresponding table. """ __metaclass__ = type import _pythonpath from optparse import OptionParser from lp.services.database.postgresql import resetSequences from lp.services.database.sqlbase import connect from lp.services.scripts import db_options if __name__ == '__main__': parser = OptionParser() db_options(parser) (options, args) = parser.parse_args() if args: parser.error("Too many options given") if not options.dbname: parser.error("Required option --dbname not given") con = connect() resetSequences(con.cursor()) con.commit()