Example #1
 def export_sales_order(self, cursor):
   transaction.enter_transaction_management(using=self.database)
   try:
     starttime = time()
     if self.verbosity > 0:
       print("Exporting expected delivery date of sales orders...")
     cursor.execute('''select demand.source, max(plandate)
         from demand
         left outer join out_demand
           on demand.name = out_demand.demand
         where demand.subcategory = 'openbravo'
           and status = 'open'
         group by source
        ''')
     count = 0
     body = [
       '<?xml version="1.0" encoding="UTF-8"?>',
       '<ob:Openbravo xmlns:ob="http://www.openbravo.com">'
       ]
     for i in cursor.fetchall():
       body.append('<OrderLine id="%s"><description>Planned delivery date %s</description></OrderLine>' % i)
       count += 1
       if self.verbosity > 0 and count % 500 == 1:
         print('.', end="")
     if self.verbosity > 0:
       print ('')
     body.append('</ob:Openbravo>')
     self.post_data('/openbravo/ws/dal/OrderLine', '\n'.join(body))
     if self.verbosity > 0:
       print("Updated %d sales orders in %.2f seconds" % (count, (time() - starttime)))
   except Exception as e:
     raise CommandError("Error updating sales orders: %s" % e)
   finally:
     transaction.rollback(using=self.database)
     transaction.leave_transaction_management(using=self.database)
Example #2
    def getNextSong(self):
        # commit transaction to force fresh queryset result
        try:
            transaction.enter_transaction_management()
            transaction.commit()
        except BaseException:
            pass

        try:
            data = Queue.objects.all()
            data = data.annotate(VoteCount=Count("User"))
            data = data.annotate(MinCreated=Min("Created"))
            data = data.order_by("-VoteCount", "MinCreated")[0:1].get()
            self.addToHistory(data.Song, data.User)
            song_instance = data.Song
            data.delete()
        except ObjectDoesNotExist:
            try:
                song_instance = self.getRandomSongByPreferences()
                self.addToHistory(song_instance, None)
            except ObjectDoesNotExist:
                song_instance = Song.objects.order_by('?')[0:1].get()
                self.addToHistory(song_instance, None)

        # remove missing files
        if not os.path.exists(song_instance.Filename.name.encode('utf8')):
            Song.objects.all().filter(id=song_instance.id).delete()
            return self.getNextSong()

        return song_instance
Example #3
    def _fixture_setup(self):
        if not connections_support_transactions():
            return super(TestCase, self)._fixture_setup()

        # If the test case has a multi_db=True flag, setup all databases.
        # Otherwise, just use default.
        if getattr(self, 'multi_db', False):
            databases = connections
        else:
            databases = [DEFAULT_DB_ALIAS]

        for db in databases:
            transaction.enter_transaction_management(using=db)
            transaction.managed(True, using=db)
        disable_transaction_methods()

        from django.contrib.sites.models import Site
        Site.objects.clear_cache()

        for db in databases:
            if hasattr(self, 'fixtures'):
                call_command('loaddata', *self.fixtures, **{
                                                            'verbosity': 0,
                                                            'commit': False,
                                                            'database': db
                                                            })
Example #4
    def test_transaction_management(self):
        transaction.enter_transaction_management()
        transaction.managed(True)
        self.assertEqual(connection.isolation_level, self._read_committed)

        transaction.leave_transaction_management()
        self.assertEqual(connection.isolation_level, self._autocommit)
Example #5
File: tests.py Project: 10sr/hue
    def run_select_for_update(self, status, nowait=False):
        """
        Utility method that runs a SELECT FOR UPDATE against all
        Person instances. After the select_for_update, it attempts
        to update the name of the only record, save, and commit.

        This function expects to run in a separate thread.
        """
        status.append('started')
        try:
            # We need to enter transaction management again, as this is done on
            # per-thread basis
            transaction.enter_transaction_management()
            people = list(
                Person.objects.all().select_for_update(nowait=nowait)
            )
            people[0].name = 'Fred'
            people[0].save()
            transaction.commit()
        except DatabaseError as e:
            status.append(e)
        finally:
            # This method is run in a separate thread. It uses its own
            # database connection. Close it without waiting for the GC.
            transaction.abort()
            connection.close()
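
The docstring above notes that this helper is meant to run in a separate thread. A minimal sketch of how a test could drive it, assuming the standard library threading module; the status list and keyword arguments are illustrative rather than taken from the source file:

import threading

status = []
worker = threading.Thread(target=self.run_select_for_update,
                          args=(status,), kwargs={'nowait': True})
worker.start()
worker.join()
# status now holds 'started' plus any DatabaseError raised inside the thread.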
Example #6
 def __enter__(self):
     if transaction.is_managed(self.using):
         # We're already in a transaction; create a savepoint.
         self.sid = transaction.savepoint(self.using)
     else:
         transaction.enter_transaction_management(using=self.using)
         transaction.managed(True, using=self.using)
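
This __enter__ either creates a savepoint (when a managed transaction is already active) or enters transaction management itself. A hedged sketch of what the matching __exit__ could look like under the same assumptions; it is not part of the excerpt, and self.sid is assumed to default to None:

 def __exit__(self, exc_type, exc_value, tb):
     if getattr(self, 'sid', None) is not None:
         # Nested inside an outer transaction: resolve the savepoint.
         if exc_type is None:
             transaction.savepoint_commit(self.sid, using=self.using)
         else:
             transaction.savepoint_rollback(self.sid, using=self.using)
     else:
         # We own the transaction: commit or roll back, then leave management.
         if exc_type is None:
             transaction.commit(using=self.using)
         else:
             transaction.rollback(using=self.using)
         transaction.leave_transaction_management(using=self.using)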
Example #7
def execute_transaction(sql, output=False, database='default'):
    "A transaction wrapper for executing a list of SQL statements"
    my_connection = connection
    using_args = {}

    if is_multi_db():
        if not database:
            database = DEFAULT_DB_ALIAS

        my_connection = connections[database]
        using_args['using'] = database

    try:
        # Begin Transaction
        transaction.enter_transaction_management(**using_args)
        transaction.managed(True, **using_args)

        cursor = my_connection.cursor()

        # Perform the SQL
        if output:
            write_sql(sql, database)

        execute_sql(cursor, sql)

        transaction.commit(**using_args)
        transaction.leave_transaction_management(**using_args)
    except Exception:
        transaction.rollback(**using_args)
        raise
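
A brief usage sketch for the wrapper above; the statements and table names are purely illustrative:

sql_statements = [
    "UPDATE app_item SET processed = TRUE WHERE processed = FALSE",
    "DELETE FROM app_log WHERE created < CURRENT_DATE - 30",
]
execute_transaction(sql_statements, output=True, database='default')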
Example #8
    def handle(self, **options):
        '''The main entry point for the Django management command.'''

        import_start = datetime.datetime.now()
        # Start transaction management.
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)
        try:
            self._promote_devel()
        except:
            self._rollback_db()
            raise
        # Finalize the transaction and close the db connection.
        transaction.commit()
        transaction.leave_transaction_management()
        connection.close()
        import_end = datetime.datetime.now()

        # Print a short summary of what we did.
        td = import_end - import_start
        print '\nProcessing complete in %s days, %s.%s seconds.' % (
          td.days, td.seconds, td.microseconds)
        print '  TraitData objects promoted: %s' % (
          PublicTraitData.objects.all().count(),)
Example #9
 def update(self, **kwargs):
     """
     Updates all elements in the current QuerySet, setting all the given
     fields to the appropriate values.
     """
     assert self.query.can_filter(), \
             "Cannot update a query once a slice has been taken."
     query = self.query.clone(sql.UpdateQuery)
     query.add_update_values(kwargs)
     if not transaction.is_managed():
         transaction.enter_transaction_management()
         forced_managed = True
     else:
         forced_managed = False
     try:
         rows = query.execute_sql(None)
         if forced_managed:
             transaction.commit()
         else:
             transaction.commit_unless_managed()
     finally:
         if forced_managed:
             transaction.leave_transaction_management()
     self._result_cache = None
     return rows
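
This is the internal implementation behind the public QuerySet.update() call, so the transaction handling above is invisible to callers; a hypothetical model is used for illustration:

# Issues a single UPDATE statement and returns the number of affected rows.
rows = Entry.objects.filter(status='draft').update(status='published')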
Example #10
 def _fixture_setup(self):
     transaction.commit_unless_managed()
     transaction.enter_transaction_management()
     transaction.managed(True)
     super(SeleniumTestCase, self)._fixture_setup()
     transaction.commit()
     transaction.leave_transaction_management()
Example #11
    def run(self, max_depth=3):
        for p in self.plugins:
            p.set_output_dir(self.output_dir)

        old_DEBUG = settings.DEBUG
        settings.DEBUG = False

        setup_test_environment()
        test_signals.start_run.send(self)

        # To avoid tainting our memory usage stats with startup overhead we'll
        # do one extra request for the first page now:
        self.c.get(self.not_crawled[0][-1])

        while self.not_crawled:
            #Take top off not_crawled and evaluate it
            current_depth, from_url, to_url = self.not_crawled.pop(0)
            if current_depth > max_depth:
                continue

            transaction.enter_transaction_management()
            try:
                resp, returned_urls = self.get_url(from_url, to_url)
            except HTMLParseError, e:
                LOG.error("%s: unable to parse invalid HTML: %s", to_url, e)
            except Exception, e:
                LOG.exception("%s had unhandled exception: %s", to_url, e)
                continue
Example #12
def grab_db_lock(lock_name, wait):
    """
    Grab a lock using a new, temporary connection. Yields a "success"
    boolean indicating whether the lock was successfully acquired or not.

    This context manager ensures that multidb does not override the
    connection when accessing the Lock model and its transaction. The
    code that runs within this context should NOT be affected - it must
    run as usual, with normal multidb functionality intact.

    """

    with connections.get() as using:

        try:

            with connection_state.force(None):
                transaction.enter_transaction_management(using=using)
                transaction.managed(True, using=using)
                lock = Lock.grab(lock_name, wait=wait, using=using)

            success = bool(lock)

            try:
                yield success
            finally:
                if success:
                    with connection_state.force(None):
                        lock.release(using=using)
                        transaction.commit(using=using)

        finally:
            with connection_state.force(None):
                transaction.leave_transaction_management(using=using)
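
A minimal usage sketch, assuming grab_db_lock is wrapped with contextlib.contextmanager in the original module; the lock name and the work done while holding it are hypothetical:

with grab_db_lock('nightly-cleanup', wait=False) as acquired:
    if acquired:
        run_cleanup()  # hypothetical work performed while the lock is held
    else:
        logger.info('lock is busy, skipping this run')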
Example #13
 def setUp(self):
     # Create a second connection to the default database
     new_connections = ConnectionHandler(settings.DATABASES)
     self.conn2 = new_connections[DEFAULT_DB_ALIAS]
     # Put both DB connections into managed transaction mode
     transaction.enter_transaction_management()
     self.conn2.enter_transaction_management()
Example #14
    def test_savepoint_localstore_flush(self):
        """
        This is a very simple test to see if savepoints will actually
        be committed, i.e. flushed out from localstore into cache.
        """
        from django.db import transaction
        transaction.enter_transaction_management()
        transaction.managed()

        TABLE_NAME = 'test_table'
        cache_backend = johnny.cache.get_backend()
        cache_backend.patch()
        keyhandler = cache_backend.keyhandler
        keygen = keyhandler.keygen
        
        tm = cache_backend.cache_backend
        
        # First, we set one key-val pair generated for our non-existing table.
        table_key = keygen.gen_table_key(TABLE_NAME)
        tm.set(table_key, 'val1')

        # Then we create a savepoint.
        # The key-value pair is moved into 'trans_sids' item of localstore.
        tm._create_savepoint('savepoint1')
        
        # We then commit all the savepoints, which should write the changes.
        tm.commit()
        # And this checks if it actually happened.
        backend = johnny_settings._get_backend()
        self.failUnless(backend.get(table_key))
Example #15
	def clear_couple(self):
		from people.models import Couple, CoupleLog
		while True:
			transaction.enter_transaction_management()
			couples = Couple.objects.filter(end_time__lt=datetime.datetime.now(), is_valid=True)

			if couples.count() > 0:
				for couple in couples:
					couple.is_valid = False
					couple.save()

					try:
						couple.fuser.get_profile().couple = None
						couple.fuser.get_profile().save()
						send_couple_apns(couple.fuser, settings.LOG_COUPLE_EXPIRED_TXT, 'rm')
					except:
						print 'fuser error'

					try:
						couple.tuser.get_profile().couple = None
						couple.tuser.get_profile().save()
						send_couple_apns(couple.tuser, settings.LOG_COUPLE_EXPIRED_TXT, 'rm')
					except:
						print 'tuser error'

					coupleLog = CoupleLog(couple=couple, user=couple.fuser, content=settings.LOG_COUPLE_DISMATCH_TXT)
					coupleLog.save()

					print couple.id, ' expired and decoupled'

			transaction.commit()
			time.sleep(60)
Example #16
    def setUp(self):
        super(TransactionalTestCase, self).setUp()

        transaction.enter_transaction_management()
        transaction.managed(True)

        self.setUpInTransaction()
Example #17
 def test_savepoint_rollback(self):
     """Tests rollbacks of savepoints"""
     from django.db import transaction
     from testapp.models import Genre, Publisher
     from johnny import cache
     if not connection.features.uses_savepoints:
         return
     self.failUnless(transaction.is_managed() == False)
     self.failUnless(transaction.is_dirty() == False)
     connection.queries = []
     cache.local.clear()
     transaction.enter_transaction_management()
     transaction.managed()
     g = Genre.objects.get(pk=1)
     start_title = g.title
     g.title = "Adventures in Savepoint World"
     g.save()
     g = Genre.objects.get(pk=1)
     self.failUnless(g.title == "Adventures in Savepoint World")
     sid = transaction.savepoint()
     g.title = "In the Void"
     g.save()
     g = Genre.objects.get(pk=1)
     self.failUnless(g.title == "In the Void")
     transaction.savepoint_rollback(sid)
     g = Genre.objects.get(pk=1)
     self.failUnless(g.title == "Adventures in Savepoint World")
     transaction.rollback()
     g = Genre.objects.get(pk=1)
     self.failUnless(g.title == start_title)
     transaction.managed(False)
     transaction.leave_transaction_management()
Example #18
    def _fixture_setup(self):
        if not connections_support_transactions():
            return super(TestCase, self)._fixture_setup()

        assert not self.reset_sequences, 'reset_sequences cannot be used on TestCase instances'

        # If the test case has a multi_db=True flag, setup all databases.
        # Otherwise, just use default.
        db_names = connections if getattr(self, 'multi_db', False) else [DEFAULT_DB_ALIAS]

        for db_name in db_names:
            transaction.enter_transaction_management(using=db_name)
            transaction.managed(True, using=db_name)
        disable_transaction_methods()

        from django.contrib.sites.models import Site
        Site.objects.clear_cache()

        for db in db_names:
            if hasattr(self, 'fixtures'):
                call_command('loaddata', *self.fixtures,
                             **{
                                'verbosity': 0,
                                'commit': False,
                                'database': db,
                                'skip_validation': True,
                             })
Example #19
    def test_transaction_management(self):
        transaction.enter_transaction_management()
        self.assertFalse(connection.autocommit)
        self.assertEqual(connection.isolation_level, self._serializable)

        transaction.leave_transaction_management()
        self.assertTrue(connection.autocommit)
Example #20
def atomic(using=None):
    """Perform database operations atomically within a transaction.

    The caller can use this to ensure SQL statements are executed within
    a transaction and then cleaned up nicely if there's an error.

    This provides compatibility with all supported versions of Django.

    Args:
        using (str, optional):
            The database connection name to use. Defaults to the default
            database connection.
    """
    if hasattr(transaction, 'atomic'):
        # Django >= 1.5
        with transaction.atomic(using=using):
            yield
    else:
        # Django < 1.5
        assert hasattr(transaction, 'enter_transaction_management')

        try:
            # Begin Transaction
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

            yield

            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)
        except Exception:
            transaction.rollback(using=using)
            raise
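
Since the function yields, it presumably carries a contextlib.contextmanager decorator in the original module. A usage sketch with hypothetical models:

with atomic(using='default'):
    order = Order.objects.create(total=0)
    OrderItem.objects.create(order=order, price=10)
    # An exception raised anywhere in this block rolls everything back
    # on both code paths.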
Example #21
    def test_savepoint_rollback(self):
        """Tests rollbacks of savepoints"""
        if not connection.features.uses_savepoints:
            return
        self.assertFalse(is_managed())
        self.assertFalse(transaction.is_dirty())
        cache.local.clear()
        managed()
        transaction.enter_transaction_management()

        g = Genre.objects.get(pk=1)
        start_title = g.title
        g.title = "Adventures in Savepoint World"
        g.save()
        g = Genre.objects.get(pk=1)
        self.assertEqual(g.title, "Adventures in Savepoint World")
        sid = transaction.savepoint()
        g.title = "In the Void"
        g.save()
        g = Genre.objects.get(pk=1)
        self.assertEqual(g.title, "In the Void")
        transaction.savepoint_rollback(sid)
        g = Genre.objects.get(pk=1)
        self.assertEqual(g.title, "Adventures in Savepoint World")
        transaction.rollback()
        g = Genre.objects.get(pk=1)
        self.assertEqual(g.title, start_title)
Example #22
    def handle(self, *args, **options):

        self.batch_size = options.get('batch_size', 50)
        self.preserve = options.get("preserve", False)
        self.index = options.get("index", False)
        self.newline = options.get("newline", False)
        
        if not self.index:
            old_realtime_indexing = getattr(settings, "REALTIME_INDEXING", None)
            # This is not recommended by the Django manual, but for a management command it seems to work.
            settings.REALTIME_INDEXING = False 
        
        transaction.enter_transaction_management()
        transaction.managed(True)

        for records_url in args:
            print("Processing %s" % records_url)
            errors = self.process_url(records_url, options)
            print("Processing %s Done" % records_url)
            if errors:
                print("%d error(s) when processing %s, check your log file." % (len(errors), records_url))

        transaction.leave_transaction_management()
        
        if not self.index and old_realtime_indexing:
            settings.REALTIME_INDEXING = old_realtime_indexing
Example #23
    def handle(self, *args, **options):
        from django.db import transaction

        path, slug, name = args

        # Start transaction management.
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)

        book = Book.objects.get(slug=slug)

        root, ext = os.path.splitext(path)
        ext = ext.lower()
        if ext:
            ext = ext[1:]
            if ext == "zip":
                ext = "daisy"

        source_sha1 = BookMedia.read_source_sha1(path, ext)
        print "Source file SHA1:", source_sha1
        try:
            assert source_sha1
            bm = book.media.get(type=ext, source_sha1=source_sha1)
            print "Replacing media: %s (%s)" % (bm.name.encode("utf-8"), ext)
        except (AssertionError, BookMedia.DoesNotExist):
            bm = BookMedia(book=book, type=ext)
            print "Creating new media"
        bm.name = name
        bm.file.save(None, ExistingFile(path))
        bm.save()
        transaction.commit()
        transaction.leave_transaction_management()
Example #24
 def handle(self, *args, **options):
     print("Download zip-archive...")
     f = urlopen(IPGEOBASE_SOURCE_URL)
     buffer = BytesIO(f.read())
     f.close()
     print("Unpacking...")
     zip_file = ZipFile(buffer)
     cities_file_read = _read_file(zip_file, 'cities.txt')
     cidr_optim_file_read = _read_file(zip_file, 'cidr_optim.txt')
     zip_file.close()
     buffer.close()
     print("Start updating...")
     list_cities = cities_file_read.decode(IPGEOBASE_CODING).split('\n')
     list_cidr_optim = \
         cidr_optim_file_read.decode(IPGEOBASE_CODING).split('\n')
     lines = \
         _get_cidr_optim_with_cities_lines(list_cidr_optim, list_cities)
     cursor = connection.cursor()
     transaction.enter_transaction_management()
     try:
         transaction.managed(True)
         print("Delete old rows in table ipgeobase...")
         cursor.execute(DELETE_SQL)
         print ("Write new data...")
         cursor.executemany(INSERT_SQL, [l for l in lines if l])
         transaction.commit()
     except Exception as e:
         message = "The data not updated:", e
         if send_message:
             mail_admins(subject=ERROR_SUBJECT, message=message)
         raise CommandError(message)
     finally:
         transaction.rollback()
         transaction.leave_transaction_management()
     return "Table ipgeobase is update.\n"
Example #25
def serializerTest(format, self):
    # Clear the database first
    management.call_command('flush', verbosity=0, interactive=False)

    # Create all the objects defined in the test data
    objects = []
    transaction.enter_transaction_management()
    transaction.managed(True)
    for (func, pk, klass, datum) in test_data:
        objects.append(func[0](pk, klass, datum))
    transaction.commit()
    transaction.leave_transaction_management()

    # Add the generic tagged objects to the object list
    objects.extend(Tag.objects.all())

    # Serialize the test database
    serialized_data = serializers.serialize(format, objects, indent=2)

    # Flush the database and recreate from the serialized data
    management.call_command('flush', verbosity=0, interactive=False)
    transaction.enter_transaction_management()
    transaction.managed(True)
    for obj in serializers.deserialize(format, serialized_data):
        obj.save()
    transaction.commit()
    transaction.leave_transaction_management()

    # Assert that the deserialized data is the same
    # as the original source
    for (func, pk, klass, datum) in test_data:
        func[1](self, pk, klass, datum)
Example #26
 def test_managed_response(self):
     transaction.enter_transaction_management()
     Band.objects.create(name="The Beatles")
     self.assertTrue(transaction.is_dirty())
     TransactionMiddleware().process_response(self.request, self.response)
     self.assertFalse(transaction.is_dirty())
     self.assertEqual(Band.objects.count(), 1)
Example #27
    def restore(self):
        self.state = BACKUP_RESTORE_STATE_IN_PROGRESS
        self.save()

        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)
        try:
            self.tar = tarfile.open(self.backup_file.path, 'r:gz')

            # extract data.xml for parsing
            self.stream = self.tar.extractfile('backup/data.xml')
            self.restore_objects()

            self.restore_files()

            transaction.commit()
            transaction.leave_transaction_management()
            self.state = BACKUP_RESTORE_STATE_DONE
        except Exception as e:
            transaction.rollback()
            transaction.leave_transaction_management()
            self.state = BACKUP_RESTORE_STATE_ERROR
            self.error_message = e.__unicode__()
            mail_subject = _('Restore failed')
            mail_message = _('Restore for %(user)s failed with message : %(message)s') % {'user': self.user,
                                                                                         'message': e}
            mail_admins(mail_subject, mail_message, fail_silently=(not settings.DEBUG))

        self.save()

        # close and delete archive
        self.tar.close()
        os.remove(self.backup_file.path)
        self.backup_file = None
Example #28
 def test_exception(self):
     transaction.enter_transaction_management()
     Band.objects.create(name='The Beatles')
     self.assertTrue(transaction.is_dirty())
     TransactionMiddleware().process_exception(self.request, None)
     self.assertFalse(transaction.is_dirty())
     self.assertEqual(Band.objects.count(), 0)
Example #29
    def beforeTest(self, test):

        if not self.settings_path:
            # short circuit if no settings file can be found
            return

        from django.core.management import call_command
        from django.core.urlresolvers import clear_url_caches
        from django.db import connection, transaction
        from django.core import mail

        mail.outbox = []

        transaction_support = self._has_transaction_support(test)
        if transaction_support:
            transaction.enter_transaction_management()
            transaction.managed(True)
            self.disable_transaction_support(transaction)

        if isinstance(test, nose.case.Test) and \
            isinstance(test.test, nose.case.MethodTestCase) and \
            hasattr(test.context, 'fixtures'):
                # We have to use this slightly awkward syntax due to the fact
                # that we're using *args and **kwargs together.
                call_command('loaddata', *test.context.fixtures, **{'verbosity': 0})

        if isinstance(test, nose.case.Test) and \
            isinstance(test.test, nose.case.MethodTestCase) and \
            hasattr(test.context, 'urls'):
                # We have to use this slightly awkward syntax due to the fact
                # that we're using *args and **kwargs together.
                self.old_urlconf = settings.ROOT_URLCONF
                settings.ROOT_URLCONF = self.urls
                clear_url_caches()
Example #30
    def test_savepoint_localstore_flush(self):
        """
        This is a very simple test to see if savepoints will actually
        be committed, i.e. flushed out from localstore into cache.
        """
        from django.db import transaction
        transaction.enter_transaction_management()
        transaction.managed()

        TABLE_NAME = 'test_table'
        cache_backend = johnny.cache.get_backend()
        cache_backend.patch()
        keyhandler = cache_backend.keyhandler
        keygen = keyhandler.keygen
        
        tm = cache_backend.cache_backend
        
        # First, we set one key-val pair generated for our non-existing table.
        table_key = keygen.gen_table_key(TABLE_NAME)
        tm.set(table_key, 'val1')

        # Then we create a savepoint.
        # The key-value pair is moved into 'trans_sids' item of localstore.
        tm._create_savepoint('savepoint1')
        
        # We then commit all the savepoints (i.e. only one in this case)
        # The items stored in 'trans_sids' should be moved back to the
        # top-level dictionary of our localstore
        tm._commit_all_savepoints()
        # And this checks if it actually happened.
        self.failUnless(table_key in tm.local)
Example #31
    def handle(self, *fixture_labels, **options):
        from django.db.models import get_apps
        from django.core import serializers
        from django.db import connection, transaction
        from django.conf import settings

        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        objects_per_fixture = []
        models = set()

        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)

        app_fixtures = [os.path.join(os.path.dirname(app.__file__), 'fixtures') for app in get_apps()]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')
            if len(parts) == 1:
                fixture_name = fixture_label
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                sys.stderr.write(
                    self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format." %
                        (fixture_name, format)))
                transaction.rollback()
                transaction.leave_transaction_management()
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)

                label_found = False
                for format in formats:
                    serializer = serializers.get_serializer(format)
                    if verbosity > 1:
                        print "Trying %s for %s fixture '%s'..." % \
                            (humanize(fixture_dir), format, fixture_name)
                    try:
                        full_path = os.path.join(fixture_dir, '.'.join([fixture_name, format]))
                        fixture = open(full_path, 'r')
                        if label_found:
                            fixture.close()
                            print self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
                                (fixture_name, humanize(fixture_dir)))
                            transaction.rollback()
                            transaction.leave_transaction_management()
                            return
                        else:
                            fixture_count += 1
                            objects_per_fixture.append(0)
                            if verbosity > 0:
                                print "Installing %s fixture '%s' from %s." % \
                                    (format, fixture_name, humanize(fixture_dir))
                            try:
                                objects = serializers.deserialize(format, fixture)
                                for obj in objects:
                                    object_count += 1
                                    objects_per_fixture[-1] += 1
                                    models.add(obj.object.__class__)
                                    obj.save()
                                label_found = True
                            except (SystemExit, KeyboardInterrupt):
                                raise
                            except Exception:
                                import traceback
                                fixture.close()
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                if show_traceback:
                                    import traceback
                                    traceback.print_exc()
                                else:
                                    sys.stderr.write(
                                        self.style.ERROR("Problem installing fixture '%s': %s\n" %
                                             (full_path, traceback.format_exc())))
                                return
                            fixture.close()
                    except:
                        if verbosity > 1:
                            print "No %s fixture '%s' in %s." % \
                                (format, fixture_name, humanize(fixture_dir))


        # If any of the fixtures we loaded contain 0 objects, assume that an 
        # error was encountered during fixture loading.
        if 0 in objects_per_fixture:
            sys.stderr.write(
                self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)" %
                    (fixture_name)))
            transaction.rollback()
            transaction.leave_transaction_management()
            return
            
        # If we found even one object in a fixture, we need to reset the 
        # database sequences.
        if object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
            if sequence_sql:
                if verbosity > 1:
                    print "Resetting sequences"
                for line in sequence_sql:
                    cursor.execute(line)
            
        transaction.commit()
        transaction.leave_transaction_management()

        if object_count == 0:
            if verbosity > 1:
                print "No fixtures found."
        else:
            if verbosity > 0:
                print "Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count)
Example #32
def begin_work(run_transaction):
    if run_transaction:
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)
        printv('Transaction started')
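
A hedged sketch of a counterpart that would close out what begin_work opens; it is not taken from the source project, and printv is assumed to be the same logging helper:

def end_work(run_transaction, success=True):
    if run_transaction:
        if success:
            transaction.commit()
        else:
            transaction.rollback()
        transaction.leave_transaction_management()
        printv('Transaction finished')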
Example #33
    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError("Expected exactly 1 argument - Experiment ID")
        try:
            exp = Experiment.objects.get(pk=int(args[0]))
        except Experiment.DoesNotExist:
            raise CommandError("Experiment ID %s not found" % args[0])

        self.stdout.write("Delete the following experiment?\n\n")

        # Print basic experiment information
        self.stdout.write("Experiment\n    ID: {0}\n".format(exp.id))
        self.stdout.write("    Title: {0}\n".format(exp.title))
        self.stdout.write("    Public: {0}\n".format(exp.public))

        # List experiment authors
        authors = Author_Experiment.objects.filter(experiment=exp)
        self.stdout.write("    Authors:\n")
        for author in authors:
            self.stdout.write("        {0}\n".format(author.author))

        # List experiment metadata
        epsets = ExperimentParameterSet.objects.filter(experiment=exp)
        for epset in epsets:
            self.stdout.write("    Param Set: {0} - {1}\n".format(
                epset.schema.name, epset.schema.namespace))
            params = ExperimentParameter.objects.filter(parameterset=epset)
            for param in params:
                self.stdout.write("        {0} = {1}\n".format(
                    param.name.full_name, param.get()))

        # List experiment ACLs
        acls = ExperimentACL.objects.filter(experiment=exp)
        self.stdout.write("    ACLs:\n")
        for acl in acls:
            self.stdout.write("        {0}-{1}, flags: ".format(
                acl.pluginId, acl.entityId))
            if acl.canRead:
                self.stdout.write("R")
            if acl.canWrite:
                self.stdout.write("W")
            if acl.canDelete:
                self.stdout.write("D")
            if acl.isOwner:
                self.stdout.write("O")
            self.stdout.write("\n")

        # Basic Statistics
        datasets = Dataset.objects.filter(experiment=exp)
        datafiles = Dataset_File.objects.filter(dataset__experiment=exp)
        self.stdout.write("    {0} datset(s), containing {1} file(s)\n".format(
            datasets.count(), datafiles.count()))

        # If the user has only requested a listing finish now
        if options.get('list', False):
            return

        # User must enter "yes" to proceed
        self.stdout.write("\n\nConfirm Deletion? (yes): ")
        ans = sys.stdin.readline().strip()
        if ans != "yes":
            self.stdout.write("'yes' not entered, aborting.\n")
            return

        # Consider the entire experiment deletion atomic
        using = options.get('database', DEFAULT_DB_ALIAS)
        transaction.commit_unless_managed(using=using)
        transaction.enter_transaction_management(using=using)
        transaction.managed(True, using=using)

        try:
            acls.delete()
            epsets.delete()
            DatasetParameterSet.objects.filter(
                dataset__experiment=exp).delete()
            DatafileParameterSet.objects.filter(
                dataset_file__dataset__experiment=exp).delete()
            authors.delete()
            datasets.delete()
            datafiles.delete()
            exp.delete()

            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)
        except:
            transaction.rollback(using=using)
            raise CommandError("Exception occurred, rolling back transaction")
Example #34
 def _pre_setup(self):
     transaction.enter_transaction_management()
     transaction.managed(True)
     if hasattr(self, 'fixtures'):
         self._load_fixtures(self.fixtures)
Example #35
 def process_request(self, request):
     """Begin a transaction on request start.."""
     from django.db import transaction as django_transaction
     django_transaction.enter_transaction_management()
     django_transaction.managed(True)
     transaction.begin()
Example #36
def delete_objects(seen_objs, using):
    """
    Iterate through a list of seen classes, and remove any instances that are
    referred to.
    """
    connection = connections[using]
    if not transaction.is_managed(using=using):
        transaction.enter_transaction_management(using=using)
        forced_managed = True
    else:
        forced_managed = False
    try:
        ordered_classes = seen_objs.keys()
    except CyclicDependency:
        # If there is a cyclic dependency, we cannot in general delete the
        # objects.  However, if an appropriate transaction is set up, or if the
        # database is lax enough, it will succeed. So for now, we go ahead and
        # try anyway.
        ordered_classes = seen_objs.unordered_keys()

    obj_pairs = {}
    try:
        for cls in ordered_classes:
            items = seen_objs[cls].items()
            items.sort()
            obj_pairs[cls] = items

            # Pre-notify all instances to be deleted.
            for pk_val, instance in items:
                if not cls._meta.auto_created:
                    signals.pre_delete.send(sender=cls, instance=instance)

            pk_list = [pk for pk,instance in items]
            del_query = sql.DeleteQuery(cls)
            del_query.delete_batch_related(pk_list, using=using)

            update_query = sql.UpdateQuery(cls)
            for field, model in cls._meta.get_fields_with_model():
                if (field.rel and field.null and field.rel.to in seen_objs and
                        filter(lambda f: f.column == field.rel.get_related_field().column,
                        field.rel.to._meta.fields)):
                    if model:
                        sql.UpdateQuery(model).clear_related(field, pk_list, using=using)
                    else:
                        update_query.clear_related(field, pk_list, using=using)

        # Now delete the actual data.
        for cls in ordered_classes:
            items = obj_pairs[cls]
            items.reverse()

            pk_list = [pk for pk,instance in items]
            del_query = sql.DeleteQuery(cls)
            del_query.delete_batch(pk_list, using=using)

            # Last cleanup; set NULLs where there once was a reference to the
            # object, NULL the primary key of the found objects, and perform
            # post-notification.
            for pk_val, instance in items:
                for field in cls._meta.fields:
                    if field.rel and field.null and field.rel.to in seen_objs:
                        setattr(instance, field.attname, None)

                if not cls._meta.auto_created:
                    signals.post_delete.send(sender=cls, instance=instance)
                setattr(instance, cls._meta.pk.attname, None)

        if forced_managed:
            transaction.commit(using=using)
        else:
            transaction.commit_unless_managed(using=using)
    finally:
        if forced_managed:
            transaction.leave_transaction_management(using=using)
Example #37
    def handle_noargs(self, **options):
        transaction_method = 'none'
        #transaction_method = options.get('all') and 'commit' or transaction_method
        transaction_method = options.get(
            'dryrun') and 'rollback' or transaction_method
        max_count = options.get('all') and Pto.objects.all().count() or 1000

        _users = {}

        def get_user(email):
            if email not in _users:
                try:
                    user = User.objects.get(email=email)
                except User.DoesNotExist:
                    user = User.objects.create(
                        username=email.split('@')[0],
                        email=email,
                    )
                    user.set_unusable_password()
                _users[email] = user
            return _users[email]

        if not transaction_method == 'none':
            transaction.enter_transaction_management()
            transaction.managed(True)

        if max_count < Pto.objects.all().count():
            print "Capped to the first", max_count, "objects"

        count = 0
        for pto in Pto.objects.all().order_by('id')[:max_count]:
            #print pto.id
            user = get_user(pto.person)
            #print 'person', repr(pto.person), repr(user)
            added = self._timestamp_to_date(pto.added)
            #print 'added', repr(pto.added), added
            hours = pto.hours
            #print 'hours', float(pto.hours)

            #print 'details', repr(pto.details)
            start = self._timestamp_to_date(pto.start)
            #print 'start', repr(pto.start), start
            end = self._timestamp_to_date(pto.end)
            #print 'hours_daily', repr(pto.hours_daily)
            if pto.hours_daily:
                hours_daily = json.loads(pto.hours_daily)
                ts_keys = hours_daily.keys()
                for ts in ts_keys:
                    value = hours_daily.pop(ts)
                    hours_daily[self._timestamp_to_date(ts)] = value
                assert sum(hours_daily.values()) == hours
            else:
                try:
                    hours_daily = self._create_hours(hours, start, end)
                except ZeroDivisionError:
                    self._report_broken(pto)
                    continue
            #print hours, hours_daily
            # go ahead and add it
            entry = Entry.objects.create(
                user=user,
                start=start,
                end=end,
                total_hours=hours,
                details=pto.details.strip(),
                add_date=added,
                modify_date=added,
            )

            for d, t in hours_daily.items():
                Hours.objects.create(
                    entry=entry,
                    hours=t,
                    date=d,
                )

            pto.delete()
            count += 1

            #print 'end', repr(pto.end), end
            #print ""

        if not transaction_method == 'none':
            if transaction_method == 'rollback':
                print "rollbacked, no changed applied"
                transaction.rollback()
            else:
                transaction.commit()
                print "Migrated", count, "PTO entries"
Example #38
    def import_data(self, dataset, dry_run=False, raise_errors=False,
            use_transactions=None):
        """
        Imports data from ``dataset``.

        ``use_transactions``
            If ``True`` import process will be processed inside transaction.
            If ``dry_run`` is set, or error occurs, transaction will be rolled
            back.
        """
        result = Result()
        result.diff_headers = self.get_diff_headers()

        if use_transactions is None:
            use_transactions = self.get_use_transactions()

        if use_transactions is True:
            # when transactions are used we want to create/update/delete object
            # as transaction will be rolled back if dry_run is set
            real_dry_run = False
            transaction.enter_transaction_management()
            transaction.managed(True)
        else:
            real_dry_run = dry_run

        instance_loader = self._meta.instance_loader_class(self, dataset)

        try:
            self.before_import(dataset, real_dry_run)
        except Exception as e:
            tb_info = traceback.format_exc(2)
            result.base_errors.append(Error(repr(e), tb_info))
            if raise_errors:
                if use_transactions:
                    transaction.rollback()
                    transaction.leave_transaction_management()
                raise

        for row in dataset.dict:
            try:
                row_result = RowResult()
                instance, new = self.get_or_init_instance(instance_loader, row)
                if new:
                    row_result.import_type = RowResult.IMPORT_TYPE_NEW
                else:
                    row_result.import_type = RowResult.IMPORT_TYPE_UPDATE
                row_result.new_record = new
                original = deepcopy(instance)
                if self.for_delete(row, instance):
                    if new:
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                        row_result.diff = self.get_diff(None, None,
                                real_dry_run)
                    else:
                        row_result.import_type = RowResult.IMPORT_TYPE_DELETE
                        self.delete_instance(instance, real_dry_run)
                        row_result.diff = self.get_diff(original, None,
                                real_dry_run)
                else:
                    self.import_obj(instance, row, real_dry_run)
                    if self.skip_row(instance, original):
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                    else:
                        self.save_instance(instance, real_dry_run)
                        self.save_m2m(instance, row, real_dry_run)
                        # Add object info to RowResult for LogEntry
                        row_result.object_repr = force_text(instance)
                        row_result.object_id = instance.pk
                    row_result.diff = self.get_diff(original, instance,
                            real_dry_run)
            except Exception as e:
                tb_info = traceback.format_exc(2)
                row_result.errors.append(Error(e, tb_info))
                if raise_errors:
                    if use_transactions:
                        transaction.rollback()
                        transaction.leave_transaction_management()
                    six.reraise(*sys.exc_info())
            if (row_result.import_type != RowResult.IMPORT_TYPE_SKIP or
                        self._meta.report_skipped):
                result.rows.append(row_result)

        if use_transactions:
            if dry_run or result.has_errors():
                transaction.rollback()
            else:
                transaction.commit()
            transaction.leave_transaction_management()

        return result
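
A usage sketch in the style of django-import-export, which this method resembles; the resource class, headers, and rows are hypothetical:

import tablib

dataset = tablib.Dataset(['1', 'Example book'], headers=['id', 'name'])
result = BookResource().import_data(dataset, dry_run=True,
                                    use_transactions=True)
if result.has_errors():
    print("dry run reported errors; nothing was committed")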
Example #39
 def __enter__(self):
     if not transaction.is_managed(using=self.using):
         transaction.enter_transaction_management(using=self.using)
         self.forced_managed = True
     else:
         self.forced_managed = False
 def enter(self):
     db_transaction.enter_transaction_management(using=self.using)
    def handle(self, *fixture_labels, **options):

        ignore = options.get('ignore')
        using = options.get('database')

        connection = connections[using]

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path of at "
                "least one fixture in the command line.")

        verbosity = int(options.get('verbosity'))
        show_traceback = options.get('traceback')

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(
                        self.namelist()
                    ) == 1, "Zip-compressed fixtures must contain only one file."

            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None: open,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(upath(path))
            else:
                # It's a models.py module
                app_module_paths.append(upath(app.__file__))

        app_fixtures = [
            os.path.join(os.path.dirname(path), 'fixtures')
            for path in app_module_paths
        ]

        try:
            with connection.constraint_checks_disabled():
                for fixture_label in fixture_labels:
                    parts = fixture_label.split('.')

                    if len(parts) > 1 and parts[-1] in compression_types:
                        compression_formats = [parts[-1]]
                        parts = parts[:-1]
                    else:
                        compression_formats = compression_types.keys()

                    if len(parts) == 1:
                        fixture_name = parts[0]
                        formats = serializers.get_public_serializer_formats()
                    else:
                        fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                        if format in serializers.get_public_serializer_formats(
                        ):
                            formats = [format]
                        else:
                            formats = []

                    if formats:
                        if verbosity >= 2:
                            self.stdout.write("Loading '%s' fixtures..." %
                                              fixture_name)
                    else:
                        raise CommandError(
                            "Problem installing fixture '%s': %s is not a known serialization format."
                            % (fixture_name, format))

                    if os.path.isabs(fixture_name):
                        fixture_dirs = [fixture_name]
                    else:
                        fixture_dirs = app_fixtures + list(
                            settings.FIXTURE_DIRS) + ['']

                    for fixture_dir in fixture_dirs:
                        if verbosity >= 2:
                            self.stdout.write("Checking %s for fixtures..." %
                                              humanize(fixture_dir))

                        label_found = False
                        for combo in product([using, None], formats,
                                             compression_formats):
                            database, format, compression_format = combo
                            file_name = '.'.join(p for p in [
                                fixture_name, database, format,
                                compression_format
                            ] if p)

                            if verbosity >= 3:
                                self.stdout.write("Trying %s for %s fixture '%s'..." % \
                                    (humanize(fixture_dir), file_name, fixture_name))
                            full_path = os.path.join(fixture_dir, file_name)
                            open_method = compression_types[compression_format]
                            try:
                                fixture = open_method(full_path, 'r')
                            except IOError:
                                if verbosity >= 2:
                                    self.stdout.write("No %s fixture '%s' in %s." % \
                                        (format, fixture_name, humanize(fixture_dir)))
                            else:
                                try:
                                    if label_found:
                                        raise CommandError(
                                            "Multiple fixtures named '%s' in %s. Aborting."
                                            % (fixture_name,
                                               humanize(fixture_dir)))

                                    fixture_count += 1
                                    objects_in_fixture = 0
                                    loaded_objects_in_fixture = 0
                                    if verbosity >= 2:
                                        self.stdout.write("Installing %s fixture '%s' from %s." % \
                                            (format, fixture_name, humanize(fixture_dir)))

                                    objects = serializers.deserialize(
                                        format,
                                        fixture,
                                        using=using,
                                        ignorenonexistent=ignore)

                                    for obj in objects:

                                        try:
                                            # Attempt to lookup any existing object using natural keys and
                                            # use that object's PK to duplicate and conflict records aren't created.
                                            nk = obj.object.natural_key()
                                            real_object = type(
                                                obj.object
                                            ).objects.get_by_natural_key(*nk)
                                            if real_object:
                                                obj.object.pk = real_object.pk
                                        except AttributeError:
                                            # Model class doesn't support natural keys.
                                            pass
                                        except type(obj.object).DoesNotExist:
                                            # No existing record, so proceed as normal.
                                            pass

                                        objects_in_fixture += 1
                                        if router.allow_syncdb(
                                                using, obj.object.__class__):
                                            loaded_objects_in_fixture += 1
                                            models.add(obj.object.__class__)
                                            try:
                                                obj.save(using=using)
                                            except (DatabaseError,
                                                    IntegrityError) as e:
                                                e.args = (
                                                    "Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                                        'app_label': obj.object._meta.app_label,
                                                        'object_name': obj.object._meta.object_name,
                                                        'pk': obj.object.pk,
                                                        'error_msg': force_text(e),
                                                    }, )
                                                raise

                                    loaded_object_count += loaded_objects_in_fixture
                                    fixture_object_count += objects_in_fixture
                                    label_found = True
                                except Exception as e:
                                    if not isinstance(e, CommandError):
                                        e.args = (
                                            "Problem installing fixture '%s': %s"
                                            % (full_path, e), )
                                    raise
                                finally:
                                    fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    raise CommandError(
                                        "No fixture data found for '%s'. (File format may be invalid.)"
                                        % (fixture_name))

            # Since we disabled constraint checks, we must manually check for
            # any invalid keys that might have been added
            table_names = [model._meta.db_table for model in models]
            try:
                connection.check_constraints(table_names=table_names)
            except Exception as e:
                e.args = ("Problem installing fixtures: %s" % e, )
                raise

        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as e:
            if commit:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                no_style(), models)
            if sequence_sql:
                if verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)

        if verbosity >= 1:
            if fixture_object_count == loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" %
                                  (loaded_object_count, fixture_count))
            else:
                self.stdout.write(
                    "Installed %d object(s) (of %d) from %d fixture(s)" %
                    (loaded_object_count, fixture_object_count, fixture_count))

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
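
The natural-key lookup near the top of this example only works for models that opt in to natural keys. A minimal model-side sketch of that contract, purely illustrative (the Tag model and its fields are not from the original source):

from django.db import models


class TagManager(models.Manager):
    def get_by_natural_key(self, slug):
        # loaddata-style deduplication calls this to find the existing row
        # that corresponds to a deserialized fixture object.
        return self.get(slug=slug)


class Tag(models.Model):
    slug = models.SlugField(unique=True)

    objects = TagManager()

    def natural_key(self):
        # Must return a tuple that get_by_natural_key() can unpack.
        return (self.slug,)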
Example #42
0
    def handle(self, *fixture_labels, **options):
        from django.db.models import get_apps
        from django.core import serializers
        from django.db import connection, transaction
        from django.conf import settings

        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        models = set()

        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed()
            transaction.enter_transaction_management()
            transaction.managed(True)

        self.disable_forward_ref_checks(cursor)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(
                        self.namelist()
                    ) == 1, "Zip-compressed fixtures must contain only one file."

            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None: file,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_fixtures = [
            os.path.join(os.path.dirname(app.__file__), 'fixtures')
            for app in get_apps()
        ]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')

            if len(parts) > 1 and parts[-1] in compression_types:
                compression_formats = [parts[-1]]
                parts = parts[:-1]
            else:
                compression_formats = compression_types.keys()

            if len(parts) == 1:
                fixture_name = parts[0]
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                self.enable_forward_ref_checks(cursor)
                sys.stderr.write(
                    self.style.ERROR(
                        "Problem installing fixture '%s': %s is not a known serialization format."
                        % (fixture_name, format)))
                transaction.rollback()
                transaction.leave_transaction_management()
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(
                    settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)

                label_found = False
                for format in formats:
                    for compression_format in compression_formats:
                        if compression_format:
                            file_name = '.'.join(
                                [fixture_name, format, compression_format])
                        else:
                            file_name = '.'.join([fixture_name, format])

                        if verbosity > 1:
                            print "Trying %s for %s fixture '%s'..." % \
                                (humanize(fixture_dir), file_name, fixture_name)
                        full_path = os.path.join(fixture_dir, file_name)
                        open_method = compression_types[compression_format]
                        try:
                            fixture = open_method(full_path, 'r')
                            if label_found:
                                fixture.close()
                                self.enable_forward_ref_checks(cursor)
                                print self.style.ERROR(
                                    "Multiple fixtures named '%s' in %s. Aborting."
                                    % (fixture_name, humanize(fixture_dir)))
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                return
                            else:
                                fixture_count += 1
                                objects_in_fixture = 0
                                if verbosity > 0:
                                    print "Installing %s fixture '%s' from %s." % \
                                        (format, fixture_name, humanize(fixture_dir))
                                try:
                                    objects = serializers.deserialize(
                                        format, fixture)
                                    for obj in objects:
                                        objects_in_fixture += 1
                                        self.handle_ref_checks(cursor, obj)
                                        models.add(obj.object.__class__)
                                        obj.save()
                                    object_count += objects_in_fixture
                                    label_found = True
                                except (SystemExit, KeyboardInterrupt):
                                    self.enable_forward_ref_checks(cursor)
                                    raise
                                except Exception:
                                    import traceback
                                    fixture.close()
                                    self.enable_forward_ref_checks(cursor)
                                    transaction.rollback()
                                    transaction.leave_transaction_management()
                                    if show_traceback:
                                        traceback.print_exc()
                                    else:
                                        sys.stderr.write(
                                            self.style.ERROR(
                                                "Problem installing fixture '%s': %s\n"
                                                % (full_path, ''.join(
                                                    traceback.format_exception(
                                                        sys.exc_type,
                                                        sys.exc_value,
                                                        sys.exc_traceback)))))
                                    return
                                fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    self.enable_forward_ref_checks(cursor)
                                    sys.stderr.write(
                                        self.style.ERROR(
                                            "No fixture data found for '%s'. (File format may be invalid.)"
                                            % (fixture_name)))
                                    transaction.rollback()
                                    transaction.leave_transaction_management()
                                    return

                        except Exception, e:
                            if verbosity > 1:
                                print "No %s fixture '%s' in %s." % \
                                    (format, fixture_name, humanize(fixture_dir))
Example #43
0
 def __enter__(self):
     transaction.enter_transaction_management(True, self._using)
     return transaction
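
This fragment shows only the __enter__ half of a context manager over the old manual transaction API. A minimal sketch of the __exit__ it would typically pair with, assuming self._using names the database alias as above (not taken from the original source):

 def __exit__(self, exc_type, exc_value, tb):
     # Commit if the with-block finished cleanly, otherwise roll back,
     # and always leave transaction management afterwards.
     try:
         if exc_type is None:
             transaction.commit(self._using)
         else:
             transaction.rollback(self._using)
     finally:
         transaction.leave_transaction_management(self._using)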
Example #44
0
    def handle(self, *fixture_labels, **options):
        using = options.get('database', DEFAULT_DB_ALIAS)

        connection = connections[using]
        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(
                        self.namelist()
                    ) == 1, "Zip-compressed fixtures must contain only one file."

            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None: file,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
        connection.begin_defer_constraint_checks()

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(path)
            else:
                # It's a models.py module
                app_module_paths.append(app.__file__)

        app_fixtures = [
            os.path.join(os.path.dirname(path), 'fixtures')
            for path in app_module_paths
        ]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')

            if len(parts) > 1 and parts[-1] in compression_types:
                compression_formats = [parts[-1]]
                parts = parts[:-1]
            else:
                compression_formats = compression_types.keys()

            if len(parts) == 1:
                fixture_name = parts[0]
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    self.stdout.write("Loading '%s' fixtures...\n" %
                                      fixture_name)
            else:
                self.stderr.write(
                    self.style.ERROR(
                        "Problem installing fixture '%s': %s is not a known serialization format.\n"
                        % (fixture_name, format)))
                # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
                connection.end_defer_constraint_checks()

                if commit:
                    transaction.rollback(using=using)
                    transaction.leave_transaction_management(using=using)
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(
                    settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    self.stdout.write("Checking %s for fixtures...\n" %
                                      humanize(fixture_dir))

                label_found = False
                for combo in product([using, None], formats,
                                     compression_formats):
                    database, format, compression_format = combo
                    file_name = '.'.join(
                        p for p in
                        [fixture_name, database, format, compression_format]
                        if p)

                    if verbosity > 1:
                        self.stdout.write("Trying %s for %s fixture '%s'...\n" % \
                            (humanize(fixture_dir), file_name, fixture_name))
                    full_path = os.path.join(fixture_dir, file_name)
                    open_method = compression_types[compression_format]
                    try:
                        fixture = open_method(full_path, 'r')
                        if label_found:
                            fixture.close()
                            self.stderr.write(
                                self.style.ERROR(
                                    "Multiple fixtures named '%s' in %s. Aborting.\n"
                                    % (fixture_name, humanize(fixture_dir))))
                            # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
                            connection.end_defer_constraint_checks()

                            if commit:
                                transaction.rollback(using=using)
                                transaction.leave_transaction_management(
                                    using=using)
                            return
                        else:
                            fixture_count += 1
                            objects_in_fixture = 0
                            loaded_objects_in_fixture = 0
                            if verbosity > 0:
                                self.stdout.write("Installing %s fixture '%s' from %s.\n" % \
                                    (format, fixture_name, humanize(fixture_dir)))
                            try:
                                objects = serializers.deserialize(format,
                                                                  fixture,
                                                                  using=using)
                                for obj in objects:
                                    objects_in_fixture += 1
                                    if router.allow_syncdb(
                                            using, obj.object.__class__):
                                        loaded_objects_in_fixture += 1
                                        models.add(obj.object.__class__)
                                        obj.save(using=using)
                                loaded_object_count += loaded_objects_in_fixture
                                fixture_object_count += objects_in_fixture
                                label_found = True
                            except (SystemExit, KeyboardInterrupt):
                                raise
                            except Exception:
                                import traceback
                                fixture.close()

                                # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
                                connection.end_defer_constraint_checks()

                                if commit:
                                    transaction.rollback(using=using)
                                    transaction.leave_transaction_management(
                                        using=using)
                                if show_traceback:
                                    traceback.print_exc()
                                else:
                                    self.stderr.write(
                                        self.style.ERROR(
                                            "Problem installing fixture '%s': %s\n"
                                            % (full_path, ''.join(
                                                traceback.format_exception(
                                                    sys.exc_type,
                                                    sys.exc_value,
                                                    sys.exc_traceback)))))
                                return
                            fixture.close()

                            # If the fixture we loaded contains 0 objects, assume that an
                            # error was encountered during fixture loading.
                            if objects_in_fixture == 0:
                                self.stderr.write(
                                    self.style.ERROR(
                                        "No fixture data found for '%s'. (File format may be invalid.)\n"
                                        % (fixture_name)))
                                # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
                                connection.end_defer_constraint_checks()

                                if commit:
                                    transaction.rollback(using=using)
                                    transaction.leave_transaction_management(
                                        using=using)
                                return

                    except Exception, e:
                        if verbosity > 1:
                            self.stdout.write("No %s fixture '%s' in %s.\n" % \
                                (format, fixture_name, humanize(fixture_dir)))
Example #45
0
 def __enter__(self):
     t.enter_transaction_management(using=self.using)
     t.managed(True, using=self.using)
Example #46
0
def _make_history_archive():
    transaction.enter_transaction_management()
    transaction.managed()
    transaction.commit()
    date_start = datetime.datetime.now() - datetime.timedelta(days=8)
    sql = """
        SELECT MIN(id) AS min_id, MAX(id) AS max_id
        FROM services_servicehistory
        WHERE created >= %s AND created <= %s
        ORDER BY id DESC LIMIT 1
    """
    cursor = connection.cursor()
    cursor.execute(sql, [
        date_start.strftime("%Y-%m-%d 00:00:01"),
        date_start.strftime("%Y-%m-%d 23:59:59"),
    ])
    row = cursor.fetchone()
    if row is None:
        return
    min_deleted_id = row[0]
    max_deleted_id = row[1]
    if not min_deleted_id or not max_deleted_id:
        return
    sql = """
        INSERT INTO services_servicehistoryarchive (
            response_time,
            namelookup_time,
            connect_time,
            pretransfer_time,
            starttransfer_time,
            redirect_time,
            size_download,
            speed_download,
            redirect_count,
            num_connects,
            created,
            service_id,
            agent_id
        )
        SELECT
            ROUND(AVG(response_time), 2) AS response_time,
            ROUND(AVG(namelookup_time), 2) AS namelookup_time,
            ROUND(AVG(connect_time), 2) AS connect_time,
            ROUND(AVG(pretransfer_time), 2) AS pretransfer_time,
            ROUND(AVG(starttransfer_time), 2) AS starttransfer_time,
            ROUND(AVG(redirect_time), 2) AS redirect_time,
            ROUND(AVG(size_download), 0) AS size_download,
            ROUND(AVG(speed_download), 0) AS speed_download,
            ROUND(AVG(redirect_count), 0) AS redirect_count,
            ROUND(AVG(num_connects), 0) AS num_connects,
            CASE
                WHEN MINUTE(created) >= 45 THEN date_format(created, '%%Y-%%m-%%d %%H:45')
                WHEN MINUTE(created) < 45 AND MINUTE(created) >= 30 THEN date_format(created, '%%Y-%%m-%%d %%H:30')
                WHEN MINUTE(created) < 30 AND MINUTE(created) >= 15 THEN date_format(created, '%%Y-%%m-%%d %%H:15')
                ELSE date_format(created, '%%Y-%%m-%%d %%H:00')
            END AS created_at,
            service_id,
            agent_id
        FROM
            services_servicehistory
        WHERE
            created >= %s AND created <= %s
        GROUP BY
            created_at, service_id, agent_id;
    """
    try:
        cursor.execute(sql, [
            date_start.strftime("%Y-%m-%d 00:00:01"),
            date_start.strftime("%Y-%m-%d 23:59:59"),
        ])
    except DatabaseError:
        transaction.rollback()
        return
    sql = """
        DELETE FROM services_servicehistoryextra
        WHERE service_history_id >= %s AND service_history_id <= %s
    """
    try:
        cursor.execute(sql, [min_deleted_id, max_deleted_id])
    except DatabaseError:
        transaction.rollback()
        return
    sql = """
        SELECT
            partition_name
        FROM INFORMATION_SCHEMA.PARTITIONS
        WHERE
            table_schema=%s AND
            table_name='services_servicehistory' AND
            partition_name<>'p_other'
        ORDER BY partition_name ASC
    """
    try:
        cursor.execute(sql, [settings.DATABASES['default']['NAME']])
    except DatabaseError:
        transaction.rollback()
        return
    current_partitions = []
    for row in cursor.fetchall():
        current_partitions.append(row[0])
    partition_to_delete = (
        date_start + datetime.timedelta(days=1)
    ).strftime("p%Y%m%d")
    if partition_to_delete not in current_partitions:
        return
    sql = "ALTER TABLE services_servicehistory DROP PARTITION %s" % (
        partition_to_delete,
    )
    try:
        cursor.execute(sql)
    except DatabaseError:
        transaction.rollback()
        return
    transaction.commit()
Example #47
0
    def import_data(self,
                    dataset,
                    dry_run=False,
                    raise_errors=False,
                    use_transactions=None):
        """
        Imports data from ``dataset``.

        ``use_transactions``
            If ``True``, the import will run inside a transaction.
            If ``dry_run`` is set, or an error occurs, the transaction will be
            rolled back.
        """
        result = Result()

        if use_transactions is None:
            use_transactions = self.get_use_transactions()

        if use_transactions is True:
            # when transactions are used we want to create/update/delete object
            # as transaction will be rolled back if dry_run is set
            real_dry_run = False
            transaction.enter_transaction_management()
            transaction.managed(True)
        else:
            real_dry_run = dry_run

        instance_loader = self._meta.instance_loader_class(self, dataset)

        for row in dataset.dict:
            try:
                row_result = RowResult()
                instance, new = self.get_or_init_instance(instance_loader, row)
                if new:
                    row_result.import_type = RowResult.IMPORT_TYPE_NEW
                else:
                    row_result.import_type = RowResult.IMPORT_TYPE_UPDATE
                row_result.new_record = new
                original = deepcopy(instance)
                if self.for_delete(row, instance):
                    if new:
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                        row_result.diff = self.get_diff(
                            None, None, real_dry_run)
                    else:
                        row_result.import_type = RowResult.IMPORT_TYPE_DELETE
                        self.delete_instance(instance, real_dry_run)
                        row_result.diff = self.get_diff(
                            original, None, real_dry_run)
                else:
                    self.import_obj(instance, row)
                    self.save_instance(instance, real_dry_run)
                    self.save_m2m(instance, row, real_dry_run)
                    row_result.diff = self.get_diff(original, instance,
                                                    real_dry_run)
            except Exception, e:
                tb_info = traceback.format_exc(sys.exc_info()[2])
                row_result.errors.append(Error(repr(e), tb_info))
                if raise_errors:
                    if use_transactions:
                        transaction.rollback()
                        transaction.leave_transaction_management()
                    raise
            result.rows.append(row_result)
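
The excerpt stops inside the row loop. A hedged sketch of how such an import is typically finalized once the loop ends, assuming the Result object exposes a has_errors() helper: roll back for dry runs or failed imports, commit otherwise, then restore normal transaction handling.

        if use_transactions:
            if dry_run or result.has_errors():
                transaction.rollback()
            else:
                transaction.commit()
            transaction.leave_transaction_management()

        return result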
Example #48
0
def setupBridge(master, settings, config):
    '''Set up the bridge between buildbot and the database.

    This is also the closure in which all things happen that depend
    on the given settings.
    '''

    # allow settings to be none for tests
    if settings is not None:
        os.environ['DJANGO_SETTINGS_MODULE'] = settings

    import bb2mbdb.utils
    reload(bb2mbdb.utils)
    import mbdb.models
    reload(mbdb.models)
    from bb2mbdb.utils import modelForSource, modelForChange, modelForLog, \
        timeHelper
    from mbdb.models import Master, Slave, Builder, BuildRequest, Build

    from django.db import transaction

    try:
        # hack around the lack of @transaction.commit_manually
        transaction.enter_transaction_management()
        transaction.managed(True)
        dbm, new_master = Master.objects.get_or_create(name=master)
        transaction.commit()
    except:
        transaction.rollback()
        raise
    finally:
        transaction.leave_transaction_management()

    class Scheduler(BaseScheduler):
        @transaction.commit_on_success
        def addChange(self, change):
            dbchange = modelForChange(dbm, change)
            log.msg('ADDED CHANGE to DB, %d' % dbchange.number)

        def listBuilderNames(self):
            # Sadly, we need this. Buildbot is going to complain that we
            # don't build. What does he know.
            return []

    if 'schedulers' not in config:
        config['schedulers'] = []
    config['schedulers'].insert(0, Scheduler('bb2mbdb'))

    class StepReceiver(StatusReceiver):
        '''Build- and StatusReceiver helper objects to receive all
        events for a particular step.
        '''
        def __init__(self, dbstep, basedir):
            self.step = dbstep
            self.basedir = basedir

        @transaction.commit_on_success
        def stepTextChanged(self, build, step, text):
            self.step.text = text
            self.step.save()

        @transaction.commit_on_success
        def stepText2Changed(self, build, step, text2):
            self.step.text2 = text2
            self.step.save()

        @transaction.commit_on_success
        def logStarted(self, build, step, log):
            self.log = modelForLog(self.step, log, self.basedir)

        def logChunk(self, build, step, log, channel, text):
            pass

        @transaction.commit_on_success
        def logFinished(self, build, step, log):
            self.log.isFinished = True
            self.log.save()
            pass

        def stepETAUpdate(self, build, step, ETA, expectations):
            '''TODO: ETA support.
            '''
            pass

    class BuildReceiver(StatusReceiver):
        '''StatusReceiver helper object to receive all events
        for a particular build.
        Caches the database model object.
        '''
        def __init__(self, dbbuild, basedir):
            self.build = dbbuild
            self.basedir = basedir
            self.latestStep = self.latestDbStep = None

        @transaction.commit_manually
        def stepStarted(self, build, step):
            self.latestStep = step
            starttime = timeHelper(step.getTimes()[0])
            self.latestDbStep = self.build.steps.create(name=step.getName(),
                                                        starttime=starttime,
                                                        text=step.getText(),
                                                        text2=step.text2)
            transaction.commit()
            return StepReceiver(self.latestDbStep, self.basedir)

        @transaction.commit_on_success
        def stepFinished(self, build, step, results):
            assert step == self.latestStep, "We lost a step somewhere"
            try:
                self.latestStep = None
                self.latestDbStep.endtime = timeHelper(step.getTimes()[1])
                # only the first is the result, the second is text2,
                # ignore that.
                self.latestDbStep.result = results[0]
                self.latestDbStep.text = step.getText()
                self.latestDbStep.text2 = step.text2
                self.latestDbStep.save()
                self.latestDbStep = None
            except Exception, e:
                log.msg(str(e))
            pass

        def buildETAUpdate(self, build, ETA):
            '''TODO: ETA support.
            '''
            pass
Example #49
0
 def process_request(self, request):
     """Enters transaction management"""
     transaction.enter_transaction_management()
     transaction.managed(True)
Example #50
0
 def nocommit(using=None):
     t.enter_transaction_management(using=using)
     t.managed(True, using=using)
     yield
     t.rollback()
     t.leave_transaction_management(using=using)
Example #51
0
 def testSetUp(cls):
     transaction.enter_transaction_management()
     transaction.managed(True)
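
A setup hook like this is normally paired with a teardown that discards whatever the test wrote. A hedged sketch of the counterpart, assuming the same class-level hook naming:

 @classmethod
 def testTearDown(cls):
     # Undo everything written while the managed transaction was open.
     transaction.rollback()
     transaction.leave_transaction_management()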
Example #52
0
        m = re.match("^# dbdump v(\d+) - (\d+) objects$", line)
        if not m:
            sys.stderr.write("Unknown dump format\n")
            sys.exit(1)

        version = int(m.group(1))
        totalobjs = int(m.group(2))
        i = 0
        prev_pct = -1

        if version != 1:
            sys.stderr.write("Unknown dump version\n")
            sys.exit(1)

        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)
        transaction_setup = True

        print "Importing new style dump format (v%s)" % version
        for line in f.xreadlines():
            if line[0] == "{":
                for obj in serializers.deserialize("json", "[%s]" % line):
                    try:
                        obj.save()
                    except Exception, e:
                        sys.stderr.write("Error: %s\n" % e)
                        sys.stderr.write("Line %s: '%s'" % (i, line))
            elif line[0] != "#":
                sys.stderr.write("Junk data on line %s" % i)
Example #53
0
def migrate_model(processor, model, fields):
    from ella.core.models import Publishable
    model = get_model(*model.split('.'))
    ct = ContentType.objects.get_for_model(model)
    if model == Publishable:
        ct = None
    print 'processing', model._meta, ':',
    sys.stdout.flush()

    converted = 0
    deps = 0

    try:
        enter_transaction_management()
        managed(True)

        try:
            for m in model.objects.order_by().iterator():
                if not ct:  # publishable
                    ct = ContentType.objects.get_for_id(m.content_type_id)
                sys.stdout.write('.')
                converted += 1

                # commit every 1000 iterations
                if (converted % 1000) == 0 and is_dirty():
                    commit()
                    sys.stdout.write('C')
                    sys.stdout.flush()

                dirty = False
                for f in fields:
                    val = getattr(m, f)
                    if val:
                        val, cnt = BOX_RE.subn(update_field(m, ct), val)
                        if cnt > 0:
                            deps += cnt
                            setattr(m, f, val)
                            dirty = True

                SourceText.objects.extract_from_instance(m,
                                                         processor,
                                                         fields,
                                                         content_type=ct,
                                                         force_save=dirty,
                                                         force_create=True)
        except:
            # rollback and propagate if something goes wrong
            if is_dirty():
                rollback()
            raise
        else:
            # commit at the end
            if is_dirty():
                commit()
    finally:
        leave_transaction_management()

    print
    print 'DONE converted %d (%d reported dependencies)' % (
        converted,
        deps,
    )
    sys.stdout.flush()
Example #54
0
    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError("Expected exactly 1 argument - Experiment ID")
        try:
            exp = Experiment.objects.get(pk=int(args[0]))
        except Experiment.DoesNotExist:
            raise CommandError("Experiment ID %s not found" % args[0])

        # FIXME - we fetch a bunch of stuff outside of any transaction, and then
        # do the deletes in a transaction.  There is an obvious race condition here
        # that may result in components of an experiment not being deleted, or being
        # deleted when they shouldn't be.

        # Fetch Datasets and Datafiles and work out which ones would be deleted
        datasets = Dataset.objects.filter(experiments__id=exp.id)
        datafiles = Dataset_File.objects.filter(
            dataset__id__in=map((lambda ds: ds.id), datasets))
        uniqueDatasets = filter((lambda ds: ds.experiments.count() == 1),
                                datasets)
        uniqueDatasetIds = map((lambda ds: ds.id), uniqueDatasets)
        uniqueDatafiles = filter(
            (lambda df: df.dataset.id in uniqueDatasetIds), datafiles)

        # Fetch other stuff to be printed and deleted.
        acls = ExperimentACL.objects.filter(experiment=exp)
        authors = Author_Experiment.objects.filter(experiment=exp)
        epsets = ExperimentParameterSet.objects.filter(experiment=exp)

        confirmed = options.get('confirmed', False)
        listOnly = options.get('list', False)
        if not listOnly and not confirmed:
            self.stdout.write("Delete the following experiment?\n\n")

        if listOnly or not confirmed:
            # Print basic experiment information
            self.stdout.write("Experiment\n    ID: {0}\n".format(exp.id))
            self.stdout.write("    Title: {0}\n".format(exp.title))
            self.stdout.write("    Locked: {0}\n".format(exp.locked))
            self.stdout.write("    Public Access: {0}\n".format(
                exp.public_access))

            # List experiment authors
            self.stdout.write("    Authors:\n")
            for author in authors:
                self.stdout.write("        {0}\n".format(author.author))

            # List experiment metadata
            for epset in epsets:
                self.stdout.write("    Param Set: {0} - {1}\n".format(
                    epset.schema.name, epset.schema.namespace))
                params = ExperimentParameter.objects.filter(parameterset=epset)
                for param in params:
                    self.stdout.write("        {0} = {1}\n".format(
                        param.name.full_name, param.get()))

            # List experiment ACLs
            self.stdout.write("    ACLs:\n")
            for acl in acls:
                self.stdout.write("        {0}-{1}, flags: ".format(
                    acl.pluginId, acl.entityId))
                if acl.canRead:
                    self.stdout.write("R")
                if acl.canWrite:
                    self.stdout.write("W")
                if acl.canDelete:
                    self.stdout.write("D")
                if acl.isOwner:
                    self.stdout.write("O")
                self.stdout.write("\n")

            # Basic Statistics
            self.stdout.write(
                "    {0} total dataset(s), containing {1} file(s)\n".format(
                    datasets.count(), datafiles.count()))
            self.stdout.write(
                "    {0} non-shared dataset(s), containing {1} file(s)\n".
                format(len(uniqueDatasets), len(uniqueDatafiles)))
            if len(uniqueDatasets) > 0 and not listOnly:
                self.stdout.write(
                    "        (The non-shared datasets and files will be deleted)\n"
                )

        # If the user has only requested a listing finish now
        if listOnly:
            return

        if not confirmed:
            # User must enter "yes" to proceed
            self.stdout.write("\n\nConfirm Deletion? (yes): ")
            ans = sys.stdin.readline().strip()
            if ans != "yes":
                self.stdout.write("'yes' not entered, aborting.\n")
                return

        # Consider the entire experiment deletion atomic
        using = options.get('database', DEFAULT_DB_ALIAS)
        transaction.commit_unless_managed(using=using)
        transaction.enter_transaction_management(using=using)
        transaction.managed(True, using=using)

        try:
            acls.delete()
            epsets.delete()
            for dataset in datasets:
                dataset.experiments.remove(exp.id)
                if dataset.experiments.count() == 0:
                    DatasetParameterSet.objects.filter(
                        dataset=dataset).delete()
                    for datafile in Dataset_File.objects.filter(
                            dataset=dataset):
                        DatafileParameterSet.objects.filter(
                            dataset_file=datafile).delete()
                        datafile.delete()
                    dataset.delete()
            authors.delete()
            exp.delete()

            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)
        except Exception:
            transaction.rollback(using=using)
            exc_class, exc, tb = sys.exc_info()
            new_exc = CommandError(
                "Exception %s has occurred: rolled back transaction" %
                (exc or exc_class))
            raise new_exc.__class__, new_exc, tb
Example #55
0
and data currently in the %r database, and may result in
IRREVERSIBLE DATA LOSS. Evolutions should be *thoroughly* reviewed
prior to execution.

Are you sure you want to execute the evolutions?

Type 'yes' to continue, or 'no' to cancel: """ % database)
                else:
                    confirm = 'yes'

                if is_multi_db():
                    from django.db import connections

                if confirm.lower() == 'yes':
                    # Begin Transaction
                    transaction.enter_transaction_management(**using_args)
                    transaction.managed(flag=True, **using_args)

                    if is_multi_db():
                        cursor = connections[database].cursor()
                    else:
                        cursor = connection.cursor()

                    try:
                        # Perform the SQL
                        #                        execute_sql(cursor, sql)
                        for e in do_actions:
                            e.doAction()

                        # Now update the evolution table
                        version = Version(signature=current_signature)
Example #56
0
    def handle(self, *fixture_files, **options):

        using = options.get('database', DEFAULT_DB_ALIAS)
        mode = options.get('mode', 'append')
        items_into_tree = options.get('into_tree', None)

        if items_into_tree is not None:
            try:
                items_into_tree = Tree.objects.get(alias=items_into_tree)
            except ObjectDoesNotExist:
                raise CommandError(
                    'Target tree aliased by `%s` does not exist. Please create it before import.'
                    % items_into_tree)
            else:
                mode = 'append'

        connection = connections[using]
        cursor = connection.cursor()

        self.style = no_style()

        transaction.commit_unless_managed(using=using)
        transaction.enter_transaction_management(using=using)
        transaction.managed(True, using=using)

        loaded_object_count = 0

        if mode == 'replace':
            try:
                Tree.objects.all().delete()
                TreeItem.objects.all().delete()
            except ObjectDoesNotExist:
                pass

        for fixture_file in fixture_files:

            self.stdout.write('Loading fixture from `%s` ...\n' % fixture_file)

            fixture = file(fixture_file, 'r')

            try:
                objects = serializers.deserialize('json', fixture, using=using)
            except (SystemExit, KeyboardInterrupt):
                raise

            trees = []
            tree_items = defaultdict(list)
            tree_item_parents = defaultdict(list)
            tree_items_new_indexes = {}

            for obj in objects:
                if router.allow_syncdb(using, obj.object.__class__):
                    if isinstance(obj.object, (Tree, TreeItem)):
                        if isinstance(obj.object, Tree):
                            trees.append(obj.object)
                        else:
                            if items_into_tree is not None:
                                obj.object.tree_id = items_into_tree.id
                            tree_items[obj.object.tree_id].append(obj.object)
                            tree_item_parents[obj.object.parent_id].append(
                                obj.object.id)

            if items_into_tree is not None:
                trees = [
                    items_into_tree,
                ]

            try:

                for tree in trees:

                    self.stdout.write('\nImporting tree `%s` ...\n' %
                                      tree.alias)
                    orig_tree_id = tree.id

                    if items_into_tree is None:
                        if mode == 'append':
                            tree.pk = None
                            tree.id = None

                        tree.save(using=using)
                        loaded_object_count += 1

                    parents_ahead = []

                    for tree_item in tree_items[orig_tree_id]:
                        parent_ahead = False
                        self.stdout.write('Importing item `%s` ...\n' %
                                          tree_item.title)
                        tree_item.tree_id = tree.id
                        orig_item_id = tree_item.id

                        if mode == 'append':
                            tree_item.pk = None
                            tree_item.id = None

                            if tree_item.id in tree_items_new_indexes:
                                tree_item.pk = tree_item.id = tree_items_new_indexes[
                                    tree_item.id]

                            if tree_item.parent_id is not None:
                                if tree_item.parent_id in tree_items_new_indexes:
                                    tree_item.parent_id = tree_items_new_indexes[
                                        tree_item.parent_id]
                                else:
                                    parent_ahead = True

                        tree_item.save(using=using)
                        loaded_object_count += 1

                        if mode == 'append':
                            tree_items_new_indexes[orig_item_id] = tree_item.id
                            if parent_ahead:
                                parents_ahead.append(tree_item)

                    # Second pass is necessary for tree items being imported before their parents.
                    for tree_item in parents_ahead:
                        tree_item.parent_id = tree_items_new_indexes[
                            tree_item.parent_id]
                        tree_item.save(using=using)

            except (SystemExit, KeyboardInterrupt):
                raise

            except Exception:
                import traceback
                fixture.close()
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
                self.stderr.write(
                    self.style.ERROR('Fixture `%s` import error: %s\n' %
                                     (fixture_file, ''.join(
                                         traceback.format_exception(
                                             sys.exc_type, sys.exc_value,
                                             sys.exc_traceback)))))

            fixture.close()

        # Reset DB sequences, for DBMS with sequences support.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                self.style, [Tree, TreeItem])
            if sequence_sql:
                self.stdout.write('Resetting DB sequences ...\n')
                for line in sequence_sql:
                    cursor.execute(line)

        transaction.commit(using=using)
        transaction.leave_transaction_management(using=using)

        connection.close()
Example #57
0
 def test_atomic_prevents_calling_transaction_management_methods(self):
     with transaction.atomic():
         with self.assertRaises(transaction.TransactionManagementError):
             transaction.enter_transaction_management()
         with self.assertRaises(transaction.TransactionManagementError):
             transaction.leave_transaction_management()
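
The test above reflects the direction Django took from 1.6 on: inside transaction.atomic() the legacy enter/leave calls raise TransactionManagementError. A minimal sketch of the modern equivalent of the manual pattern used throughout these examples:

from django.db import transaction


def save_all(objs, using=None):
    # One transaction for the whole loop: committed if the block finishes,
    # rolled back automatically if an exception propagates out of it.
    with transaction.atomic(using=using):
        for obj in objs:
            obj.save(using=using)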
Example #58
0
    def beforeTest(self, test):
        """
        Load any database fixtures, set up any test url configurations and
        prepare for using transactions for database rollback if possible.
        """
        if not self.settings_path:
            # short circuit if no settings file can be found
            return

        from django.contrib.sites.models import Site
        from django.contrib.contenttypes.models import ContentType
        from django.core.management import call_command
        from django.core.urlresolvers import clear_url_caches
        from django.conf import settings
        from django.db import transaction

        use_transaction_isolation = self._should_use_transaction_isolation(
            test, settings)

        if use_transaction_isolation:
            self.call_plugins_method('beforeTransactionManagement', settings, test)
            transaction.enter_transaction_management()
            transaction.managed(True)
            self.disable_transaction_support(transaction)

        Site.objects.clear_cache()
        ContentType.objects.clear_cache() # Otherwise django.contrib.auth.Permissions will depend on deleted ContentTypes

        if use_transaction_isolation:
            self.call_plugins_method('afterTransactionManagement', settings, test)

        self.call_plugins_method('beforeFixtureLoad', settings, test)
        if isinstance(test, nose.case.Test):
            # Mirrors django.test.testcases:TestCase

            if hasattr(test.context, 'fixtures'):
                # We have to use this slightly awkward syntax due to the fact
                # that we're using *args and **kwargs together.
                ordered_fixtures = sorted(test.context.fixtures)
                if ordered_fixtures != self._loaded_test_fixtures:
                    # Only clear + load the fixtures if they're not already loaded

                    # Flush previous fixtures
                    if use_transaction_isolation:
                        self.restore_transaction_support(transaction)

                    self._flush_db()

                    if use_transaction_isolation:
                        transaction.commit()
                        self.disable_transaction_support(transaction)

                    # Load the new fixtures
                    logger.debug("Loading fixtures: %s", test.context.fixtures)
                    if use_transaction_isolation:
                        call_command(
                            'loaddata',
                            *test.context.fixtures,
                            **{'verbosity': 0, 'commit': False}
                        )
                        self.restore_transaction_support(transaction)
                        transaction.commit()
                        self.disable_transaction_support(transaction)
                    else:
                        call_command(
                            'loaddata',
                            *test.context.fixtures,
                            **{'verbosity': 0}
                        )
                    self._num_fixture_loads += 1
                    self._loaded_test_fixtures = ordered_fixtures
Example #59
0
 def setUp(self):
     transaction.enter_transaction_management()
Example #60
0
def update_imported_docs(project, version):
    """
    Check out or update the given project's repository.
    """
    update_docs_output = {}
    if not project.vcs_repo():
        raise ProjectImportError("Repo type '{repo_type}' unknown".format(
            repo_type=project.repo_type))

    if version:
        log.info('Checking out version {slug}: {identifier}'.format(
            slug=version.slug, identifier=version.identifier))
        version_slug = version.slug
        version_repo = project.vcs_repo(version_slug)
        update_docs_output['checkout'] = version_repo.checkout(
            version.identifier)
    else:
        log.info('Updating to latest revision')
        version_slug = 'latest'
        version_repo = project.vcs_repo(version_slug)
        update_docs_output['checkout'] = version_repo.update()

    # Ensure we have a conf file (an exception is raised if not)
    conf_file = project.conf_file(version.slug)

    #Do Virtualenv bits:
    if project.use_virtualenv:
        update_docs_output['venv'] = run(
            '{cmd} --distribute --never-download --no-site-packages {path}'.
            format(cmd='virtualenv',
                   path=project.venv_path(version=version_slug)))
        update_docs_output['sphinx'] = run('{cmd} install -U sphinx'.format(
            cmd=project.venv_bin(version=version_slug, bin='pip')))

        if project.requirements_file:
            os.chdir(project.checkout_path(version_slug))
            update_docs_output['requirements'] = run(
                '{cmd} install -r {requirements}'.format(
                    cmd=project.venv_bin(version=version_slug, bin='pip'),
                    requirements=project.requirements_file))
        os.chdir(project.checkout_path(version_slug))
        update_docs_output['install'] = run(
            '{cmd} setup.py install --force'.format(
                cmd=project.venv_bin(version=version_slug, bin='python')))

    # check tags/version
    #XXX:dc: what in this block raises the values error?
    try:
        if version_repo.supports_tags:
            transaction.enter_transaction_management(True)
            tags = version_repo.tags
            old_tags = [
                obj['identifier']
                for obj in api.version.get(project__slug=project.slug,
                                           limit=50)['objects']
            ]
            for tag in tags:
                if tag.identifier in old_tags:
                    continue
                slug = slugify_uniquely(Version,
                                        tag.verbose_name,
                                        'slug',
                                        255,
                                        project=project)
                try:
                    ver = api.version.post(
                        dict(project="/api/v1/project/%s/" % project.pk,
                             slug=slug,
                             identifier=tag.identifier,
                             verbose_name=tag.verbose_name))
                    log.info("New tag found: {0}".format(tag.identifier))
                    highest = project.highest_version['version']
                    ver_obj = mkversion(ver)
                    #TODO: Handle updating higher versions automatically.
                    #This never worked very well, anyways.
                    if highest and ver_obj and ver_obj > highest:
                        log.info("Highest version known, building docs")
                        update_docs.delay(ver_obj.project.pk, version_pk=ver_obj.pk)
                except Exception:
                    log.error("Failed to create version (tag)", exc_info=True)
                    transaction.rollback()
                    #break
            transaction.leave_transaction_management()
        if version_repo.supports_branches:
            transaction.enter_transaction_management(True)
            branches = version_repo.branches
            old_branches = [
                obj['identifier']
                for obj in api.version.get(project__slug=project.slug,
                                           limit=50)['objects']
            ]
            for branch in branches:
                if branch.identifier in old_branches:
                    continue
                slug = slugify_uniquely(Version,
                                        branch.verbose_name,
                                        'slug',
                                        255,
                                        project=project)
                try:
                    api.version.post(
                        dict(project="/api/v1/project/%s/" % project.pk,
                             slug=slug,
                             identifier=branch.identifier,
                             verbose_name=branch.verbose_name))
                    log.info("New branch found: {0}".format(branch.identifier))
                except Exception:
                    log.error("Failed to create version (branch)",
                              exc_info=True)
                    transaction.rollback()
                    #break
            transaction.leave_transaction_management()
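
Both loops above repeat the same bookkeeping: enter_transaction_management(True), roll back on a failed creation, and leave management once the loop ends. On this old API that pattern is roughly what the transaction.commit_on_success context manager provided, and transaction.atomic plays the same role in modern Django. A hedged sketch of the pattern as a standalone context manager; managed_block is an illustrative name, not something from this codebase:

from contextlib import contextmanager
from django.db import transaction

@contextmanager
def managed_block():
    # Open an explicit, managed transaction for the duration of the block.
    transaction.enter_transaction_management(True)
    try:
        yield
        transaction.commit()
    except Exception:
        # Undo partial work before propagating the error to the caller.
        transaction.rollback()
        raise
    finally:
        transaction.leave_transaction_management()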