def handle(self, dry_run=False, **options):
        verbosity = int(options['verbosity'])
        verbose = verbosity > 1

        # start transaction
        transaction.enter_transaction_management()
        transaction.managed(True)

        count = 0

        for document in models.Document.objects.filter(text_extracted=False):
            if verbose:
                print "PROCESS", repr(document)

            process_document_text(document)

            count += 1

        if verbose:
            print "\nIN SUMMARY".ljust(70, '=')
            print count, "document records processed"
            print "\n"
        if dry_run:
            transaction.rollback()
        else:
            transaction.commit()

        transaction.leave_transaction_management()
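The examples in this collection all rely on the pre-1.6 manual transaction API (enter_transaction_management / managed / leave_transaction_management), which was removed in Django 1.8. For comparison, here is a minimal sketch (not taken from any of the quoted projects) of how Example #1's dry-run pattern maps onto transaction.atomic(); the models module and process_document_text helper are assumed to be the same objects used in Example #1.

from django.core.management.base import BaseCommand
from django.db import transaction


class Command(BaseCommand):
    def handle(self, dry_run=False, **options):
        count = 0
        with transaction.atomic():
            # models and process_document_text are assumed to come from
            # Example #1's module.
            for document in models.Document.objects.filter(text_extracted=False):
                process_document_text(document)
                count += 1
            if dry_run:
                # Mark the enclosing atomic() block for rollback instead of
                # letting it commit on exit.
                transaction.set_rollback(True)
        self.stdout.write("%d document records processed\n" % count)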
Example #2
def _force_leave(db_name):
    try:
        transaction.set_clean(using=db_name)
        transaction.leave_transaction_management(using=db_name)
    except:
        _logger.error('force leave error: %s', db_name)
        _logger.except_error()
Example #3
 def process_response(self, request, response):
     """Commits and leaves transaction management."""
     if transaction.is_managed(using=self.get_tenant(request)):
         if transaction.is_dirty(using=self.get_tenant(request)):
             transaction.commit(using=self.get_tenant(request))
         transaction.leave_transaction_management(using=self.get_tenant(request))
     return response
Example #4
 def commit(self):
     if self.done:
         return
     self.done = True
     transaction.commit(using=self.db)
     transaction.leave_transaction_management(using=self.db)
      print "Committing"
Example #5
 def update(self, **kwargs):
     """
     Updates all elements in the current QuerySet, setting all the given
     fields to the appropriate values.
     """
     assert self.query.can_filter(), \
             "Cannot update a query once a slice has been taken."
     query = self.query.clone(sql.UpdateQuery)
     query.add_update_values(kwargs)
     if not transaction.is_managed():
         transaction.enter_transaction_management()
         forced_managed = True
     else:
         forced_managed = False
     try:
         rows = query.execute_sql(None)
         if forced_managed:
             transaction.commit()
         else:
             transaction.commit_unless_managed()
     finally:
         if forced_managed:
             transaction.leave_transaction_management()
     self._result_cache = None
     return rows
Example #6
 def _fixture_setup(self):
     transaction.commit_unless_managed()
     transaction.enter_transaction_management()
     transaction.managed(True)
     super(SeleniumTestCase, self)._fixture_setup()
     transaction.commit()
     transaction.leave_transaction_management()
Example #7
    def handle(self, **options):
        '''The main entry point for the Django management command.'''

        import_start = datetime.datetime.now()
        # Start transaction management.
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)
        try:
            self._promote_devel()
        except:
            self._rollback_db()
            raise
        # Finalize the transaction and close the db connection.
        transaction.commit()
        transaction.leave_transaction_management()
        connection.close()
        import_end = datetime.datetime.now()

        # Print a short summary of what we did.
        td = import_end - import_start
        print '\nProcessing complete in %s days, %s.%s seconds.' % (
          td.days, td.seconds, td.microseconds)
        print '  TraitData objects promoted: %s' % (
          PublicTraitData.objects.all().count(),)
Example #8
 def test_savepoint_rollback(self):
     """Tests rollbacks of savepoints"""
     from django.db import transaction
     from testapp.models import Genre, Publisher
     from johnny import cache
     if not connection.features.uses_savepoints:
         return
     self.failUnless(transaction.is_managed() == False)
     self.failUnless(transaction.is_dirty() == False)
     connection.queries = []
     cache.local.clear()
     transaction.enter_transaction_management()
     transaction.managed()
     g = Genre.objects.get(pk=1)
     start_title = g.title
     g.title = "Adventures in Savepoint World"
     g.save()
     g = Genre.objects.get(pk=1)
     self.failUnless(g.title == "Adventures in Savepoint World")
     sid = transaction.savepoint()
     g.title = "In the Void"
     g.save()
     g = Genre.objects.get(pk=1)
     self.failUnless(g.title == "In the Void")
     transaction.savepoint_rollback(sid)
     g = Genre.objects.get(pk=1)
     self.failUnless(g.title == "Adventures in Savepoint World")
     transaction.rollback()
     g = Genre.objects.get(pk=1)
     self.failUnless(g.title == start_title)
     transaction.managed(False)
     transaction.leave_transaction_management()
Example #9
def grab_db_lock(lock_name, wait):
    """
    Grab a lock using a new, temporary connection. Yields a "success"
    boolean indicating whether the lock was successfully acquired or not.

    This context manager ensures that multidb does not override the
    connection when accessing the Lock model and its transaction. The
    code that runs within this context should NOT be affected - it must
    run as usual, with normal multidb functionality intact.

    """

    with connections.get() as using:

        try:

            with connection_state.force(None):
                transaction.enter_transaction_management(using=using)
                transaction.managed(True, using=using)
                lock = Lock.grab(lock_name, wait=wait, using=using)

            success = bool(lock)

            try:
                yield success
            finally:
                if success:
                    with connection_state.force(None):
                        lock.release(using=using)
                        transaction.commit(using=using)

        finally:
            with connection_state.force(None):
                transaction.leave_transaction_management(using=using)
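Since grab_db_lock() yields a value, it is presumably decorated with contextlib.contextmanager in the original module (the decorator is not shown above). A hypothetical usage sketch, with a made-up lock name and work function:

import logging

logger = logging.getLogger(__name__)

with grab_db_lock('nightly-import', wait=False) as success:
    if success:
        run_nightly_import()  # hypothetical work function
    else:
        logger.info('Another worker holds the lock; skipping this run.')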
Example #10
    def test_transaction_management(self):
        transaction.enter_transaction_management()
        self.assertFalse(connection.autocommit)
        self.assertEqual(connection.isolation_level, self._serializable)

        transaction.leave_transaction_management()
        self.assertTrue(connection.autocommit)
Example #11
 def tearDown(self):
     from django.db import transaction
     if transaction.is_managed():
         if transaction.is_dirty():
             transaction.rollback()
         transaction.managed(False)
         transaction.leave_transaction_management()
Example #12
def atomic(using=None):
    """Perform database operations atomically within a transaction.

    The caller can use this to ensure SQL statements are executed within
    a transaction and then cleaned up nicely if there's an error.

    This provides compatibility with all supported versions of Django.

    Args:
        using (str, optional):
            The database connection name to use. Defaults to the default
            database connection.
    """
    if hasattr(transaction, 'atomic'):
        # Django >= 1.5
        with transaction.atomic(using=using):
            yield
    else:
        # Django < 1.5
        assert hasattr(transaction, 'enter_transaction_management')

        try:
            # Begin Transaction
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

            yield

            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)
        except Exception:
            transaction.rollback(using=using)
            raise
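Because this helper yields, it is presumably wrapped with contextlib.contextmanager in the original source (the decorator is not shown). A hypothetical usage sketch, with a made-up model:

with atomic(using='default'):
    # Both writes commit together, or neither does if an exception escapes.
    book = Book.objects.create(title='Example')  # hypothetical model
    book.chapters.create(number=1)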
Example #13
 def process_response(self, request, response):
     """Commits and leaves transaction management."""
     if transaction.is_managed():
         if transaction.is_dirty():
             transaction.commit()
         transaction.leave_transaction_management()
     return response
Example #14
 def __call__(self, request, *args, **kwargs):
     """
     By implementing the call method, we allow the class itself to act as
     the view function, taking in the request object and returning the
     response.  This method is responsible for dealing with transaction
     management, dispatching to the proper instance methods, and returning
     a valid HttpResponse.
     """
     if self.use_transactions:
         transaction.commit_unless_managed()
         transaction.enter_transaction_management()
         transaction.managed(True)
     
     context = self.view(request, *args, **kwargs)
     if isinstance(context, HttpResponseRedirect):
         return context
     context.update(self.extra_context)
     
     if self.next_field in request.REQUEST:
         next = request.REQUEST[self.next_field]
         if ':/' not in next:
             return HttpResponseRedirect(next)
     
     response = self.create_response(request, context)
     if self.use_transactions:
         transaction.commit()
         transaction.leave_transaction_management()
     
     return response
Example #15
def execute_transaction(sql, output=False, database='default'):
    "A transaction wrapper for executing a list of SQL statements"
    my_connection = connection
    using_args = {}

    if is_multi_db():
        if not database:
            database = DEFAULT_DB_ALIAS

        my_connection = connections[database]
        using_args['using'] = database

    try:
        # Begin Transaction
        transaction.enter_transaction_management(**using_args)
        transaction.managed(True, **using_args)

        cursor = my_connection.cursor()

        # Perform the SQL
        if output:
            write_sql(sql, database)

        execute_sql(cursor, sql)

        transaction.commit(**using_args)
        transaction.leave_transaction_management(**using_args)
    except Exception:
        transaction.rollback(**using_args)
        raise
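A hypothetical call, with made-up SQL and the default database alias, just to show the expected arguments:

execute_transaction(
    ['UPDATE myapp_item SET processed = 1;'],  # list of SQL statements (hypothetical)
    output=True,         # echo the SQL via write_sql() before executing it
    database='default',
)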
Example #16
    def call_ranking(self, ranking, name, args):
        if ranking not in self.ranking_map:
            logging.warning('Attempted to call ranking, but not running: %s.%s', ranking.id, name)
            return

        logging.debug('Calling ranking: %s.%s', ranking.id, name)

        perf.begin('ranking')
        transaction.enter_transaction_management(True)
        transaction.managed(True)
        try:
            getattr(self.ranking_map[ranking], name)(*args)
        except:
            transaction.rollback()
            logging.exception('Ranking failed: %s', ranking.id)
            self.stop_ranking(ranking)
            try:
                ranking.header = 'Internal error'
                ranking.footer = ''
                ranking.save(force_update=True)
                RankingEntry.objects.filter(ranking=ranking).delete()
            except:
                transaction.rollback()
                logging.exception('Ranking cleanup failed: %s', ranking.id)
        transaction.commit()
        transaction.managed(False)
        transaction.leave_transaction_management()
        perf.end('ranking')
Example #17
def serializerTest(format, self):
    # Clear the database first
    management.call_command('flush', verbosity=0, interactive=False)

    # Create all the objects defined in the test data
    objects = []
    transaction.enter_transaction_management()
    transaction.managed(True)
    for (func, pk, klass, datum) in test_data:
        objects.append(func[0](pk, klass, datum))
    transaction.commit()
    transaction.leave_transaction_management()

    # Add the generic tagged objects to the object list
    objects.extend(Tag.objects.all())

    # Serialize the test database
    serialized_data = serializers.serialize(format, objects, indent=2)

    # Flush the database and recreate from the serialized data
    management.call_command('flush', verbosity=0, interactive=False)
    transaction.enter_transaction_management()
    transaction.managed(True)
    for obj in serializers.deserialize(format, serialized_data):
        obj.save()
    transaction.commit()
    transaction.leave_transaction_management()

    # Assert that the deserialized data is the same
    # as the original source
    for (func, pk, klass, datum) in test_data:
        func[1](self, pk, klass, datum)
Example #18
 def export_sales_order(self):
   transaction.enter_transaction_management(using=self.database)
   transaction.managed(True, using=self.database)
   try:
     starttime = time()
     if self.verbosity > 0:
       print("Exporting requested date of sales orders...")
     self.cursor.execute('''select substring(name from '^.*? '), max(plandate)
         from demand
         left outer join out_demand
           on demand.name = out_demand.demand
           and demand.subcategory = 'OpenERP'
           group by substring(name from '^.*? ')
        ''')
     cnt = 0
     for i, j in self.cursor.fetchall():
       result = self.sock.execute(self.openerp_db, self.uid, self.openerp_password, 'sale.order', 'write',
         [int(i)], {'requested_date': j and j.strftime('%Y-%m-%d') or 0,})
       cnt += 1
     if self.verbosity > 0:
       print("Updated %d sales orders in %.2f seconds" % (cnt, (time() - starttime)))
   except Exception as e:
     print("Error updating sales orders: %s" % e)
   finally:
     transaction.rollback(using=self.database)
     transaction.leave_transaction_management(using=self.database)
Example #19
    def call_test_suite_result(self, test_suite_result, name, args):
        if test_suite_result not in self.test_suite_result_map:
            logging.warning('Attempted to call test suite result, but not running: %s.%s', test_suite_result.id, name)
            return

        logging.debug('Calling test suite result: %s.%s', test_suite_result.id, name)

        transaction.enter_transaction_management(True)
        transaction.managed(True)
        try:
            getattr(self.test_suite_result_map[test_suite_result], name)(*args)
        except:
            transaction.rollback()
            logging.exception('Test suite result failed: %s', test_suite_result.id)
            self.finished_test_suite_result(test_suite_result)
            try:
                test_suite_result.status = 'INT'
                test_suite_result.report = 'Internal error'
                test_suite_result.save(force_update=True)
            except:
                transaction.rollback()
                logging.exception('Test suite result cleanup failed: %s', test_suite_result.id)
        transaction.commit()
        transaction.managed(False)
        transaction.leave_transaction_management()
Example #20
    def restore(self):
        self.state = BACKUP_RESTORE_STATE_IN_PROGRESS
        self.save()

        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)
        try:
            self.tar = tarfile.open(self.backup_file.path, 'r:gz')

            # extract data.xml for parsing
            self.stream = self.tar.extractfile('backup/data.xml')
            self.restore_objects()

            self.restore_files()

            transaction.commit()
            transaction.leave_transaction_management()
            self.state = BACKUP_RESTORE_STATE_DONE
        except Exception as e:
            transaction.rollback()
            transaction.leave_transaction_management()
            self.state = BACKUP_RESTORE_STATE_ERROR
            self.error_message = e.__unicode__()
            mail_subject = _('Restore failed')
            mail_message = _('Restore for %(user)s failed with message : %(message)s') % {'user': self.user,
                                                                                         'message': e}
            mail_admins(mail_subject, mail_message, fail_silently=(not settings.DEBUG))

        self.save()

        # close and delete archive
        self.tar.close()
        os.remove(self.backup_file.path)
        self.backup_file = None
Example #21
 def _commit_on_success_unless_managed(*args, **kw):
     try:
         if transaction.is_managed():
             forced_managed = False
         else:
             transaction.enter_transaction_management()
             forced_managed = True
         
         try:
             res = func(*args, **kw)
         except:
             # All exceptions must be handled here (even string ones).
             if transaction.is_dirty():
                 if forced_managed:
                     transaction.rollback()
                 else:
                     transaction.rollback_unless_managed()
             raise
         else:
             if transaction.is_dirty():
                 if forced_managed:
                     transaction.commit()
                 else:
                     transaction.commit_unless_managed()
         return res
     finally:
         if forced_managed:
             transaction.leave_transaction_management()
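The wrapper above closes over func, so it is presumably the inner function of a decorator factory. A hypothetical sketch of the enclosing decorator and its use (names invented for illustration):

from functools import wraps

def commit_on_success_unless_managed(func):
    @wraps(func)
    def _commit_on_success_unless_managed(*args, **kw):
        pass  # body as shown above
    return _commit_on_success_unless_managed

@commit_on_success_unless_managed
def import_rows(rows):
    for row in rows:
        Item.objects.create(**row)  # hypothetical model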
Example #22
    def wrapped_func(*args, **kwargs):
        enter_transaction_management(using=using)
        managed(True, using=using)

        try:
            res = func(*args, **kwargs)
        except:
            if is_dirty(using=using):
                rollback(using=using)
            raise
        else:
            if is_dirty(using=using):

                if not isinstance(res, HttpResponse) or res.status_code < 200 or res.status_code >= 400:
                    rollback(using=using)
                else:
                    try:
                        commit(using=using)
                    except:
                        rollback(using=using)
                        raise
        finally:
            leave_transaction_management(using=using)

        return res
Example #23
    def test_transaction_rollback(self):
        """Tests johnny's handling of transaction rollbacks.

        Similar to the commit, this sets up a write to a db in a transaction,
        reads from it (to force a cache write of something), then rolls back."""
        from Queue import Queue as queue
        from django.db import transaction
        from testapp.models import Genre, Publisher
        from johnny import cache
        if settings.DATABASE_ENGINE == 'sqlite3':
            print "\n  Skipping test requiring multiple threads."
            return

        self.failUnless(transaction.is_managed() == False)
        self.failUnless(transaction.is_dirty() == False)
        connection.queries = []
        cache.local.clear()
        q = queue()
        other = lambda x: self._run_threaded(x, q)

        # load some data
        start = Genre.objects.get(id=1)
        other('Genre.objects.get(id=1)')
        hit, ostart = q.get()
        # these should be the same and should have hit cache
        self.failUnless(hit)
        self.failUnless(ostart == start)
        # enter manual transaction management
        transaction.enter_transaction_management()
        transaction.managed()
        start.title = 'Jackie Chan Novels'
        # local invalidation, this key should hit the localstore!
        nowlen = len(cache.local)
        start.save()
        self.failUnless(nowlen != len(cache.local))
        # perform a read OUTSIDE this transaction... it should still see the
        # old gen key, and should still find the "old" data
        other('Genre.objects.get(id=1)')
        hit, ostart = q.get()
        self.failUnless(hit)
        self.failUnless(ostart.title != start.title)
        # perform a READ inside the transaction;  this should hit the localstore
        # but not the outside!
        nowlen = len(cache.local)
        start2 = Genre.objects.get(id=1)
        self.failUnless(start2.title == start.title)
        self.failUnless(len(cache.local) > nowlen)
        transaction.rollback()
        # we rollback, and flush all johnny keys related to this transaction
        # subsequent gets should STILL hit the cache in the other thread
        # and indeed, in this thread.

        self.failUnless(transaction.is_dirty() == False)
        other('Genre.objects.get(id=1)')
        hit, ostart = q.get()
        self.failUnless(hit)
        start = Genre.objects.get(id=1)
        self.failUnless(ostart.title == start.title)
        transaction.managed(False)
        transaction.leave_transaction_management()
Example #24
 def handle(self, *args, **options):
     print("Download zip-archive...")
     f = urlopen(IPGEOBASE_SOURCE_URL)
     buffer = BytesIO(f.read())
     f.close()
     print("Unpacking...")
     zip_file = ZipFile(buffer)
     cities_file_read = _read_file(zip_file, 'cities.txt')
     cidr_optim_file_read = _read_file(zip_file, 'cidr_optim.txt')
     zip_file.close()
     buffer.close()
     print("Start updating...")
     list_cities = cities_file_read.decode(IPGEOBASE_CODING).split('\n')
     list_cidr_optim = \
         cidr_optim_file_read.decode(IPGEOBASE_CODING).split('\n')
     lines = \
         _get_cidr_optim_with_cities_lines(list_cidr_optim, list_cities)
     cursor = connection.cursor()
     transaction.enter_transaction_management()
     try:
         transaction.managed(True)
         print("Delete old rows in table ipgeobase...")
         cursor.execute(DELETE_SQL)
         print ("Write new data...")
         cursor.executemany(INSERT_SQL, [l for l in lines if l])
         transaction.commit()
     except Exception as e:
          message = "The data was not updated: %s" % e
         if send_message:
             mail_admins(subject=ERROR_SUBJECT, message=message)
         raise CommandError(message)
     finally:
         transaction.rollback()
         transaction.leave_transaction_management()
      return "Table ipgeobase is updated.\n"
Example #25
    def test_transaction_management(self):
        transaction.enter_transaction_management()
        transaction.managed(True)
        self.assertEqual(connection.isolation_level, self._read_committed)

        transaction.leave_transaction_management()
        self.assertEqual(connection.isolation_level, self._autocommit)
Example #26
    def handle(self, *args, **options):
        from django.db import transaction

        path, slug, name = args

        # Start transaction management.
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)

        book = Book.objects.get(slug=slug)

        root, ext = os.path.splitext(path)
        ext = ext.lower()
        if ext:
            ext = ext[1:]
            if ext == "zip":
                ext = "daisy"

        source_sha1 = BookMedia.read_source_sha1(path, ext)
        print "Source file SHA1:", source_sha1
        try:
            assert source_sha1
            bm = book.media.get(type=ext, source_sha1=source_sha1)
            print "Replacing media: %s (%s)" % (bm.name.encode("utf-8"), ext)
        except (AssertionError, BookMedia.DoesNotExist):
            bm = BookMedia(book=book, type=ext)
            print "Creating new media"
        bm.name = name
        bm.file.save(None, ExistingFile(path))
        bm.save()
        transaction.commit()
        transaction.leave_transaction_management()
Example #27
    def handle(self, *args, **options):

        self.batch_size = options.get('batch_size', 50)
        self.preserve = options.get("preserve", False)
        self.index = options.get("index", False)
        self.newline = options.get("newline", False)
        
        if not self.index:
            old_realtime_indexing = getattr(settings, "REALTIME_INDEXING", None)
            # This is not recommended by the Django manual, but in the case of a management command it seems to work.
            settings.REALTIME_INDEXING = False 
        
        transaction.enter_transaction_management()
        transaction.managed(True)

        for records_url in args:
            print("Processing %s" % records_url)
            errors = self.process_url(records_url, options)
            print("Processing %s Done" % records_url)
            if errors:
                print("%d error(s) when processing %s, check your log file." % (len(errors), records_url))

        transaction.leave_transaction_management()
        
        if not self.index and old_realtime_indexing:
            settings.REALTIME_INDEXING = old_realtime_indexing
Example #28
    def handle_noargs(self, **options):
        
        transaction.enter_transaction_management()
        transaction.managed()

        
        for thes in Thesaurus.objects.all():
            context = graph.get_context(URIRef(thes.uri))
            with Term.objects.disable_mptt_updates():  # @UndefinedVariable
                for i,(s,_,o) in enumerate(graph.triples((None, URIRef("http://www.w3.org/2004/02/skos/core#narrower"), None), context=context)):
                    print("%d - Thesaurus %s term pref label %s parent %s" % (i+1,thes.label, repr(o), repr(s)))
                    parent_term = Term.objects.get(uri=unicode(s))  # @UndefinedVariable
                    term = Term.objects.get(uri=unicode(o))  # @UndefinedVariable
                    term.tree_id = thes.id
                    term.parent = parent_term
                    term.save()

            Term.objects.filter(parent=None, thesaurus=thes).update(tree_id=thes.id)  # @UndefinedVariable
            
            print("Rebuilding tree %d" % thes.id)
            Term.objects.rebuild()  # @UndefinedVariable
            
            transaction.commit()
            reset_queries()
        

        transaction.leave_transaction_management()

            
Example #29
 def export_sales_order(self, cursor):
   transaction.enter_transaction_management(using=self.database)
   try:
     starttime = time()
     if self.verbosity > 0:
       print("Exporting expected delivery date of sales orders...")
     cursor.execute('''select demand.source, max(plandate)
         from demand
         left outer join out_demand
           on demand.name = out_demand.demand
         where demand.subcategory = 'openbravo'
           and status = 'open'
         group by source
        ''')
     count = 0
     body = [
       '<?xml version="1.0" encoding="UTF-8"?>',
       '<ob:Openbravo xmlns:ob="http://www.openbravo.com">'
       ]
     for i in cursor.fetchall():
       body.append('<OrderLine id="%s"><description>Planned delivery date %s</description></OrderLine>' % i)
       count += 1
       if self.verbosity > 0 and count % 500 == 1:
         print('.', end="")
     if self.verbosity > 0:
       print ('')
     body.append('</ob:Openbravo>')
     self.post_data('/openbravo/ws/dal/OrderLine', '\n'.join(body))
     if self.verbosity > 0:
       print("Updated %d sales orders in %.2f seconds" % (count, (time() - starttime)))
   except Exception as e:
     raise CommandError("Error updating sales orders: %s" % e)
   finally:
     transaction.rollback(using=self.database)
     transaction.leave_transaction_management(using=self.database)
Example #30
    def save(self):

        current_site = Site.objects.get_current()

        transaction.enter_transaction_management()
        transaction.managed(True)
        try:
            try:
                self.user.set_password(self.cleaned_data['new_password1'])
                self.user.save()
                message = loader.render_to_string('reset_password_email.txt', {
                    'user': self.user,
                    'raw_password': self.cleaned_data['new_password1'],
                    'full_server_url': 'http://%s/' % current_site.domain
                })
                msg = EmailMessage(
                        subject='New password for CHIRP Volunteer Tracker',
                        body=message,
                        to=[self.user.email]
                )
                msg.send(fail_silently=False)
            except:
                transaction.rollback()
                raise
            else:
                transaction.commit()
        finally:
            transaction.leave_transaction_management()

        return self.user
Example #31
    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError("Expected exactly 1 argument - Experiment ID")
        try:
            exp = Experiment.objects.get(pk=int(args[0]))
        except Experiment.DoesNotExist:
            raise CommandError("Experiment ID %s not found" % args[0])

        self.stdout.write("Delete the following experiment?\n\n")

        # Print basic experiment information
        self.stdout.write("Experiment\n    ID: {0}\n".format(exp.id))
        self.stdout.write("    Title: {0}\n".format(exp.title))
        self.stdout.write("    Public: {0}\n".format(exp.public))

        # List experiment authors
        authors = Author_Experiment.objects.filter(experiment=exp)
        self.stdout.write("    Authors:\n")
        for author in authors:
            self.stdout.write("        {0}\n".format(author.author))

        # List experiment metadata
        epsets = ExperimentParameterSet.objects.filter(experiment=exp)
        for epset in epsets:
            self.stdout.write("    Param Set: {0} - {1}\n".format(
                epset.schema.name, epset.schema.namespace))
            params = ExperimentParameter.objects.filter(parameterset=epset)
            for param in params:
                self.stdout.write("        {0} = {1}\n".format(
                    param.name.full_name, param.get()))

        # List experiment ACLs
        acls = ExperimentACL.objects.filter(experiment=exp)
        self.stdout.write("    ACLs:\n")
        for acl in acls:
            self.stdout.write("        {0}-{1}, flags: ".format(
                acl.pluginId, acl.entityId))
            if acl.canRead:
                self.stdout.write("R")
            if acl.canWrite:
                self.stdout.write("W")
            if acl.canDelete:
                self.stdout.write("D")
            if acl.isOwner:
                self.stdout.write("O")
            self.stdout.write("\n")

        # Basic Statistics
        datasets = Dataset.objects.filter(experiment=exp)
        datafiles = Dataset_File.objects.filter(dataset__experiment=exp)
        self.stdout.write("    {0} dataset(s), containing {1} file(s)\n".format(
            datasets.count(), datafiles.count()))

        # If the user has only requested a listing finish now
        if options.get('list', False):
            return

        # User must enter "yes" to proceed
        self.stdout.write("\n\nConfirm Deletion? (yes): ")
        ans = sys.stdin.readline().strip()
        if ans != "yes":
            self.stdout.write("'yes' not entered, aborting.\n")
            return

        # Consider the entire experiment deletion atomic
        using = options.get('database', DEFAULT_DB_ALIAS)
        transaction.commit_unless_managed(using=using)
        transaction.enter_transaction_management(using=using)
        transaction.managed(True, using=using)

        try:
            acls.delete()
            epsets.delete()
            DatasetParameterSet.objects.filter(
                dataset__experiment=exp).delete()
            DatafileParameterSet.objects.filter(
                dataset_file__dataset__experiment=exp).delete()
            authors.delete()
            datasets.delete()
            datafiles.delete()
            exp.delete()

            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)
        except:
            transaction.rollback(using=using)
            raise CommandError("Exception occurred, rolling back transaction")
Example #32
 def _post_teardown(self):
     transaction.rollback()
     transaction.leave_transaction_management()
Example #33
 def process_exception(self, request, exception):
     """Abort the transaction on errors."""
     from django.db import transaction as django_transaction
     transaction.abort()
     django_transaction.set_clean()
     django_transaction.leave_transaction_management()
Example #34
def delete_objects(seen_objs, using):
    """
    Iterate through a list of seen classes, and remove any instances that are
    referred to.
    """
    connection = connections[using]
    if not transaction.is_managed(using=using):
        transaction.enter_transaction_management(using=using)
        forced_managed = True
    else:
        forced_managed = False
    try:
        ordered_classes = seen_objs.keys()
    except CyclicDependency:
        # If there is a cyclic dependency, we cannot in general delete the
        # objects.  However, if an appropriate transaction is set up, or if the
        # database is lax enough, it will succeed. So for now, we go ahead and
        # try anyway.
        ordered_classes = seen_objs.unordered_keys()

    obj_pairs = {}
    try:
        for cls in ordered_classes:
            items = seen_objs[cls].items()
            items.sort()
            obj_pairs[cls] = items

            # Pre-notify all instances to be deleted.
            for pk_val, instance in items:
                if not cls._meta.auto_created:
                    signals.pre_delete.send(sender=cls, instance=instance)

            pk_list = [pk for pk,instance in items]
            del_query = sql.DeleteQuery(cls)
            del_query.delete_batch_related(pk_list, using=using)

            update_query = sql.UpdateQuery(cls)
            for field, model in cls._meta.get_fields_with_model():
                if (field.rel and field.null and field.rel.to in seen_objs and
                        filter(lambda f: f.column == field.rel.get_related_field().column,
                        field.rel.to._meta.fields)):
                    if model:
                        sql.UpdateQuery(model).clear_related(field, pk_list, using=using)
                    else:
                        update_query.clear_related(field, pk_list, using=using)

        # Now delete the actual data.
        for cls in ordered_classes:
            items = obj_pairs[cls]
            items.reverse()

            pk_list = [pk for pk,instance in items]
            del_query = sql.DeleteQuery(cls)
            del_query.delete_batch(pk_list, using=using)

            # Last cleanup; set NULLs where there once was a reference to the
            # object, NULL the primary key of the found objects, and perform
            # post-notification.
            for pk_val, instance in items:
                for field in cls._meta.fields:
                    if field.rel and field.null and field.rel.to in seen_objs:
                        setattr(instance, field.attname, None)

                if not cls._meta.auto_created:
                    signals.post_delete.send(sender=cls, instance=instance)
                setattr(instance, cls._meta.pk.attname, None)

        if forced_managed:
            transaction.commit(using=using)
        else:
            transaction.commit_unless_managed(using=using)
    finally:
        if forced_managed:
            transaction.leave_transaction_management(using=using)
Example #35
 def leave(self):
     if db_transaction.is_dirty():
         db_transaction.rollback()
     db_transaction.leave_transaction_management(using=self.using)
Example #36
    def import_data(self, dataset, dry_run=False, raise_errors=False,
            use_transactions=None):
        """
        Imports data from ``dataset``.

        ``use_transactions``
            If ``True``, the import will run inside a transaction.
            If ``dry_run`` is set or an error occurs, the transaction will be
            rolled back.
        """
        result = Result()
        result.diff_headers = self.get_diff_headers()

        if use_transactions is None:
            use_transactions = self.get_use_transactions()

        if use_transactions is True:
            # when transactions are used we want to create/update/delete object
            # as transaction will be rolled back if dry_run is set
            real_dry_run = False
            transaction.enter_transaction_management()
            transaction.managed(True)
        else:
            real_dry_run = dry_run

        instance_loader = self._meta.instance_loader_class(self, dataset)

        try:
            self.before_import(dataset, real_dry_run)
        except Exception as e:
            tb_info = traceback.format_exc(2)
            result.base_errors.append(Error(repr(e), tb_info))
            if raise_errors:
                if use_transactions:
                    transaction.rollback()
                    transaction.leave_transaction_management()
                raise

        for row in dataset.dict:
            try:
                row_result = RowResult()
                instance, new = self.get_or_init_instance(instance_loader, row)
                if new:
                    row_result.import_type = RowResult.IMPORT_TYPE_NEW
                else:
                    row_result.import_type = RowResult.IMPORT_TYPE_UPDATE
                row_result.new_record = new
                original = deepcopy(instance)
                if self.for_delete(row, instance):
                    if new:
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                        row_result.diff = self.get_diff(None, None,
                                real_dry_run)
                    else:
                        row_result.import_type = RowResult.IMPORT_TYPE_DELETE
                        self.delete_instance(instance, real_dry_run)
                        row_result.diff = self.get_diff(original, None,
                                real_dry_run)
                else:
                    self.import_obj(instance, row, real_dry_run)
                    if self.skip_row(instance, original):
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                    else:
                        self.save_instance(instance, real_dry_run)
                        self.save_m2m(instance, row, real_dry_run)
                        # Add object info to RowResult for LogEntry
                        row_result.object_repr = force_text(instance)
                        row_result.object_id = instance.pk
                    row_result.diff = self.get_diff(original, instance,
                            real_dry_run)
            except Exception as e:
                tb_info = traceback.format_exc(2)
                row_result.errors.append(Error(e, tb_info))
                if raise_errors:
                    if use_transactions:
                        transaction.rollback()
                        transaction.leave_transaction_management()
                    six.reraise(*sys.exc_info())
            if (row_result.import_type != RowResult.IMPORT_TYPE_SKIP or
                        self._meta.report_skipped):
                result.rows.append(row_result)

        if use_transactions:
            if dry_run or result.has_errors():
                transaction.rollback()
            else:
                transaction.commit()
            transaction.leave_transaction_management()

        return result
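A hypothetical call in the style of django-import-export, showing how dry_run interacts with the transaction handling above (the resource class and dataset contents are made up):

import tablib

dataset = tablib.Dataset(['1', 'Example Book'], headers=['id', 'name'])
resource = BookResource()  # hypothetical ModelResource subclass

# With use_transactions enabled, every row is written and then rolled back,
# so the diff in `result` reflects real save() calls without persisting them.
result = resource.import_data(dataset, dry_run=True, raise_errors=True)
print(result.has_errors())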
Example #37
                model = obj.object.__class__
                if router.allow_syncdb(using, model):
                    models.add(model)
                    counter += 1
                    obj.save(using=using)
        if counter > 0:
            sequence_sql = connection.ops.sequence_reset_sql(style, models)
            if sequence_sql:
                for line in sequence_sql:
                    cursor.execute(line)
    except Exception, e:
        transaction.rollback(using=using)
        transaction.leave_transaction_management(using=using)
        raise e
    transaction.commit(using=using)
    transaction.leave_transaction_management(using=using)
    connection.close()
    return counter

Example #38
 def nocommit(using=None):
     t.enter_transaction_management(using=using)
     t.managed(True, using=using)
     yield
     t.rollback()
     t.leave_transaction_management(using=using)
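As with the other yielding helpers here, nocommit() is presumably exposed via contextlib.contextmanager. A hypothetical usage sketch:

with nocommit(using='default'):
    Genre.objects.create(title='Throwaway')  # hypothetical write
# The transaction is rolled back on exit, so the row above never persists.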
Example #39
def commit_work(run_transaction):
    if run_transaction:
        transaction.commit()
        transaction.leave_transaction_management()
        printv('Transaction committed')
Example #40
    def handle(self, *fixture_labels, **options):

        ignore = options.get('ignore')
        using = options.get('database')

        connection = connections[using]

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path of at "
                "least one fixture in the command line.")

        verbosity = int(options.get('verbosity'))
        show_traceback = options.get('traceback')

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(
                        self.namelist()
                    ) == 1, "Zip-compressed fixtures must contain only one file."

            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None: open,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(upath(path))
            else:
                # It's a models.py module
                app_module_paths.append(upath(app.__file__))

        app_fixtures = [
            os.path.join(os.path.dirname(path), 'fixtures')
            for path in app_module_paths
        ]

        try:
            with connection.constraint_checks_disabled():
                for fixture_label in fixture_labels:
                    parts = fixture_label.split('.')

                    if len(parts) > 1 and parts[-1] in compression_types:
                        compression_formats = [parts[-1]]
                        parts = parts[:-1]
                    else:
                        compression_formats = compression_types.keys()

                    if len(parts) == 1:
                        fixture_name = parts[0]
                        formats = serializers.get_public_serializer_formats()
                    else:
                        fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                        if format in serializers.get_public_serializer_formats(
                        ):
                            formats = [format]
                        else:
                            formats = []

                    if formats:
                        if verbosity >= 2:
                            self.stdout.write("Loading '%s' fixtures..." %
                                              fixture_name)
                    else:
                        raise CommandError(
                            "Problem installing fixture '%s': %s is not a known serialization format."
                            % (fixture_name, format))

                    if os.path.isabs(fixture_name):
                        fixture_dirs = [fixture_name]
                    else:
                        fixture_dirs = app_fixtures + list(
                            settings.FIXTURE_DIRS) + ['']

                    for fixture_dir in fixture_dirs:
                        if verbosity >= 2:
                            self.stdout.write("Checking %s for fixtures..." %
                                              humanize(fixture_dir))

                        label_found = False
                        for combo in product([using, None], formats,
                                             compression_formats):
                            database, format, compression_format = combo
                            file_name = '.'.join(p for p in [
                                fixture_name, database, format,
                                compression_format
                            ] if p)

                            if verbosity >= 3:
                                self.stdout.write("Trying %s for %s fixture '%s'..." % \
                                    (humanize(fixture_dir), file_name, fixture_name))
                            full_path = os.path.join(fixture_dir, file_name)
                            open_method = compression_types[compression_format]
                            try:
                                fixture = open_method(full_path, 'r')
                            except IOError:
                                if verbosity >= 2:
                                    self.stdout.write("No %s fixture '%s' in %s." % \
                                        (format, fixture_name, humanize(fixture_dir)))
                            else:
                                try:
                                    if label_found:
                                        raise CommandError(
                                            "Multiple fixtures named '%s' in %s. Aborting."
                                            % (fixture_name,
                                               humanize(fixture_dir)))

                                    fixture_count += 1
                                    objects_in_fixture = 0
                                    loaded_objects_in_fixture = 0
                                    if verbosity >= 2:
                                        self.stdout.write("Installing %s fixture '%s' from %s." % \
                                            (format, fixture_name, humanize(fixture_dir)))

                                    objects = serializers.deserialize(
                                        format,
                                        fixture,
                                        using=using,
                                        ignorenonexistent=ignore)

                                    for obj in objects:

                                        try:
                                            # Attempt to look up any existing object using natural keys and
                                            # use that object's PK so that duplicate and conflicting records aren't created.
                                            nk = obj.object.natural_key()
                                            real_object = type(
                                                obj.object
                                            ).objects.get_by_natural_key(*nk)
                                            if real_object:
                                                obj.object.pk = real_object.pk
                                        except AttributeError:
                                            # Model class doesn't support natural keys.
                                            pass
                                        except type(obj.object).DoesNotExist:
                                            # No existing record, so proceed as normal.
                                            pass

                                        objects_in_fixture += 1
                                        if router.allow_syncdb(
                                                using, obj.object.__class__):
                                            loaded_objects_in_fixture += 1
                                            models.add(obj.object.__class__)
                                            try:
                                                obj.save(using=using)
                                            except (DatabaseError,
                                                    IntegrityError) as e:
                                                e.args = (
                                                    "Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s"
                                                    % {
                                                        'app_label':
                                                        obj.object._meta.
                                                        app_label,
                                                        'object_name':
                                                        obj.object._meta.
                                                        object_name,
                                                        'pk':
                                                        obj.object.pk,
                                                        'error_msg':
                                                        force_text(e)
                                                    }, )
                                                raise

                                    loaded_object_count += loaded_objects_in_fixture
                                    fixture_object_count += objects_in_fixture
                                    label_found = True
                                except Exception as e:
                                    if not isinstance(e, CommandError):
                                        e.args = (
                                            "Problem installing fixture '%s': %s"
                                            % (full_path, e), )
                                    raise
                                finally:
                                    fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    raise CommandError(
                                        "No fixture data found for '%s'. (File format may be invalid.)"
                                        % (fixture_name))

            # Since we disabled constraint checks, we must manually check for
            # any invalid keys that might have been added
            table_names = [model._meta.db_table for model in models]
            try:
                connection.check_constraints(table_names=table_names)
            except Exception as e:
                e.args = ("Problem installing fixtures: %s" % e, )
                raise

        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as e:
            if commit:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                no_style(), models)
            if sequence_sql:
                if verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)

        if verbosity >= 1:
            if fixture_object_count == loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" %
                                  (loaded_object_count, fixture_count))
            else:
                self.stdout.write(
                    "Installed %d object(s) (of %d) from %d fixture(s)" %
                    (loaded_object_count, fixture_object_count, fixture_count))

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
Example #41
    def handle(self, *fixture_labels, **options):
        from django.db.models import get_apps
        from django.core import serializers
        from django.db import connection, transaction
        from django.conf import settings

        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        models = set()

        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed()
            transaction.enter_transaction_management()
            transaction.managed(True)

        self.disable_forward_ref_checks(cursor)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(
                        self.namelist()
                    ) == 1, "Zip-compressed fixtures must contain only one file."

            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None: file,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_fixtures = [
            os.path.join(os.path.dirname(app.__file__), 'fixtures')
            for app in get_apps()
        ]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')

            if len(parts) > 1 and parts[-1] in compression_types:
                compression_formats = [parts[-1]]
                parts = parts[:-1]
            else:
                compression_formats = compression_types.keys()

            if len(parts) == 1:
                fixture_name = parts[0]
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                self.enable_forward_ref_checks(cursor)
                sys.stderr.write(
                    self.style.ERROR(
                        "Problem installing fixture '%s': %s is not a known serialization format."
                        % (fixture_name, format)))
                transaction.rollback()
                transaction.leave_transaction_management()
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(
                    settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)

                label_found = False
                for format in formats:
                    for compression_format in compression_formats:
                        if compression_format:
                            file_name = '.'.join(
                                [fixture_name, format, compression_format])
                        else:
                            file_name = '.'.join([fixture_name, format])

                        if verbosity > 1:
                            print "Trying %s for %s fixture '%s'..." % \
                                (humanize(fixture_dir), file_name, fixture_name)
                        full_path = os.path.join(fixture_dir, file_name)
                        open_method = compression_types[compression_format]
                        try:
                            fixture = open_method(full_path, 'r')
                            if label_found:
                                fixture.close()
                                self.enable_forward_ref_checks(cursor)
                                print self.style.ERROR(
                                    "Multiple fixtures named '%s' in %s. Aborting."
                                    % (fixture_name, humanize(fixture_dir)))
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                return
                            else:
                                fixture_count += 1
                                objects_in_fixture = 0
                                if verbosity > 0:
                                    print "Installing %s fixture '%s' from %s." % \
                                        (format, fixture_name, humanize(fixture_dir))
                                try:
                                    objects = serializers.deserialize(
                                        format, fixture)
                                    for obj in objects:
                                        objects_in_fixture += 1
                                        self.handle_ref_checks(cursor, obj)
                                        models.add(obj.object.__class__)
                                        obj.save()
                                    object_count += objects_in_fixture
                                    label_found = True
                                except (SystemExit, KeyboardInterrupt):
                                    self.enable_forward_ref_checks(cursor)
                                    raise
                                except Exception:
                                    import traceback
                                    fixture.close()
                                    self.enable_forward_ref_checks(cursor)
                                    transaction.rollback()
                                    transaction.leave_transaction_management()
                                    if show_traceback:
                                        traceback.print_exc()
                                    else:
                                        sys.stderr.write(
                                            self.style.ERROR(
                                                "Problem installing fixture '%s': %s\n"
                                                % (full_path, ''.join(
                                                    traceback.format_exception(
                                                        *sys.exc_info())))))
                                    return
                                fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    self.enable_forward_ref_checks(cursor)
                                    sys.stderr.write(
                                        self.style.ERROR(
                                            "No fixture data found for '%s'. (File format may be invalid.)"
                                            % (fixture_name)))
                                    transaction.rollback()
                                    transaction.leave_transaction_management()
                                    return

                        except Exception, e:
                            if verbosity > 1:
                                print "No %s fixture '%s' in %s." % \
                                    (format, fixture_name, humanize(fixture_dir))
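
The ``commit`` stealth option described in the comments above is only useful when loaddata is driven from another script; a hedged sketch of such a caller, assuming the old (pre-Django-1.6) transaction API used throughout these examples and a hypothetical fixture name:

# Sketch only: run loaddata so it reuses an outer, manually managed
# transaction. 'initial_data' is a hypothetical fixture name.
from django.core.management import call_command
from django.db import transaction

transaction.enter_transaction_management()
transaction.managed(True)
try:
    # commit=False tells loaddata not to start (and commit) its own
    # transaction; the data load joins the one opened above.
    call_command('loaddata', 'initial_data', commit=False)
    transaction.commit()
except Exception:
    transaction.rollback()
    raise
finally:
    transaction.leave_transaction_management()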
Ejemplo n.º 42
0
def setupBridge(master, settings, config):
    '''Set up the bridge between buildbot and the database.

    This is also the closure in which all things happen that depend
    on the given settings.
    '''

    # allow settings to be none for tests
    if settings is not None:
        os.environ['DJANGO_SETTINGS_MODULE'] = settings

    import bb2mbdb.utils
    reload(bb2mbdb.utils)
    import mbdb.models
    reload(mbdb.models)
    from bb2mbdb.utils import modelForSource, modelForChange, modelForLog, \
        timeHelper
    from mbdb.models import Master, Slave, Builder, BuildRequest, Build

    from django.db import transaction

    try:
        # hack around the lack of @transaction.commit_manually
        transaction.enter_transaction_management()
        transaction.managed(True)
        dbm, new_master = Master.objects.get_or_create(name=master)
        transaction.commit()
    except:
        transaction.rollback()
        raise
    finally:
        transaction.leave_transaction_management()

    class Scheduler(BaseScheduler):
        @transaction.commit_on_success
        def addChange(self, change):
            dbchange = modelForChange(dbm, change)
            log.msg('ADDED CHANGE to DB, %d' % dbchange.number)

        def listBuilderNames(self):
            # Sadly, we need this. Buildbot is going to complain that we
            # don't build. What does he know.
            return []

    if 'schedulers' not in config:
        config['schedulers'] = []
    config['schedulers'].insert(0, Scheduler('bb2mbdb'))

    class StepReceiver(StatusReceiver):
        '''Build- and StatusReceiver helper objects to receive all
        events for a particular step.
        '''
        def __init__(self, dbstep, basedir):
            self.step = dbstep
            self.basedir = basedir

        @transaction.commit_on_success
        def stepTextChanged(self, build, step, text):
            self.step.text = text
            self.step.save()

        @transaction.commit_on_success
        def stepText2Changed(self, build, step, text2):
            self.step.text2 = text2
            self.step.save()

        @transaction.commit_on_success
        def logStarted(self, build, step, log):
            self.log = modelForLog(self.step, log, self.basedir)

        def logChunk(self, build, step, log, channel, text):
            pass

        @transaction.commit_on_success
        def logFinished(self, build, step, log):
            self.log.isFinished = True
            self.log.save()

        def stepETAUpdate(self, build, step, ETA, expectations):
            '''TODO: ETA support.
            '''
            pass

    class BuildReceiver(StatusReceiver):
        '''StatusReceiver helper object to receive all events
        for a particular build.
        Caches the database model object.
        '''
        def __init__(self, dbbuild, basedir):
            self.build = dbbuild
            self.basedir = basedir
            self.latestStep = self.latestDbStep = None

        @transaction.commit_manually
        def stepStarted(self, build, step):
            self.latestStep = step
            starttime = timeHelper(step.getTimes()[0])
            self.latestDbStep = self.build.steps.create(name=step.getName(),
                                                        starttime=starttime,
                                                        text=step.getText(),
                                                        text2=step.text2)
            transaction.commit()
            return StepReceiver(self.latestDbStep, self.basedir)

        @transaction.commit_on_success
        def stepFinished(self, build, step, results):
            assert step == self.latestStep, "We lost a step somewhere"
            try:
                self.latestStep = None
                self.latestDbStep.endtime = timeHelper(step.getTimes()[1])
                # only the first is the result, the second is text2,
                # ignore that.
                self.latestDbStep.result = results[0]
                self.latestDbStep.text = step.getText()
                self.latestDbStep.text2 = step.text2
                self.latestDbStep.save()
                self.latestDbStep = None
            except Exception, e:
                log.msg(str(e))

        def buildETAUpdate(self, build, ETA):
            '''TODO: ETA support.
            '''
            pass
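
The enter/managed/commit-or-rollback/leave sequence at the top of setupBridge recurs in nearly every snippet here; a minimal sketch of factoring it into a context manager, assuming the same pre-1.6 transaction functions (this wrapper is illustrative, not part of buildbot or mbdb):

# Sketch only: an illustrative context manager over the old manual
# transaction API; not part of the buildbot/mbdb code above.
from contextlib import contextmanager
from django.db import transaction

@contextmanager
def managed_transaction(using=None):
    transaction.enter_transaction_management(using=using)
    transaction.managed(True, using=using)
    try:
        yield
        transaction.commit(using=using)
    except Exception:
        transaction.rollback(using=using)
        raise
    finally:
        transaction.leave_transaction_management(using=using)

# Usage, equivalent to the try/except/finally block in setupBridge:
#   with managed_transaction():
#       dbm, new_master = Master.objects.get_or_create(name=master)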
Ejemplo n.º 43
0
    def import_data(self,
                    dataset,
                    dry_run=False,
                    raise_errors=False,
                    use_transactions=None):
        """
        Imports data from ``dataset``.

        ``use_transactions``
            If ``True``, the import will be processed inside a transaction.
            If ``dry_run`` is set, or an error occurs, the transaction will
            be rolled back.
        """
        result = Result()

        if use_transactions is None:
            use_transactions = self.get_use_transactions()

        if use_transactions is True:
            # When transactions are used we want to create/update/delete
            # objects for real, as the transaction will be rolled back
            # anyway if dry_run is set.
            real_dry_run = False
            transaction.enter_transaction_management()
            transaction.managed(True)
        else:
            real_dry_run = dry_run

        instance_loader = self._meta.instance_loader_class(self, dataset)

        for row in dataset.dict:
            try:
                row_result = RowResult()
                instance, new = self.get_or_init_instance(instance_loader, row)
                if new:
                    row_result.import_type = RowResult.IMPORT_TYPE_NEW
                else:
                    row_result.import_type = RowResult.IMPORT_TYPE_UPDATE
                row_result.new_record = new
                original = deepcopy(instance)
                if self.for_delete(row, instance):
                    if new:
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                        row_result.diff = self.get_diff(
                            None, None, real_dry_run)
                    else:
                        row_result.import_type = RowResult.IMPORT_TYPE_DELETE
                        self.delete_instance(instance, real_dry_run)
                        row_result.diff = self.get_diff(
                            original, None, real_dry_run)
                else:
                    self.import_obj(instance, row)
                    self.save_instance(instance, real_dry_run)
                    self.save_m2m(instance, row, real_dry_run)
                    row_result.diff = self.get_diff(original, instance,
                                                    real_dry_run)
            except Exception, e:
                tb_info = traceback.format_exc(sys.exc_info()[2])
                row_result.errors.append(Error(repr(e), tb_info))
                if raise_errors:
                    if use_transactions:
                        transaction.rollback()
                        transaction.leave_transaction_management()
                    raise
            result.rows.append(row_result)
Ejemplo n.º 44
0
class Resource(object):
    """
    Resource defines how objects are mapped to their import and export
    representations and handle importing and exporting data.
    """
    __metaclass__ = DeclarativeMetaclass

    def get_use_transactions(self):
        if self._meta.use_transactions is None:
            return USE_TRANSACTIONS
        else:
            return self._meta.use_transactions

    def get_fields(self):
        """
        Returns fields in ``export_order`` order.
        """
        return [self.fields[f] for f in self.get_export_order()]

    @classmethod
    def get_field_name(cls, field):
        """
        Returns field name for given field.
        """
        for field_name, f in cls.fields.items():
            if f == field:
                return field_name
        raise AttributeError("Field %s does not exist in %s resource" %
                             (field, cls))

    def init_instance(self, row=None):
        raise NotImplementedError()

    def get_instance(self, instance_loader, row):
        return instance_loader.get_instance(row)

    def get_or_init_instance(self, instance_loader, row):
        instance = self.get_instance(instance_loader, row)
        if instance:
            return (instance, False)
        else:
            return (self.init_instance(row), True)

    def save_instance(self, instance, dry_run=False):
        self.before_save_instance(instance, dry_run)
        if not dry_run:
            instance.save()
        self.after_save_instance(instance, dry_run)

    def before_save_instance(self, instance, dry_run):
        """
        Override to add additional logic.
        """
        pass

    def after_save_instance(self, instance, dry_run):
        """
        Override to add additional logic.
        """
        pass

    def delete_instance(self, instance, dry_run=False):
        self.before_delete_instance(instance, dry_run)
        if not dry_run:
            instance.delete()
        self.after_delete_instance(instance, dry_run)

    def before_delete_instance(self, instance, dry_run):
        """
        Override to add additional logic.
        """
        pass

    def after_delete_instance(self, instance, dry_run):
        """
        Override to add additional logic.
        """
        pass

    def import_field(self, field, obj, data):
        if field.attribute and field.column_name in data:
            field.save(obj, data)

    def import_obj(self, obj, data):
        """
        """
        for field in self.get_fields():
            if isinstance(field.widget, widgets.ManyToManyWidget):
                continue
            self.import_field(field, obj, data)

    def save_m2m(self, obj, data, dry_run):
        """
        Saves m2m fields.

        A model instance needs to have a primary key value before
        a many-to-many relationship can be used.
        """
        if not dry_run:
            for field in self.get_fields():
                if not isinstance(field.widget, widgets.ManyToManyWidget):
                    continue
                self.import_field(field, obj, data)

    def for_delete(self, row, instance):
        """
        Returns ``True`` if importing ``row`` should delete the instance.

        Default implementation returns ``False``.
        Override this method to handle deletion.
        """
        return False

    def get_diff(self, original, current, dry_run=False):
        """
        Get the diff between the original and current object when
        ``import_data`` is run.

        ``dry_run`` allows handling special cases when the object is not
        saved to the database (i.e. m2m relationships).
        """
        data = []
        dmp = diff_match_patch()
        for field in self.get_fields():
            v1 = self.export_field(field, original) if original else ""
            v2 = self.export_field(field, current) if current else ""
            diff = dmp.diff_main(unicode(v1), unicode(v2))
            dmp.diff_cleanupSemantic(diff)
            html = dmp.diff_prettyHtml(diff)
            html = mark_safe(html)
            data.append(html)
        return data

    def get_diff_headers(self):
        """
        Diff representation headers.
        """
        return self.get_export_headers()

    def import_data(self,
                    dataset,
                    dry_run=False,
                    raise_errors=False,
                    use_transactions=None):
        """
        Imports data from ``dataset``.

        ``use_transactions``
            If ``True``, the import will be processed inside a transaction.
            If ``dry_run`` is set, or an error occurs, the transaction will
            be rolled back.
        """
        result = Result()

        if use_transactions is None:
            use_transactions = self.get_use_transactions()

        if use_transactions is True:
            # When transactions are used we want to create/update/delete
            # objects for real, as the transaction will be rolled back
            # anyway if dry_run is set.
            real_dry_run = False
            transaction.enter_transaction_management()
            transaction.managed(True)
        else:
            real_dry_run = dry_run

        instance_loader = self._meta.instance_loader_class(self, dataset)

        for row in dataset.dict:
            try:
                row_result = RowResult()
                instance, new = self.get_or_init_instance(instance_loader, row)
                if new:
                    row_result.import_type = RowResult.IMPORT_TYPE_NEW
                else:
                    row_result.import_type = RowResult.IMPORT_TYPE_UPDATE
                row_result.new_record = new
                original = deepcopy(instance)
                if self.for_delete(row, instance):
                    if new:
                        row_result.import_type = RowResult.IMPORT_TYPE_SKIP
                        row_result.diff = self.get_diff(
                            None, None, real_dry_run)
                    else:
                        row_result.import_type = RowResult.IMPORT_TYPE_DELETE
                        self.delete_instance(instance, real_dry_run)
                        row_result.diff = self.get_diff(
                            original, None, real_dry_run)
                else:
                    self.import_obj(instance, row)
                    self.save_instance(instance, real_dry_run)
                    self.save_m2m(instance, row, real_dry_run)
                    row_result.diff = self.get_diff(original, instance,
                                                    real_dry_run)
            except Exception, e:
                tb_info = traceback.format_exc(sys.exc_info()[2])
                row_result.errors.append(Error(repr(e), tb_info))
                if raise_errors:
                    if use_transactions:
                        transaction.rollback()
                        transaction.leave_transaction_management()
                    raise
            result.rows.append(row_result)

        if use_transactions:
            if dry_run or result.has_errors():
                transaction.rollback()
            else:
                transaction.commit()
            transaction.leave_transaction_management()

        return result
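
A hedged usage sketch for ``import_data`` above; ``BookResource`` and the column names are hypothetical, and tablib is assumed to be the dataset library (as the ``dataset.dict`` access suggests):

# Sketch only: BookResource and the column names are hypothetical.
import tablib

dataset = tablib.Dataset(headers=['id', 'name'])
dataset.append(['1', 'Some book'])

resource = BookResource()

# Dry run inside a transaction: rows are imported and diffed, then
# everything is rolled back.
result = resource.import_data(dataset, dry_run=True, use_transactions=True)

if not result.has_errors():
    # Real import, re-raising the first error encountered.
    resource.import_data(dataset, dry_run=False, raise_errors=True)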
 def process_exception(self, request, exception):
     """Rolls back the database and leaves transaction management"""
     if transaction.is_dirty():
         transaction.rollback()
     transaction.leave_transaction_management()
Ejemplo n.º 46
0
 def tearDown(self):
     # Close down the second connection.
     transaction.leave_transaction_management()
     self.conn2.close()
Ejemplo n.º 47
0
    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError("Expected exactly 1 argument - Experiment ID")
        try:
            exp = Experiment.objects.get(pk=int(args[0]))
        except Experiment.DoesNotExist:
            raise CommandError("Experiment ID %s not found" % args[0])

        # FIXME - we fetch a bunch of stuff outside of any transaction, and then
        # do the deletes in a transaction.  There is an obvious race condition here
        # that may result in components of an experiment not being deleted, or
        # being deleted when they shouldn't be.

        # Fetch Datasets and Datafiles and work out which ones would be deleted
        datasets = Dataset.objects.filter(experiments__id=exp.id)
        datafiles = Dataset_File.objects.filter(
            dataset__id__in=map((lambda ds: ds.id), datasets))
        uniqueDatasets = filter((lambda ds: ds.experiments.count() == 1),
                                datasets)
        uniqueDatasetIds = map((lambda ds: ds.id), uniqueDatasets)
        uniqueDatafiles = filter(
            (lambda df: df.dataset.id in uniqueDatasetIds), datafiles)

        # Fetch other stuff to be printed and deleted.
        acls = ExperimentACL.objects.filter(experiment=exp)
        authors = Author_Experiment.objects.filter(experiment=exp)
        epsets = ExperimentParameterSet.objects.filter(experiment=exp)

        confirmed = options.get('confirmed', False)
        listOnly = options.get('list', False)
        if not listOnly and not confirmed:
            self.stdout.write("Delete the following experiment?\n\n")

        if listOnly or not confirmed:
            # Print basic experiment information
            self.stdout.write("Experiment\n    ID: {0}\n".format(exp.id))
            self.stdout.write("    Title: {0}\n".format(exp.title))
            self.stdout.write("    Locked: {0}\n".format(exp.locked))
            self.stdout.write("    Public Access: {0}\n".format(
                exp.public_access))

            # List experiment authors
            self.stdout.write("    Authors:\n")
            for author in authors:
                self.stdout.write("        {0}\n".format(author.author))

            # List experiment metadata
            for epset in epsets:
                self.stdout.write("    Param Set: {0} - {1}\n".format(
                    epset.schema.name, epset.schema.namespace))
                params = ExperimentParameter.objects.filter(parameterset=epset)
                for param in params:
                    self.stdout.write("        {0} = {1}\n".format(
                        param.name.full_name, param.get()))

            # List experiment ACLs
            self.stdout.write("    ACLs:\n")
            for acl in acls:
                self.stdout.write("        {0}-{1}, flags: ".format(
                    acl.pluginId, acl.entityId))
                if acl.canRead:
                    self.stdout.write("R")
                if acl.canWrite:
                    self.stdout.write("W")
                if acl.canDelete:
                    self.stdout.write("D")
                if acl.isOwner:
                    self.stdout.write("O")
                self.stdout.write("\n")

            # Basic Statistics
            self.stdout.write(
                "    {0} total dataset(s), containing {1} file(s)\n".format(
                    datasets.count(), datafiles.count()))
            self.stdout.write(
                "    {0} non-shared dataset(s), containing {1} file(s)\n".
                format(len(uniqueDatasets), len(uniqueDatafiles)))
            if len(uniqueDatasets) > 0 and not listOnly:
                self.stdout.write(
                    "        (The non-shared datasets and files will be deleted)\n"
                )

        # If the user has only requested a listing finish now
        if listOnly:
            return

        if not confirmed:
            # User must enter "yes" to proceed
            self.stdout.write("\n\nConfirm Deletion? (yes): ")
            ans = sys.stdin.readline().strip()
            if ans != "yes":
                self.stdout.write("'yes' not entered, aborting.\n")
                return

        # Consider the entire experiment deletion atomic
        using = options.get('database', DEFAULT_DB_ALIAS)
        transaction.commit_unless_managed(using=using)
        transaction.enter_transaction_management(using=using)
        transaction.managed(True, using=using)

        try:
            acls.delete()
            epsets.delete()
            for dataset in datasets:
                dataset.experiments.remove(exp.id)
                if dataset.experiments.count() == 0:
                    DatasetParameterSet.objects.filter(
                        dataset=dataset).delete()
                    for datafile in Dataset_File.objects.filter(
                            dataset=dataset):
                        DatafileParameterSet.objects.filter(
                            dataset_file=datafile).delete()
                        datafile.delete()
                    dataset.delete()
            authors.delete()
            exp.delete()

            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)
        except Exception:
            transaction.rollback(using=using)
            exc_class, exc, tb = sys.exc_info()
            new_exc = CommandError(
                "Exception %s has occurred: rolled back transaction" %
                (exc or exc_class))
            raise new_exc.__class__, new_exc, tb
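
A hedged invocation sketch for the command above; the command name ``rmexperiment`` and the experiment id are assumptions, but the ``list`` and ``confirmed`` options match the ones read in ``handle``:

# Sketch only: 'rmexperiment' is an assumed command name and 42 an
# illustrative experiment id.
from django.core.management import call_command

# Print what would be deleted without touching the database.
call_command('rmexperiment', '42', list=True)

# Delete without the interactive "yes" prompt.
call_command('rmexperiment', '42', confirmed=True)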
Ejemplo n.º 48
0
            elif line[0] != "#":
                sys.stderr.write("Junk data on line %s" % i)

            db.reset_queries()

            i += 1
            pct = (i * 100 / totalobjs)
            if pct != prev_pct:
                sys.stdout.write("  [%s%%]\r" % pct)
                sys.stdout.flush()
                prev_pct = pct

        f.close()

        transaction.commit()
        transaction.leave_transaction_management()
    else:
        # Legacy dumpdata output. Try loading it directly.
        print "Importing old style dump format. This may take a while."
        f.close()
        os.system("./reviewboard/manage.py loaddata %s" % filename)
except Exception, e:
    f.close()
    sys.stderr.write("Problem installing '%s': %s\n" % (filename, str(e)))
    sys.exit(1)

    if transaction_setup:
        transaction.rollback()
        transaction.leave_transaction_management()

print
Ejemplo n.º 49
0
def migrate_model(processor, model, fields):
    from ella.core.models import Publishable
    model = get_model(*model.split('.'))
    ct = ContentType.objects.get_for_model(model)
    if model == Publishable:
        ct = None
    print 'processing', model._meta, ':',
    sys.stdout.flush()

    converted = 0
    deps = 0

    try:
        enter_transaction_management()
        managed(True)

        try:
            for m in model.objects.order_by().iterator():
                if not ct:  # publishable
                    ct = ContentType.objects.get_for_id(m.content_type_id)
                sys.stdout.write('.')
                converted += 1

                # commit every 1000 iterations
                if (converted % 1000) == 0 and is_dirty():
                    commit()
                    sys.stdout.write('C')
                    sys.stdout.flush()

                dirty = False
                for f in fields:
                    val = getattr(m, f)
                    if val:
                        val, cnt = BOX_RE.subn(update_field(m, ct), val)
                        if cnt > 0:
                            deps += cnt
                            setattr(m, f, val)
                            dirty = True

                SourceText.objects.extract_from_instance(m,
                                                         processor,
                                                         fields,
                                                         content_type=ct,
                                                         force_save=dirty,
                                                         force_create=True)
        except:
            # rollback and propagate if something goes wrong
            if is_dirty():
                rollback()
            raise
        else:
            # commit at the end
            if is_dirty():
                commit()
    finally:
        leave_transaction_management()

    print
    print 'DONE converted %d (%d reported dependencies)' % (
        converted,
        deps,
    )
    sys.stdout.flush()
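
The commit-every-1000-rows idiom in ``migrate_model`` generalizes to any long write loop; a minimal sketch under the same old transaction functions (the helper and batch size are illustrative):

# Sketch only: periodic commits during a long migration loop, using the
# same old transaction functions as migrate_model above.
from django.db.transaction import (enter_transaction_management, managed,
                                   commit, rollback, is_dirty,
                                   leave_transaction_management)

def process_in_batches(queryset, handle_row, batch_size=1000):
    enter_transaction_management()
    managed(True)
    try:
        for i, row in enumerate(queryset.iterator(), 1):
            handle_row(row)
            # Flush work to the database every batch_size rows so a crash
            # late in the run does not lose everything.
            if i % batch_size == 0 and is_dirty():
                commit()
        if is_dirty():
            commit()
    except:
        if is_dirty():
            rollback()
        raise
    finally:
        leave_transaction_management()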
Ejemplo n.º 50
0
    def handle(self, *fixture_files, **options):

        using = options.get('database', DEFAULT_DB_ALIAS)
        mode = options.get('mode', 'append')
        items_into_tree = options.get('into_tree', None)

        if items_into_tree is not None:
            try:
                items_into_tree = Tree.objects.get(alias=items_into_tree)
            except ObjectDoesNotExist:
                raise CommandError(
                    'Target tree aliased by `%s` does not exist. Please create it before import.'
                    % items_into_tree)
            else:
                mode = 'append'

        connection = connections[using]
        cursor = connection.cursor()

        self.style = no_style()

        transaction.commit_unless_managed(using=using)
        transaction.enter_transaction_management(using=using)
        transaction.managed(True, using=using)

        loaded_object_count = 0

        if mode == 'replace':
            try:
                Tree.objects.all().delete()
                TreeItem.objects.all().delete()
            except ObjectDoesNotExist:
                pass

        for fixture_file in fixture_files:

            self.stdout.write('Loading fixture from `%s` ...\n' % fixture_file)

            fixture = open(fixture_file, 'r')

            try:
                objects = serializers.deserialize('json', fixture, using=using)
            except (SystemExit, KeyboardInterrupt):
                raise

            trees = []
            tree_items = defaultdict(list)
            tree_item_parents = defaultdict(list)
            tree_items_new_indexes = {}

            for obj in objects:
                if router.allow_syncdb(using, obj.object.__class__):
                    if isinstance(obj.object, (Tree, TreeItem)):
                        if isinstance(obj.object, Tree):
                            trees.append(obj.object)
                        else:
                            if items_into_tree is not None:
                                obj.object.tree_id = items_into_tree.id
                            tree_items[obj.object.tree_id].append(obj.object)
                            tree_item_parents[obj.object.parent_id].append(
                                obj.object.id)

            if items_into_tree is not None:
                trees = [
                    items_into_tree,
                ]

            try:

                for tree in trees:

                    self.stdout.write('\nImporting tree `%s` ...\n' %
                                      tree.alias)
                    orig_tree_id = tree.id

                    if items_into_tree is None:
                        if mode == 'append':
                            tree.pk = None
                            tree.id = None

                        tree.save(using=using)
                        loaded_object_count += 1

                    parents_ahead = []

                    for tree_item in tree_items[orig_tree_id]:
                        parent_ahead = False
                        self.stdout.write('Importing item `%s` ...\n' %
                                          tree_item.title)
                        tree_item.tree_id = tree.id
                        orig_item_id = tree_item.id

                        if mode == 'append':
                            tree_item.pk = None
                            tree_item.id = None

                            if tree_item.id in tree_items_new_indexes:
                                tree_item.pk = tree_item.id = tree_items_new_indexes[
                                    tree_item.id]

                            if tree_item.parent_id is not None:
                                if tree_item.parent_id in tree_items_new_indexes:
                                    tree_item.parent_id = tree_items_new_indexes[
                                        tree_item.parent_id]
                                else:
                                    parent_ahead = True

                        tree_item.save(using=using)
                        loaded_object_count += 1

                        if mode == 'append':
                            tree_items_new_indexes[orig_item_id] = tree_item.id
                            if parent_ahead:
                                parents_ahead.append(tree_item)

                    # A second pass is necessary for tree items imported before their parents.
                    for tree_item in parents_ahead:
                        tree_item.parent_id = tree_items_new_indexes[
                            tree_item.parent_id]
                        tree_item.save(using=using)

            except (SystemExit, KeyboardInterrupt):
                raise

            except Exception:
                import traceback
                fixture.close()
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
                self.stderr.write(
                    self.style.ERROR('Fixture `%s` import error: %s\n' %
                                     (fixture_file, ''.join(
                                         traceback.format_exception(
                                             *sys.exc_info())))))

            fixture.close()

        # Reset DB sequences, for DBMS with sequences support.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                self.style, [Tree, TreeItem])
            if sequence_sql:
                self.stdout.write('Resetting DB sequences ...\n')
                for line in sequence_sql:
                    cursor.execute(line)

        transaction.commit(using=using)
        transaction.leave_transaction_management(using=using)

        connection.close()
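
A hedged invocation sketch for the command above, assuming it is registered as django-sitetree's ``sitetreeload``; the fixture path and tree alias are illustrative:

# Sketch only: 'sitetreeload' is the assumed command name; the fixture
# path and tree alias are hypothetical.
from django.core.management import call_command

# Append trees and items from the fixture, keeping existing data.
call_command('sitetreeload', 'treedump.json', mode='append')

# Import only the items, attaching them to an existing tree by alias.
call_command('sitetreeload', 'treedump.json', into_tree='main')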
Ejemplo n.º 51
0
 def _leave_transaction_management(self):
     transaction.leave_transaction_management(using=self.using)
     if not connections[self.using].is_managed() and \
             connections[self.using].features.uses_autocommit:
         connections[self.using]._set_isolation_level(
                         psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
Ejemplo n.º 52
0
    def handle(self, *fixture_labels, **options):
        from django.db.models import get_apps
        from django.core import serializers
        from django.db import connection, transaction
        from django.conf import settings

        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        object_count = 0
        objects_per_fixture = []
        models = set()

        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed()
            transaction.enter_transaction_management()
            transaction.managed(True)

        app_fixtures = [
            os.path.join(os.path.dirname(app.__file__), 'fixtures')
            for app in get_apps()
        ]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')
            if len(parts) == 1:
                fixture_name = fixture_label
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    print "Loading '%s' fixtures..." % fixture_name
            else:
                sys.stderr.write(
                    self.style.ERROR(
                        "Problem installing fixture '%s': %s is not a known serialization format."
                        % (fixture_name, format)))
                transaction.rollback()
                transaction.leave_transaction_management()
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(
                    settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    print "Checking %s for fixtures..." % humanize(fixture_dir)

                label_found = False
                for format in formats:
                    serializer = serializers.get_serializer(format)
                    if verbosity > 1:
                        print "Trying %s for %s fixture '%s'..." % \
                            (humanize(fixture_dir), format, fixture_name)
                    try:
                        full_path = os.path.join(
                            fixture_dir, '.'.join([fixture_name, format]))
                        fixture = open(full_path, 'r')
                        if label_found:
                            fixture.close()
                            print self.style.ERROR(
                                "Multiple fixtures named '%s' in %s. Aborting."
                                % (fixture_name, humanize(fixture_dir)))
                            transaction.rollback()
                            transaction.leave_transaction_management()
                            return
                        else:
                            fixture_count += 1
                            objects_per_fixture.append(0)
                            if verbosity > 0:
                                print "Installing %s fixture '%s' from %s." % \
                                    (format, fixture_name, humanize(fixture_dir))
                            try:
                                objects = serializers.deserialize(
                                    format, fixture)
                                for obj in objects:
                                    object_count += 1
                                    objects_per_fixture[-1] += 1
                                    models.add(obj.object.__class__)
                                    obj.save()
                                label_found = True
                            except (SystemExit, KeyboardInterrupt):
                                raise
                            except Exception:
                                import traceback
                                fixture.close()
                                transaction.rollback()
                                transaction.leave_transaction_management()
                                if show_traceback:
                                    traceback.print_exc()
                                else:
                                    sys.stderr.write(
                                        self.style.ERROR(
                                            "Problem installing fixture '%s': %s\n"
                                            % (full_path,
                                               traceback.format_exc())))
                                return
                            fixture.close()
                    except:
                        if verbosity > 1:
                            print "No %s fixture '%s' in %s." % \
                                (format, fixture_name, humanize(fixture_dir))

        # If any of the fixtures we loaded contain 0 objects, assume that an
        # error was encountered during fixture loading.
        if 0 in objects_per_fixture:
            sys.stderr.write(
                self.style.ERROR(
                    "No fixture data found for '%s'. (File format may be invalid.)"
                    % (fixture_name)))
            transaction.rollback()
            transaction.leave_transaction_management()
            return

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                self.style, models)
            if sequence_sql:
                if verbosity > 1:
                    print "Resetting sequences"
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit()
            transaction.leave_transaction_management()

        if object_count == 0:
            if verbosity > 1:
                print "No fixtures found."
        else:
            if verbosity > 0:
                print "Installed %d object(s) from %d fixture(s)" % (
                    object_count, fixture_count)

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
Ejemplo n.º 53
0
def update_imported_docs(project, version):
    """
    Check out or update the given project's repository.
    """
    update_docs_output = {}
    if not project.vcs_repo():
        raise ProjectImportError("Repo type '{repo_type}' unknown".format(
            repo_type=project.repo_type))

    if version:
        log.info('Checking out version {slug}: {identifier}'.format(
            slug=version.slug, identifier=version.identifier))
        version_slug = version.slug
        version_repo = project.vcs_repo(version_slug)
        update_docs_output['checkout'] = version_repo.checkout(
            version.identifier)
    else:
        log.info('Updating to latest revision')
        version_slug = 'latest'
        version_repo = project.vcs_repo(version_slug)
        update_docs_output['checkout'] = version_repo.update()

    # Ensure we have a conf file (an exception is raised if not)
    conf_file = project.conf_file(version.slug)

    #Do Virtualenv bits:
    if project.use_virtualenv:
        update_docs_output['venv'] = run(
            '{cmd} --distribute --never-download --no-site-packages {path}'.
            format(cmd='virtualenv',
                   path=project.venv_path(version=version_slug)))
        update_docs_output['sphinx'] = run('{cmd} install -U sphinx'.format(
            cmd=project.venv_bin(version=version_slug, bin='pip')))

        if project.requirements_file:
            os.chdir(project.checkout_path(version_slug))
            update_docs_output['requirements'] = run(
                '{cmd} install -r {requirements}'.format(
                    cmd=project.venv_bin(version=version_slug, bin='pip'),
                    requirements=project.requirements_file))
        os.chdir(project.checkout_path(version_slug))
        update_docs_output['install'] = run(
            '{cmd} setup.py install --force'.format(
                cmd=project.venv_bin(version=version_slug, bin='python')))

    # check tags/version
    #XXX:dc: what in this block raises the values error?
    try:
        if version_repo.supports_tags:
            transaction.enter_transaction_management(True)
            tags = version_repo.tags
            old_tags = [
                obj['identifier']
                for obj in api.version.get(project__slug=project.slug,
                                           limit=50)['objects']
            ]
            for tag in tags:
                if tag.identifier in old_tags:
                    continue
                slug = slugify_uniquely(Version,
                                        tag.verbose_name,
                                        'slug',
                                        255,
                                        project=project)
                try:

                    api.version.post(
                        dict(project="/api/v1/project/%s/" % project.pk,
                             slug=slug,
                             identifier=tag.identifier,
                             verbose_name=tag.verbose_name))
                    log.info("New tag found: {0}".format(tag.identifier))
                    highest = project.highest_version['version']
                    ver_obj = mkversion(ver)
                    #TODO: Handle updating higher versions automatically.
                    #This never worked very well, anyways.
                    if highest and ver_obj and ver_obj > highest:
                        log.info("Highest verison known, building docs")
                        update_docs.delay(ver.project.pk, version_pk=ver.pk)
                except Exception, e:
                    log.error("Failed to create version (tag)", exc_info=True)
                    transaction.rollback()
                    #break
            transaction.leave_transaction_management()
        if version_repo.supports_branches:
            transaction.enter_transaction_management(True)
            branches = version_repo.branches
            old_branches = [
                obj['identifier']
                for obj in api.version.get(project__slug=project.slug,
                                           limit=50)['objects']
            ]
            for branch in branches:
                if branch.identifier in old_branches:
                    continue
                slug = slugify_uniquely(Version,
                                        branch.verbose_name,
                                        'slug',
                                        255,
                                        project=project)
                try:
                    api.version.post(
                        dict(project="/api/v1/project/%s/" % project.pk,
                             slug=slug,
                             identifier=branch.identifier,
                             verbose_name=branch.verbose_name))
                    log.info("New branch found: {0}".format(branch.identifier))
                except Exception, e:
                    log.error("Failed to create version (branch)",
                              exc_info=True)
                    transaction.rollback()
                    #break
            transaction.leave_transaction_management()
Ejemplo n.º 54
0
                            e.doAction()

                        # Now update the evolution table
                        version = Version(signature=current_signature)
                        version.save(**using_args)

                        for evolution in new_evolutions:
                            evolution.version = version
                            evolution.save(**using_args)

                        transaction.commit(**using_args)
                    except Exception, ex:
                        transaction.rollback(**using_args)
                        raise CommandError('Error applying evolution: %s' %
                                           str(ex))

                    transaction.leave_transaction_management(**using_args)

                    if verbosity > 0:
                        print 'Evolution successful.'
                else:
                    print self.style.ERROR('Evolution cancelled.')
            elif not compile_sql:
                if verbosity > 0:
                    if simulated:
                        print "Trial evolution successful."
                        print "Run './manage.py evolve %s--execute' to apply evolution." % (
                            hint and '--hint ' or '')
        elif verbosity > 0:
            print 'No evolution required.'
Ejemplo n.º 55
0
 def tearDown(self):
     # The tests access the database after exercising 'atomic', making the
     # connection dirty; a rollback is required to make it clean.
     transaction.rollback()
     transaction.leave_transaction_management()
Ejemplo n.º 56
0
 def test_atomic_prevents_calling_transaction_management_methods(self):
     with transaction.atomic():
         with self.assertRaises(transaction.TransactionManagementError):
             transaction.enter_transaction_management()
         with self.assertRaises(transaction.TransactionManagementError):
             transaction.leave_transaction_management()
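
For contrast with the manual API used in most of these examples, Django 1.6+ expresses the same commit-or-rollback guarantee with ``transaction.atomic``; a minimal sketch:

# Sketch only: the modern (Django 1.6+) equivalent of the
# enter/commit/rollback/leave pattern used elsewhere in these examples.
from django.db import transaction

def save_all_or_nothing(objects):
    # Everything inside the block commits together; any exception
    # rolls the whole block back.
    with transaction.atomic():
        for obj in objects:
            obj.save()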
Ejemplo n.º 57
0
def create_data(url_root):
    gateway = urljoin(url_root, 'router/')

    transaction.enter_transaction_management()

    doctor = User()
    doctor.mcid = mcid_generator.next_mcid()
    doctor.first_name = 'Demo'
    doctor.last_name = 'Doctor'
    doctor.email = '*****@*****.**'
    doctor.updatetime = 0
    doctor.ccrlogupdatetime = 0
    doctor.save()

    physician = User()
    physician.mcid = mcid_generator.next_mcid()
    physician.first_name = 'Demo'
    physician.last_name = 'Physician'
    physician.email = '*****@*****.**'
    physician.updatetime = 0
    physician.ccrlogupdatetime = 0
    physician.save()

    g, p = create_group('Demo Group Worklist',
                        '*****@*****.**', url_root,
                        doctor.mcid)

    add_to_group(g, physician.mcid)

    patients = []
    for first_name, last_name, email, currentccr, reason, ccrs in PATIENTS:
        user = User()
        user.first_name = first_name
        user.last_name = last_name
        user.email = email
        user.mcid = mcid_generator.next_mcid()
        user.acctype = 'USER'
        user.rolehack = 'ccrlhm'
        user.updatetime = 0
        user.ccrlogupdatetime = 0
        user.save()

        patients.append(user)

        # Set worklist
        sql_execute(ACCOUNT_RLS_SQL, user.mcid, p.practiceRlsUrl)

        if not currentccr:
            continue

        sql_execute(DOCUMENT_TYPE_SQL, user.mcid, currentccr)

        ev = Practiceccrevent()
        ev.practiceid = p
        ev.PatientGivenName = user.first_name
        ev.PatientFamilyName = user.last_name
        ev.PatientIdentifier = user.mcid
        ev.PatientIdentifierSource = 'Patient Medcommons ID'
        ev.Guid = currentccr
        ev.Purpose = reason
        ev.SenderProviderId = 'idp'
        ev.ReceiverProviderId = 'idp'
        ev.DOB = '16 Jan 1968 05:00:00 GMT'
        ev.CXPServerURL = ''
        ev.CXPServerVendor = 'Medcommons'
        ev.ViewerURL = urljoin(gateway, 'access?g=%s' % currentccr)
        ev.Comment = '\n            3D Imaging Consult\n            '
        ev.CreationDateTime = 1162365858
        ev.ConfirmationCode = tn_generator.next_tracking_number()
        ev.RegistrySecret = ''
        ev.PatientSex = 'Female'
        ev.PatientAge = ''
        ev.Status = 'New'
        ev.ViewStatus = 'Visible'
        ev.save()

        sql_execute(CCRLOG_SQL, user.mcid, currentccr, 'CCR',
                    ev.ConfirmationCode)

        for ccr in ccrs:
            sql_execute(CCRLOG_SQL, user.mcid, ccr, 'CCR',
                        tn_generator.next_tracking_number())

    sql_execute(TODIR_SQL, g.groupinstanceid, doctor.email, doctor.email,
                doctor.mcid)

    sql_execute(TODIR_SQL, g.groupinstanceid, physician.email, physician.email,
                physician.mcid)

    demoCCR = 'fdfbbb9cf53f8577b420ed72567cd2104589fb0d'

    sql_execute(CCRLOG_SQL, doctor.mcid, demoCCR, 'DICOM Import',
                tn_generator.next_tracking_number())

    sql_execute(CCRLOG_SQL, patients[0].mcid, demoCCR, 'DICOM Import',
                tn_generator.next_tracking_number())

    sql_execute(CCRLOG_SQL, patients[0].mcid, PATIENTS[0][3], 'DICOM Import',
                tn_generator.next_tracking_number())

    # Secondary group
    if 0:
        g2, p2 = create_group('Healthy Doctors',
                              '*****@*****.**', url_root,
                              doctor.mcid)

    p = MCProperty()
    p.property = 'acDemoDoctor'
    p.value = doctor.mcid
    p.save()

    transaction.leave_transaction_management()
Ejemplo n.º 58
0
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try: user = User.objects.all().using(database).get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate plan':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate plan', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else: constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
        if plantype < 1 or plantype > 2:
          raise ValueError("Invalid plan type: %s" % options['plantype'])
      else: plantype = 1
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)

      # Log task
      task.save(using=database)
      transaction.commit(using=database)

      # Locate commands.py
      cmd = None
      for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        if os.path.exists(os.path.join(os.path.dirname(mod.__file__),'commands.py')):
          cmd = os.path.join(os.path.dirname(mod.__file__),'commands.py')
          break
      if not cmd: raise Exception("Can't locate commands.py")

      # Execute
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME,'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(settings.FREPPLE_HOME,'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(settings.FREPPLE_HOME,'python27.zip') + os.pathsep + os.path.normpath(settings.FREPPLE_APP)
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      ret = os.system('frepple "%s"' % cmd.replace('\\','\\\\'))
      if ret != 0 and ret != 2:
        # Return code 0 is a successful run
        # Return code 2 is a run cancelled by the user; that's shown in the status field.
        raise Exception('Failed with exit code %d' % ret)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=database)
      try: transaction.commit(using=database)
      except: pass
      transaction.leave_transaction_management(using=database)
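
The command above follows a recognizable pattern: log a Task row and commit it immediately so its "0%" status is visible to other connections, run the external planner, and always persist the final status in the finally block. Condensed into a hedged sketch (runner() is a hypothetical callable standing in for shelling out to the frepple executable):

from datetime import datetime
from django.db import transaction

def run_tracked_task(task, database, runner):
    # Sketch of the task-bookkeeping pattern above, using the same
    # pre-Django-1.6 per-database transaction API.
    transaction.enter_transaction_management(using=database)
    try:
        task.save(using=database)
        transaction.commit(using=database)  # make the initial status visible now
        runner()
        task.status = 'Done'
        task.finished = datetime.now()
    except Exception as e:
        task.status = 'Failed'
        task.message = str(e)
        task.finished = datetime.now()
        raise
    finally:
        task.save(using=database)
        try:
            transaction.commit(using=database)
        except Exception:
            pass
        transaction.leave_transaction_management(using=database)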
Example No. 59
    def handle(self, *fixture_labels, **options):
        using = options.get('database', DEFAULT_DB_ALIAS)

        connection = connections[using]
        self.style = no_style()

        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', False)

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)
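        # For example (a sketch, not part of the original command): another
        # script that already manages its own transaction could run
        #   call_command('loaddata', 'initial_data', commit=False)
        # so the fixture SQL joins that outer transaction instead of
        # committing here. 'initial_data' is only a placeholder fixture name.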

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(
                        self.namelist()
                    ) == 1, "Zip-compressed fixtures must contain only one file."

            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None: file,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
        connection.begin_defer_constraint_checks()

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(path)
            else:
                # It's a models.py module
                app_module_paths.append(app.__file__)

        app_fixtures = [
            os.path.join(os.path.dirname(path), 'fixtures')
            for path in app_module_paths
        ]
        for fixture_label in fixture_labels:
            parts = fixture_label.split('.')

            if len(parts) > 1 and parts[-1] in compression_types:
                compression_formats = [parts[-1]]
                parts = parts[:-1]
            else:
                compression_formats = compression_types.keys()

            if len(parts) == 1:
                fixture_name = parts[0]
                formats = serializers.get_public_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                if format in serializers.get_public_serializer_formats():
                    formats = [format]
                else:
                    formats = []

            if formats:
                if verbosity > 1:
                    self.stdout.write("Loading '%s' fixtures...\n" %
                                      fixture_name)
            else:
                self.stderr.write(
                    self.style.ERROR(
                        "Problem installing fixture '%s': %s is not a known serialization format.\n"
                        % (fixture_name, format)))
                # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
                connection.end_defer_constraint_checks()

                if commit:
                    transaction.rollback(using=using)
                    transaction.leave_transaction_management(using=using)
                return

            if os.path.isabs(fixture_name):
                fixture_dirs = [fixture_name]
            else:
                fixture_dirs = app_fixtures + list(
                    settings.FIXTURE_DIRS) + ['']

            for fixture_dir in fixture_dirs:
                if verbosity > 1:
                    self.stdout.write("Checking %s for fixtures...\n" %
                                      humanize(fixture_dir))

                label_found = False
                for combo in product([using, None], formats,
                                     compression_formats):
                    database, format, compression_format = combo
                    file_name = '.'.join(
                        p for p in
                        [fixture_name, database, format, compression_format]
                        if p)

                    if verbosity > 1:
                        self.stdout.write("Trying %s for %s fixture '%s'...\n" % \
                            (humanize(fixture_dir), file_name, fixture_name))
                    full_path = os.path.join(fixture_dir, file_name)
                    open_method = compression_types[compression_format]
                    try:
                        fixture = open_method(full_path, 'r')
                        if label_found:
                            fixture.close()
                            self.stderr.write(
                                self.style.ERROR(
                                    "Multiple fixtures named '%s' in %s. Aborting.\n"
                                    % (fixture_name, humanize(fixture_dir))))
                            # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
                            connection.end_defer_constraint_checks()

                            if commit:
                                transaction.rollback(using=using)
                                transaction.leave_transaction_management(
                                    using=using)
                            return
                        else:
                            fixture_count += 1
                            objects_in_fixture = 0
                            loaded_objects_in_fixture = 0
                            if verbosity > 0:
                                self.stdout.write("Installing %s fixture '%s' from %s.\n" % \
                                    (format, fixture_name, humanize(fixture_dir)))
                            try:
                                objects = serializers.deserialize(format,
                                                                  fixture,
                                                                  using=using)
                                for obj in objects:
                                    objects_in_fixture += 1
                                    if router.allow_syncdb(
                                            using, obj.object.__class__):
                                        loaded_objects_in_fixture += 1
                                        models.add(obj.object.__class__)
                                        obj.save(using=using)
                                loaded_object_count += loaded_objects_in_fixture
                                fixture_object_count += objects_in_fixture
                                label_found = True
                            except (SystemExit, KeyboardInterrupt):
                                raise
                            except Exception:
                                import traceback
                                fixture.close()

                                # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
                                connection.end_defer_constraint_checks()

                                if commit:
                                    transaction.rollback(using=using)
                                    transaction.leave_transaction_management(
                                        using=using)
                                if show_traceback:
                                    traceback.print_exc()
                                else:
                                    self.stderr.write(
                                        self.style.ERROR(
                                            "Problem installing fixture '%s': %s\n"
                                            % (full_path, ''.join(
                                                traceback.format_exception(
                                                    sys.exc_type,
                                                    sys.exc_value,
                                                    sys.exc_traceback)))))
                                return
                            fixture.close()

                            # If the fixture we loaded contains 0 objects, assume that an
                            # error was encountered during fixture loading.
                            if objects_in_fixture == 0:
                                self.stderr.write(
                                    self.style.ERROR(
                                        "No fixture data found for '%s'. (File format may be invalid.)\n"
                                        % (fixture_name)))
                                # from https://code.djangoproject.com/attachment/ticket/3615/defer_constraint_checks.diff
                                connection.end_defer_constraint_checks()

                                if commit:
                                    transaction.rollback(using=using)
                                    transaction.leave_transaction_management(
                                        using=using)
                                return

                    except Exception, e:
                        if verbosity > 1:
                            self.stdout.write("No %s fixture '%s' in %s.\n" % \
                                (format, fixture_name, humanize(fixture_dir)))
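
This variant repeats the same cleanup (end_defer_constraint_checks, rollback, leave_transaction_management) at every error path. As a hedged sketch only: if those early returns were raised as exceptions instead, the repetition could be folded into a small context manager. Note that begin_defer_constraint_checks() and end_defer_constraint_checks() come from the patched connection in the ticket linked above, not from stock Django.

from contextlib import contextmanager
from django.db import transaction

@contextmanager
def fixture_error_guard(connection, using, commit=True):
    # Sketch: assumes the patched connection from the ticket above and
    # that fixture errors are raised rather than handled with "return".
    connection.begin_defer_constraint_checks()
    try:
        yield
    except Exception:
        connection.end_defer_constraint_checks()
        if commit:
            transaction.rollback(using=using)
            transaction.leave_transaction_management(using=using)
        raise
    connection.end_defer_constraint_checks()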
Example No. 60
    def handle(self, *fixture_labels, **options):

        self.ignore = options.get('ignore')
        self.using = options.get('database')

        connection = connections[self.using]

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path of at "
                "least one fixture in the command line."
            )

        self.verbosity = int(options.get('verbosity'))

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        self.fixture_count = 0
        self.loaded_object_count = 0
        self.fixture_object_count = 0
        self.models = set()

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=self.using)
            transaction.enter_transaction_management(using=self.using)
            transaction.managed(True, using=self.using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        self.compression_types = {
            None:   open,
            'gz':   gzip.GzipFile,
            'zip':  SingleZipReader
        }
        if has_bz2:
            self.compression_types['bz2'] = bz2.BZ2File

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(upath(path))
            else:
                # It's a models.py module
                app_module_paths.append(upath(app.__file__))

        app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]

        try:
            with connection.constraint_checks_disabled():
                for fixture_label in fixture_labels:
                    self.load_label(fixture_label, app_fixtures)

            # Since we disabled constraint checks, we must manually check for
            # any invalid keys that might have been added
            table_names = [model._meta.db_table for model in self.models]
            try:
                connection.check_constraints(table_names=table_names)
            except Exception as e:
                e.args = ("Problem installing fixtures: %s" % e,)
                raise

        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as e:
            if commit:
                transaction.rollback(using=self.using)
                transaction.leave_transaction_management(using=self.using)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if self.loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(no_style(), self.models)
            if sequence_sql:
                if self.verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=self.using)
            transaction.leave_transaction_management(using=self.using)

        if self.verbosity >= 1:
            if self.fixture_object_count == self.loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" % (
                    self.loaded_object_count, self.fixture_count))
            else:
                self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" % (
                    self.loaded_object_count, self.fixture_object_count, self.fixture_count))

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
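
The commands above all rely on the explicit transaction API (commit_unless_managed, enter_transaction_management, managed, leave_transaction_management) that was deprecated in Django 1.6 and removed in 1.8. A hedged sketch of the same all-or-nothing loading behaviour on a newer Django follows; load_all() is a hypothetical callable standing in for the per-fixture loading done by load_label():

from django.db import connections, transaction

def load_fixtures_atomically(load_all, using='default'):
    # Sketch of a post-Django-1.6 equivalent of the pattern above.
    connection = connections[using]
    with transaction.atomic(using=using):
        with connection.constraint_checks_disabled():
            load_all()
        # Constraint checks were off while loading, so verify them now
        # (the real command narrows this with table_names=...).
        connection.check_constraints()
    # Same MySQL workaround as above: do not reuse the loading connection
    # for subsequent queries.
    connection.close()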