Example #1
0
 def forwards(self, orm):
     """Create a composite index on StockTransaction concurrently.

     CREATE INDEX CONCURRENTLY cannot run inside a transaction block on
     PostgreSQL, so the transaction South opened for this migration is
     committed first and a fresh one is started afterwards.
     """
     # Adding index on 'StockTransaction', fields ['case_id', 'product_id', 'section_id']
     db.commit_transaction()
     db.execute(
         'CREATE INDEX CONCURRENTLY "stock_stocktransaction_case_id_prod_id_sec_id" ON "stock_stocktransaction" ("case_id","product_id","section_id")'
     )
     db.start_transaction()
    def forwards(self, orm):
        """Delete duplicate Document rows before a unique index is created.

        If there are duplicated (content_type_id, object_id) pairs, creating
        the unique index would fail, so for each duplicated pair every
        document except the first is deleted.

        The data migration runs in its own transaction for PostgreSQL.
        See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
        """
        try:
            db.start_transaction()
            # Pairs (content_type_id, object_id) shared by two or more docs.
            duplicated_records = Document.objects \
                .values('content_type_id', 'object_id') \
                .annotate(id_count=models.Count('id')) \
                .filter(id_count__gt=1)

            # Delete all but the first document.
            for record in duplicated_records:
                docs = Document.objects \
                    .values_list('id', flat=True) \
                    .filter(
                        content_type_id=record['content_type_id'],
                        object_id=record['object_id'],
                    )[1:]

                docs = list(docs)

                # logging.warning with lazy args (warn() is a deprecated alias).
                logging.warning('Deleting documents %s', docs)

                Document.objects.filter(id__in=docs).delete()
            db.commit_transaction()
        except Exception:
            db.rollback_transaction()
            # Bare re-raise preserves the original traceback, unlike
            # 'raise e' which resets it under Python 2.
            raise
    def forwards(self, orm):
        "Write your forwards methods here."
        # Consolidates AssetModel and TenantGroup rows into the generic
        # PricingObjectModel table, re-pointing dependent rows as it goes.
        # FK checks are disabled while rows are moved between tables.
        db.start_transaction()
        self.set_foreign_key_checks(False)

        # asset: each AssetModel becomes a PricingObjectModel (type_id=1);
        # its asset infos are re-pointed at the new row.
        for model in orm.AssetModel.objects.all():
            new_model = orm.PricingObjectModel.objects.create(
                model_id=model.model_id,
                name=model.name,
                manufacturer=model.manufacturer,
                category=model.category,
                type_id=1,
            )
            model.assetinfo_set.update(model=new_model)

        # tenant: each TenantGroup becomes a PricingObjectModel (type_id=3).
        for model in orm.TenantGroup.objects.all():
            new_model = orm.PricingObjectModel.objects.create(
                name=model.name,
                model_id=model.group_id,
                type_id=3,
            )
            model.tenants.update(model=new_model)

        # move base usages over 100
        # NOTE(review): presumably reserves ids 1..100 for fixed usages —
        # confirm against bumped_auto_increment()/update_usage_id().
        self.bumped_auto_increment(101 + orm.BaseUsage.objects.count())
        self.update_usage_id()

        self.set_foreign_key_checks(True)
        db.commit_transaction()
Example #4
0
    def forwards(self, orm):
        """
        Adds an 'inserted_at' column to each existing data source table.

        Each ALTER runs in its own transaction so that one failing table
        (e.g. the column already exists) does not abort the rest.
        """
        _sync_couch()
        table_names = _get_all_table_names()

        num_tables = len(table_names)
        logger.info(
            "Start adding inserted_at column to %s existing UCR datasource tables",
            num_tables)

        for table_name in table_names:
            # Start the transaction outside the try so a failure to start
            # it cannot reach the commit/rollback handling below.
            db.start_transaction()
            try:
                logger.info("Adding inserted_at column to %s", table_name)
                db.add_column(table_name, 'inserted_at',
                              models.DateTimeField(null=True))
            except DatabaseError:
                logger.warning("Adding inserted_at column failed for %s",
                               table_name)
                # Roll the aborted transaction back; committing an aborted
                # PostgreSQL transaction would itself fail.
                db.rollback_transaction()
            else:
                db.commit_transaction()

        logger.info(
            "Finished adding inserted_at columns to existing UCR datasource tables"
        )
    def backwards(self, orm):
        """Re-add the per-column JSON fields on Dataset and rebuild their
        values from each dataset's column_schema.

        The schema changes run in the transaction South opened for this
        migration; it is committed before the data migration starts in a
        second transaction (required for PostgreSQL).
        """
        # Adding field 'Dataset.typed_columns'
        db.add_column('panda_dataset', 'typed_columns', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        # Adding field 'Dataset.column_types'
        db.add_column('panda_dataset', 'column_types', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        # Adding field 'Dataset.typed_column_names'
        db.add_column('panda_dataset', 'typed_column_names', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        # Adding field 'Dataset.columns'
        db.add_column('panda_dataset', 'columns', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        db.commit_transaction()     # Commit the first transaction
        db.start_transaction()      # Start the second, committed on completion

        if not db.dry_run:
            for dataset in orm.Dataset.objects.all():
                columns = []
                typed_columns = []
                column_types = []
                typed_column_names = []

                # Explode each schema entry back into the four parallel lists.
                for schema in dataset.column_schema:
                    columns.append(schema['name'])
                    typed_columns.append(schema['indexed'])
                    column_types.append(schema['type'])
                    typed_column_names.append(schema['indexed_name'])

                dataset.columns = columns
                dataset.typed_columns = typed_columns
                dataset.column_types = column_types
                dataset.typed_column_names = typed_column_names
                dataset.save()
Example #6
0
def create_auto_m2m_tables(model_class):
    " Create tables for ManyToMany fields "
    for f in model_class._meta.many_to_many:
        if f.rel.through:
            try:
                # Django 1.2+
                through = f.rel.through
            except AttributeError:
                # Django 1.1 and below
                through = f.rel.through_model

        # NOTE(review): if f.rel.through is falsy, 'through' is never bound;
        # the short-circuit 'or' below then skips the getattr, so no NameError.
        if (not f.rel.through) or getattr(through._meta, "auto_created", None):

            # Create the standard implied M2M table
            m2m_table_name = f.m2m_db_table()
            if (connection.introspection.table_name_converter(m2m_table_name) 
                        not in connection.introspection.table_names()):

                db.start_transaction()
                m2m_column_name = f.m2m_column_name()[:-3] # without "_id"
                m2m_reverse_name = f.m2m_reverse_name()[:-3] # without "_id"
                db.create_table(f.m2m_db_table(), (
                    ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
                    (m2m_column_name, models.ForeignKey(model_class, null=False)),
                    (m2m_reverse_name, models.ForeignKey(f.rel.to, null=False))
                ))
                db.create_unique(f.m2m_db_table(), [f.m2m_column_name(), f.m2m_reverse_name()])
                #db.execute_deferred_sql()
                db.commit_transaction()
                logger.debug("Created table '%s'" % m2m_table_name)
Example #7
0
    def _class_prepared_handler(sender, **kwargs):
        """ Signal handler for class_prepared.
            This will be run for every model, looking for the moment when all
            dependent models are prepared for the first time. It will then run
            the given function, only once.
        """

        sender_app=sender._meta.app_label.lower()+'.'+sender._meta.object_name
        already_prepared=set([sender_app])
        # 'app_models' avoids shadowing any module-level 'models' name.
        for app,app_models in app_cache.app_models.items():
            for model_name,model in app_models.items():
                already_prepared.add(app.lower()+'.'+model_name)

        if all([x in already_prepared for x in dependencies]):
            db.start_transaction()
            try:
                # We need to disconnect, otherwise each new dynamo model generation
                # will trigger it and cause a "maximum recursion error"
                class_prepared.disconnect(_class_prepared_handler,weak=False)
                fn()
            except DatabaseError as message:
                # If tables are missing altogether, not much we can do
                # until syncdb/migrate is run. At least database operations
                # will be rolled back.
                db.rollback_transaction()
                # Compare against the error TEXT: the original compared the
                # exception object itself to a string ('message <> "..."'),
                # which is never equal, so the 'raise' branch was unreachable.
                if str(message) != 'no such table: dynamo_metamodel':
                    # Unknown error: better connect again so we retry later.
                    class_prepared.connect(_class_prepared_handler, weak=False)
                else:
                    raise
            else:
                db.commit_transaction()
    def forwards(self, orm):
        """Drop the anchor columns from miniblog entries/announcement links.

        Rows anchored anywhere other than Q/Web are deleted first, since
        those anchor points no longer have any meaning.
        """
        # Removing unique constraint on 'Entry', fields ['slug', 'anchor']
        # [This doesn't seem to be in all of our production databases. -Michael]
        # db.delete_unique('miniblog_entry', ['slug', 'anchor_id'])

        # Delete all announcements/entries that were not anchored on Q/Web
        # since those anchor points now have no meaning
        # This is wrapped in a transaction so that the schema changes below
        # will work properly
        db.start_transaction()
        try:
            web_node_id = GetNode('Q/Web').id
        except DataTree.DoesNotExist:
            #   If there is no DataTree, delete everything.
            web_node_id = -1
        db.execute(
            'DELETE FROM "miniblog_announcementlink" WHERE "anchor_id" != %s',
            [
                web_node_id,
            ])
        db.execute('DELETE FROM "miniblog_entry" WHERE "anchor_id" != %s', [
            web_node_id,
        ])
        db.commit_transaction()

        # Deleting field 'AnnouncementLink.anchor'
        db.delete_column('miniblog_announcementlink', 'anchor_id')

        # Deleting field 'Entry.anchor'
        db.delete_column('miniblog_entry', 'anchor_id')
    def forwards(self, orm):
        """Add column_types/typed_column_names to Dataset and backfill them.

        The schema changes run in South's initial transaction, which is
        committed before the data migration runs in a second transaction
        (required for PostgreSQL).
        """
        # Adding field 'Dataset.column_types'
        db.add_column('panda_dataset',
                      'column_types',
                      self.gf('panda.fields.JSONField')(default=None,
                                                        null=True),
                      keep_default=False)

        # Adding field 'Dataset.typed_column_names'
        db.add_column('panda_dataset',
                      'typed_column_names',
                      self.gf('panda.fields.JSONField')(default=None,
                                                        null=True),
                      keep_default=False)

        db.commit_transaction()  # Commit the first transaction
        db.start_transaction()  # Start the second, committed on completion

        if not db.dry_run:
            for dataset in orm.Dataset.objects.all():
                if dataset.initial_upload:
                    dataset.column_types = dataset.initial_upload.guessed_types

                    # Account for bug where columns sometimes were not copied across
                    if not dataset.columns:
                        dataset.columns = dataset.initial_upload.columns
                else:
                    # No upload to guess from: assume plain unicode columns.
                    dataset.column_types = ['unicode' for c in dataset.columns]

                dataset.typed_column_names = [None for c in dataset.columns]

                dataset.save()
Example #10
0
    def restore_indexes(self):
        """Recreate the indexes and unique constraints recorded by
        get_indexes(), inside a single transaction.
        """
        db.start_transaction()
        for action_class, index in self.get_indexes():
            if issubclass(action_class, AddIndex):
                # If the field is unique, a plain index is not needed — a
                # unique constraint should be created instead.
                if len(index['fields']) == 1 and index['fields'][0].unique:
                    pass
                else:
                    db.create_index(
                        index['model']._meta.db_table,
                        [field.column for field in index['fields']])
                # For a single varchar/text field, PostgreSQL also needs an
                # extra index with the '_like' suffix.
                # http://south.aeracode.org/ticket/1214
                if len(index['fields']) == 1 and db._get_connection(
                ).vendor == 'postgresql':
                    db_type = index['fields'][0].db_type(
                        connection=db._get_connection())
                    if (db_type.startswith('varchar') or db_type == 'uuid'):
                        self.create_index(
                            db,
                            index['model']._meta.db_table,
                            [field.column for field in index['fields']],
                            unique=not issubclass(action_class, AddIndex))

            elif issubclass(action_class, AddUnique):
                db.create_unique(index['model']._meta.db_table,
                                 [field.column for field in index['fields']])

        db.commit_transaction()
Example #11
0
    def forwards(self, orm):
        """Convert StudentClassRegModuleInfo.signup_verb from DataTree verbs
        to RegistrationType rows, preserving each row's verb name.
        """
        #   Save the original verb names
        original_verb_len = len('V/Flags/Registration/')
        ## MIT-specific fix:  Make sure all SCRMI's actually have regg verbs

        default_verb = GetNode("V/Flags/Registration/Enrolled")

        # SCRMI id -> verb id (falling back to the default verb), then
        # SCRMI id -> verb name relative to 'V/Flags/Registration/'.
        verb_map = {}
        name_map = {}
        for item in StudentClassRegModuleInfo.objects.all().values_list('id', 'signup_verb_id'):
            verb_map[item[0]] = item[1] if item[1] else default_verb.id
        for key, val in verb_map.iteritems():
            name_map[key] = DataTree.objects.get(id=val).get_uri()[original_verb_len:]

        #   Delete the verbs (need to allow null values)
        db.start_transaction()
        db.alter_column('modules_studentclassregmoduleinfo', 'signup_verb_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['datatree.DataTree'], null=True))
        for item in StudentClassRegModuleInfo.objects.all():
            item.signup_verb = None
            item.save()
        db.commit_transaction()

        #   Changing field 'StudentClassRegModuleInfo.signup_verb'
        db.alter_column('modules_studentclassregmoduleinfo', 'signup_verb_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['program.RegistrationType'], null=True))
        db.start_transaction()
        #   Change verb IDs to RegistrationTypes
        for item in StudentClassRegModuleInfo.objects.all():
            item.signup_verb = RegistrationType.get_map(include=[name_map[item.id]], category='student')[name_map[item.id]]
            item.save()
        db.commit_transaction()

        # Restore NOT NULL now that every row has a RegistrationType.
        db.alter_column('modules_studentclassregmoduleinfo', 'signup_verb_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['program.RegistrationType']))
    def forwards(self, orm):
        """Introduce RainappConfig and attach existing rain models to it.

        Old rows are cleared first, in their own transaction, because a
        non-null 'config' FK with default=1 is added to each table below.
        """
        db.start_transaction()
        db.clear_table('lizard_rainapp_rainvalue')
        db.clear_table('lizard_rainapp_completerainvalue')
        db.clear_table('lizard_rainapp_geoobject')
        db.commit_transaction()

        # Adding model 'RainappConfig'
        db.create_table('lizard_rainapp_rainappconfig', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=128)),
            ('jdbcsource', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['lizard_fewsjdbc.JdbcSource'])),
            ('filter_id', self.gf('django.db.models.fields.CharField')(max_length=128)),
        ))
        db.send_create_signal('lizard_rainapp', ['RainappConfig'])

        # Adding field 'RainValue.config'
        db.add_column('lizard_rainapp_rainvalue', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)

        # Adding field 'CompleteRainValue.config'
        db.add_column('lizard_rainapp_completerainvalue', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)

        # Deleting field 'GeoObject.filterkey'
        db.delete_column('lizard_rainapp_geoobject', 'filterkey')

        # Adding field 'GeoObject.config'
        db.add_column('lizard_rainapp_geoobject', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)
Example #13
0
    def forwards(self, orm):
        """Add user/status/creation_date fields to Exercise and mark all
        existing exercises as status '5' (system).
        """
        # Adding field 'Exercise.user'
        db.add_column('exercises_exercise',
                      'user',
                      self.gf('django.db.models.fields.related.ForeignKey')(
                          to=orm['auth.User'], null=True, blank=True),
                      keep_default=False)

        # Adding field 'Exercise.status'
        db.add_column('exercises_exercise',
                      'status',
                      self.gf('django.db.models.fields.CharField')(
                          default='1', max_length=2),
                      keep_default=False)

        # Set the status 'system' for all existing exercises in the database
        db.start_transaction()
        if not db.dry_run:
            orm.Exercise.objects.all().update(status='5')
        db.commit_transaction()
        # Note: we need to open and close the transaction here for postgresql.
        #       see https://github.com/rolandgeider/wger/issues/26

        # Adding field 'Exercise.creation_date'
        db.add_column('exercises_exercise',
                      'creation_date',
                      self.gf('django.db.models.fields.DateField')(
                          auto_now_add=True, null=True, blank=True),
                      keep_default=False)
Example #14
0
    def forwards(self, orm):
        """Drop this app's tables plus its ContentType and South history rows."""

        # Do the deletes in a separate transaction, as database errors when
        # deleting a table that does not exist would cause a transaction to be
        # rolled back
        db.start_transaction()

        ContentType.objects.filter(app_label=self.app_name).delete()

        # Remove the entries from South's tables as we don't want to leave
        # incorrect entries in there.
        MigrationHistory.objects.filter(app_name=self.app_name).delete()

        # Commit the deletes to the various tables.
        db.commit_transaction()

        for table in self.tables:

            # Check to see if this table exists. db.execute will return
            # something like [(n, )] where n is the count(*)
            # NOTE: pg_tables is PostgreSQL-specific; the name interpolated
            # into the SQL comes from the trusted self.tables list.
            table_exists = db.execute(
                "SELECT count(*) from pg_tables where tablename = '{0}'".
                format(table))
            if table_exists and table_exists[0][0]:
                db.delete_table(table)
Example #15
0
    def backwards(self, orm):
        """Convert StudentClassRegModuleInfo.signup_verb from RegistrationType
        rows back to DataTree verbs, preserving each row's verb name.
        """
        #   Save the verb names
        verb_map = {}
        name_map = {}
        for item in StudentClassRegModuleInfo.objects.all().values_list('id', 'signup_verb_id'):
            verb_map[item[0]] = item[1]
        for id in verb_map:
            name_map[id] = RegistrationType.objects.get(id=verb_map[id]).name

        #   Delete the verbs (need to allow null values)
        db.start_transaction()
        db.alter_column('modules_studentclassregmoduleinfo', 'signup_verb_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['program.RegistrationType'], null=True))
        for item in StudentClassRegModuleInfo.objects.all():
            item.signup_verb = None
            item.save()
        db.commit_transaction()

        #   Changing field 'StudentClassRegModuleInfo.signup_verb'
        db.alter_column('modules_studentclassregmoduleinfo', 'signup_verb_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['datatree.DataTree'], null=True))
        db.start_transaction()
        #   Change verb IDs back to DataTrees
        for item in StudentClassRegModuleInfo.objects.all():
            item.signup_verb_id = DataTree.get_by_uri('V/Flags/Registration/%s' % name_map[item.id], create=True).id
            item.save()
        db.commit_transaction()

        # Restore NOT NULL now that every row points at a DataTree again.
        db.alter_column('modules_studentclassregmoduleinfo', 'signup_verb_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['datatree.DataTree']))
 def forwards(self, orm):
     """Create the (domain, date) index on DeviceReportEntry concurrently.

     CREATE INDEX CONCURRENTLY cannot run inside a transaction block on
     PostgreSQL, so South's migration transaction is committed first and a
     fresh one is started afterwards.
     """
     # Adding index on 'DeviceReportEntry', fields ['domain', 'date']
     db.commit_transaction()
     db.execute(
         'CREATE INDEX CONCURRENTLY "phonelog_devicereportentry_domain_date" ON "phonelog_devicereportentry" ("domain","date")'
     )
     db.start_transaction()
Example #17
0
 def test_dry_rename(self):
     """
     Test column renaming while --dry-run is turned on (should do nothing)
     See ticket #65
     """
     cursor = connection.cursor()
     db.create_table("test_drn", [('spam', models.BooleanField(default=False))])
     # Make sure we can select the column
     cursor.execute("SELECT spam FROM test_drn")
     # Rename it
     db.dry_run = True
     db.rename_column("test_drn", "spam", "eggs")
     db.dry_run = False
     # The old column must still exist: the rename was only a dry run.
     cursor.execute("SELECT spam FROM test_drn")
     db.commit_transaction()
     db.start_transaction()
     try:
         cursor.execute("SELECT eggs FROM test_drn")
     except Exception:
         # Expected: 'eggs' must not exist after a dry-run rename.
         # (Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
         # are not swallowed.)
         pass
     else:
         self.fail("Dry-renamed new column could be selected!")
     db.rollback_transaction()
     db.delete_table("test_drn")
     db.start_transaction()
    def forwards(self, orm):
        """Drop PostgreSQL varchar-pattern '_like' indexes from sofabed tables.

        The drops are split across several transactions so that one failure
        does not roll back the whole batch.
        """
        # Removing index on 'CaseActionData', fields ['action_type']
        db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_action_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_case_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_user_id_like")

        db.commit_transaction()
        db.start_transaction()

        db.execute("DROP INDEX IF EXISTS sofabed_casedata_case_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_closed_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_doc_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_domain_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_modified_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_opened_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_owner_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_user_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_version_like")

        db.commit_transaction()
        db.start_transaction()

        db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_case_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_identifier_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_referenced_type_like")
 def drop_db_model(self, django_class):
     """ Migrate the DB to remove a single model. """
     # Run the drop inside its own committed transaction; otherwise pending
     # triggers cause trouble in subsequent operations.
     db.start_transaction()
     db.delete_table(django_class._meta.db_table)
     db.commit_transaction()
    def forwards(self, orm):
        """Reduce BadgeData to a slug-keyed table by dropping legacy fields."""

        # Removing unique constraint on 'BadgeData', fields ['type', 'name']
        db.delete_unique('askbot_badgedata', ['type', 'name'])

        # Deleting field 'BadgeData.multiple'
        db.delete_column('askbot_badgedata', 'multiple')

        # Deleting field 'BadgeData.description'
        db.delete_column('askbot_badgedata', 'description')


        # Deleting field 'BadgeData.type'
        db.delete_column('askbot_badgedata', 'type')

        # Deleting field 'BadgeData.name'
        db.delete_column('askbot_badgedata', 'name')

        # Changing field 'BadgeData.slug'
        db.alter_column('askbot_badgedata', 'slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50))
        # Adding unique constraint on 'BadgeData', fields ['slug']
        # NOTE(review): this early return makes the try/except below dead
        # code, so the explicit unique constraint is never created —
        # presumably alter_column(unique=True) above already covers it and
        # the disabled block was only a South 0.7.3 workaround. Confirm.
        return
        try:#work around the South 0.7.3 bug
            db.start_transaction()
            db.create_unique('askbot_badgedata', ['slug'])
            db.commit_transaction()
        except:
            db.rollback_transaction()
Example #21
0
def rename_db_table(old_table_name, new_table_name):
    """ Rename a database table, in its own committed transaction. """
    db.start_transaction()
    db.rename_table(old_table_name, new_table_name)
    # Lazy %-args: the message is only formatted if DEBUG logging is on.
    logger.debug("Renamed table '%s' to '%s'",
                 old_table_name, new_table_name)
    db.commit_transaction()
    def forwards(self, orm):
        """Add Post.is_private and ReplyAddress.reply_action, relax
        ReplyAddress.post to nullable, and best-effort add
        auth_user.email_signature (may already exist).
        """
        # Adding field 'Post.is_private'
        db.start_transaction()
        safe_add_column(
            'askbot_post',
            'is_private',
            self.gf('django.db.models.fields.BooleanField')(default=False),
            keep_default=False)

        # Adding field 'ReplyAddress.reply_action'
        safe_add_column('askbot_replyaddress',
                        'reply_action',
                        self.gf('django.db.models.fields.CharField')(
                            default='auto_answer_or_comment', max_length=32),
                        keep_default=False)

        # Changing field 'ReplyAddress.post'
        db.alter_column(
            'askbot_replyaddress', 'post_id',
            self.gf('django.db.models.fields.related.ForeignKey')(
                null=True, to=orm['askbot.Post']))
        db.commit_transaction()

        # Best effort: the column may already exist, in which case the
        # transaction is simply rolled back.
        try:
            db.start_transaction()
            # Adding field 'User.interesting_tags'
            safe_add_column(u'auth_user',
                            'email_signature',
                            self.gf('django.db.models.fields.TextField')(
                                blank=True, default=''),
                            keep_default=False)
            db.commit_transaction()
        except:
            db.rollback_transaction()
Example #23
0
def rename_db_column(table_name, old_name, new_name):
    """ Rename a sensor's database column, in its own committed transaction. """
    db.start_transaction()
    db.rename_column(table_name, old_name, new_name)
    # Lazy %-args: the message is only formatted if DEBUG logging is on.
    logger.debug("Renamed column '%s' to '%s' on %s",
                 old_name, new_name, table_name)
    db.commit_transaction()
Example #24
0
    def forwards(self, orm):
        """Backfill Appointment audit fields, then make them non-null.

        The backfill runs only when the model actually has the new fields
        (guarded by hasattr), in its own transaction.
        """
        if hasattr(Appointment, 'created_by'):
            db.start_transaction()
            for a in Appointment.all_objects.all():
                # Attribute the appointment to its healer at its start time.
                a.created_by = a.healer.user
                a.created_date = a.start
                a.last_modified_by = a.healer.user
                a.last_modified_date = a.start
                a.save()
            db.commit_transaction()

        # Changing field 'Appointment.last_modified_date'
        db.alter_column(
            'healers_appointment', 'last_modified_date',
            self.gf('django.db.models.fields.DateTimeField')(default=None))

        # Changing field 'Appointment.created_by'
        db.alter_column(
            'healers_appointment', 'created_by_id',
            self.gf('django.db.models.fields.related.ForeignKey')(
                default=None, to=orm['auth.User']))

        # Changing field 'Appointment.last_modified_by'
        db.alter_column(
            'healers_appointment', 'last_modified_by_id',
            self.gf('django.db.models.fields.related.ForeignKey')(
                default=None, to=orm['auth.User']))

        # Changing field 'Appointment.created_date'
        db.alter_column(
            'healers_appointment', 'created_date',
            self.gf('django.db.models.fields.DateTimeField')(default=None))
Example #25
0
    def forwards(self, orm):
        """Drop this app's tables plus its ContentType and South history rows."""

        # Do the deletes in a separate transaction, as database errors when
        # deleting a table that does not exist would cause a transaction to be
        # rolled back
        db.start_transaction()

        ContentType.objects.filter(app_label=self.app_name).delete()

        # Remove the entries from South's tables as we don't want to leave
        # incorrect entries in there.
        MigrationHistory.objects.filter(app_name=self.app_name).delete()

        # Commit the deletes to the various tables.
        db.commit_transaction()

        for table in self.tables:

            # Check to see if this table exists. db.execute will return
            # something like [(n, )] where n is the count(*)
            # NOTE: pg_tables is PostgreSQL-specific; the name interpolated
            # into the SQL comes from the trusted self.tables list.
            table_exists = db.execute(
                "SELECT count(*) from pg_tables where tablename = '{0}'".format(table)
            )
            if table_exists and table_exists[0][0]:
                db.delete_table(table)
Example #26
0
    def forwards(self, orm):
        """Drop PostgreSQL varchar-pattern '_like' indexes from sofabed tables.

        The drops are split across several transactions so that one failure
        does not roll back the whole batch.
        """
        # Removing index on 'CaseActionData', fields ['action_type']
        db.execute(
            "DROP INDEX IF EXISTS sofabed_caseactiondata_action_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_case_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_user_id_like")

        db.commit_transaction()
        db.start_transaction()

        db.execute("DROP INDEX IF EXISTS sofabed_casedata_case_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_closed_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_doc_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_domain_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_modified_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_opened_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_owner_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_user_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_version_like")

        db.commit_transaction()
        db.start_transaction()

        db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_case_id_like")
        db.execute(
            "DROP INDEX IF EXISTS sofabed_caseindexdata_identifier_like")
        db.execute(
            "DROP INDEX IF EXISTS sofabed_caseindexdata_referenced_type_like")
    def forwards(self, orm):

        # move some models from maps to layers app
        
        # 0. add some missing fields (not for wfp)
        db.start_transaction()
        try:
            # Adding field 'Layer.bbox_top'
            db.add_column('maps_layer', 'bbox_top', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
            # Adding field 'Layer.bbox_bottom'
            db.add_column('maps_layer', 'bbox_bottom', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
            # Adding field 'Layer.bbox_left'
            db.add_column('maps_layer', 'bbox_left', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
            # Adding field 'Layer.bbox_right'
            db.add_column('maps_layer', 'bbox_right', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
            db.commit_transaction()
        except:
            print 'No need to create the fields, they are already there'
            db.rollback_transaction()
            
        # 1. layers_layer moved from maps_layer
        db.rename_table('maps_layer', 'layers_layer') 
        if not db.dry_run:
            orm['contenttypes.contenttype'].objects.filter(app_label='maps', model='layer').update(app_label='layers')
            
        # 2. layers_contactrole moved from maps_contactrole
        db.rename_table('maps_contactrole', 'layers_contactrole') 
        if not db.dry_run:
            orm['contenttypes.contenttype'].objects.filter(app_label='maps', model='contactrole').update(app_label='layers')
Example #28
0
def run_forwards(app, migrations, fake=False, db_dry_run=False, silent=False):
    """
    Runs the specified migrations forwards, in order.

    Each migration executes inside its own transaction and is rolled back
    if it raises.  Unless db_dry_run is set, a MigrationHistory record is
    saved afterwards to mark the migration as applied.
    """
    for migration in migrations:
        app_name = get_app_name(app)
        if not silent:
            print " > %s: %s" % (app_name, migration)
        klass = get_migration(app, migration)

        if fake:
            # Record the migration as applied without actually running it.
            if not silent:
                print "   (faked)"
        else:
            if db_dry_run:
                db.dry_run = True

            db.start_transaction()
            try:
                klass().forwards()
                db.execute_deferred_sql()
            except:
                # Roll back the partial migration, then re-raise so the
                # caller sees the failure.
                db.rollback_transaction()
                raise
            else:
                db.commit_transaction()

        if not db_dry_run:
            # Record us as having done this
            record = MigrationHistory.for_migration(app_name, migration)
            record.applied = datetime.datetime.utcnow()
            record.save()
    def forwards(self, orm):
        """Rename ``geom`` to ``geom_3d`` and add a new 2D ``geom`` column.

        The schema changes and the data backfill run in two separate
        transactions (PostgreSQL requires DDL and UPDATE batches split).
        """
        db.start_transaction()
        # geom becomes geom_3d
        db.rename_column('l_t_troncon', 'geom', 'geom_3d')
        db.execute("ALTER TABLE l_t_troncon ALTER COLUMN geom_3d SET DEFAULT NULL;")
        # Create 2D topology
        db.add_column('l_t_troncon', 'geom',
                      self.gf('django.contrib.gis.db.models.fields.LineStringField')(srid=settings.SRID, default=GEOSGeometry('LINESTRING EMPTY'), spatial_index=False),
                      keep_default=False)
        # geom becomes geom_3d
        db.rename_column('e_t_evenement', 'geom', 'geom_3d')
        db.execute("ALTER TABLE e_t_evenement ALTER COLUMN geom_3d SET DEFAULT NULL;")
        # Create 2D topology
        db.add_column('e_t_evenement', 'geom',
                      self.gf('django.contrib.gis.db.models.fields.GeometryField')(srid=settings.SRID, null=True, default=None, spatial_index=False))
        # Switch cadastre to 2D
        db.alter_column('l_t_troncon', 'geom_cadastre', self.gf('django.contrib.gis.db.models.fields.LineStringField')(srid=settings.SRID, null=True, spatial_index=False))
        db.commit_transaction()

        #
        # Data migration: project the stored 3D geometries down to 2D.
        #
        db.start_transaction()
        db.execute("UPDATE l_t_troncon SET geom = ST_force_2D(geom_3d);")
        db.execute("UPDATE e_t_evenement SET geom = ST_force_2D(geom_3d);")
        db.commit_transaction()
    def forwards(self, orm):
        """Delete duplicated (doc_id, perms) permission rows.

        If there are duplicated document permissions, creating the unique
        index would fail, so all but the first row of each duplicated pair
        are removed first.

        The data migration and the alter operation must live in separate
        transactions for PostgreSQL; see
        http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
        """
        try:
            db.start_transaction()
            # Find every (doc_id, perms) pair that occurs more than once.
            duplicated_records = DocumentPermission.objects \
                .values('doc_id', 'perms') \
                .annotate(id_count=models.Count('id')) \
                .filter(id_count__gt=1)

            # Delete all but the first permission row of each pair.
            for record in duplicated_records:
                dupe_ids = list(
                    DocumentPermission.objects
                    .values_list('id', flat=True)
                    .filter(
                        doc_id=record['doc_id'],
                        perms=record['perms'],
                    )[1:]
                )

                # logging.warning replaces the deprecated logging.warn alias;
                # lazy %-args avoid formatting when the level is disabled.
                logging.warning('Deleting permissions %s', dupe_ids)

                DocumentPermission.objects.filter(id__in=dupe_ids).delete()
            db.commit_transaction()
        except Exception:
            db.rollback_transaction()
            # Bare re-raise keeps the original traceback ("raise e" would
            # discard it on Python 2).
            raise
Example #31
0
def add_necessary_db_columns(model_class):
    """Ensure the table and all model columns for ``model_class`` exist.

    Creates the table if it is missing and adds any absent columns; nothing
    is ever renamed or removed.  Useful for recovery after a database
    exception.  All work happens in a single transaction.
    """
    db.start_transaction()

    # Make sure the table itself exists before inspecting its columns.
    create_db_table(model_class)

    table = model_class._meta.db_table
    existing = set(
        description[0]
        for description in connection.introspection.get_table_description(
            connection.cursor(), table))

    # Add a column for every model field not already present in the table.
    for name, field in _get_fields(model_class):
        if field.column in existing:
            continue
        logger.debug("Adding field '%s' to table '%s'" % (name, table))
        db.add_column(table, name, field)

    # db.add_column() may have queued deferred SQL (e.g. index creation).
    db.execute_deferred_sql()

    db.commit_transaction()
Example #32
0
def rename_db_column(model_class, old_name, new_name):
    """Rename a sensor's database column inside a single transaction."""
    table = model_class._meta.db_table
    db.start_transaction()
    db.rename_column(table, old_name, new_name)
    logger.debug("Renamed column '%s' to '%s' on %s" % (old_name, new_name, table))
    db.commit_transaction()
Example #33
0
def drop_field(app_name, model_name, field_name):
    """
    Drop the given field from the app's model.

    Raises ImproperlyConfigured when South is not installed, and re-raises
    any DatabaseError after rolling the transaction back.
    """
    # Table is typically appname_modelname, but it could be different
    #   always best to be sure.
    from .fields import CategoryM2MField, CategoryFKField
    from .settings import FIELD_REGISTRY
    try:
        from south.db import db
    except ImportError:
        # BUG FIX: the placeholder previously lacked the trailing "s"
        # conversion ("%(dependency)"), which raises ValueError at format
        # time instead of producing the intended message.
        raise ImproperlyConfigured(_('%(dependency)s must be installed for this command to work') %
                                   {'dependency': 'South'})
    mdl = models.get_model(app_name, model_name)

    fld = '%s.%s.%s' % (app_name, model_name, field_name)

    if isinstance(FIELD_REGISTRY[fld], CategoryFKField):
        # BUG FIX: same missing "s" conversions as above.
        print (_('Dropping ForeignKey %(field_name)s from %(model_name)s') %
               {'field_name': field_name, 'model_name': model_name})
        try:
            db.start_transaction()
            table_name = mdl._meta.db_table
            db.delete_column(table_name, field_name)
            db.commit_transaction()
        except DatabaseError:
            db.rollback_transaction()
            # Bare re-raise preserves the original traceback.
            raise
Example #34
0
File: db.py Project: daasara/riba
 def test_alter_constraints(self):
     """
     Tests that going from a PostiveIntegerField to an IntegerField drops
     the constraint on the database.
     """
     # Only applies to databases that support CHECK constraints
     if not db.has_check_constraints:
         return
     # Make the test table
     db.create_table("test_alterc", [
         ('num', models.PositiveIntegerField()),
     ])
     # Add in some test values
     db.execute("INSERT INTO test_alterc (num) VALUES (1)")
     db.execute("INSERT INTO test_alterc (num) VALUES (2)")
     # Ensure that adding a negative number is bad.
     # Commit first so the expected failure below is isolated in its own
     # transaction (PostgreSQL aborts a transaction on error).
     db.commit_transaction()
     db.start_transaction()
     try:
         db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
     except:
         db.rollback_transaction()
     else:
         self.fail("Could insert a negative integer into a PositiveIntegerField.")
     # Alter it to a normal IntegerField
     db.alter_column("test_alterc", "num", models.IntegerField())
     # It should now work
     db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
     db.delete_table("test_alterc")
     # We need to match up for tearDown
     db.start_transaction()
Example #35
0
def rename_db_column(model_class, old_name, new_name):
    """ Rename a sensor's database column. """
    table_name = model_class._meta.db_table
    db.start_transaction()
    # South emits the backend-specific ALTER TABLE ... RENAME statement.
    db.rename_column(table_name, old_name, new_name) 
    logger.debug("Renamed column '%s' to '%s' on %s" % (old_name, new_name, table_name))
    db.commit_transaction()
Example #36
0
 def createTable(self):
     """
     Sets up the database table using self.field_list.

     If no transaction is currently managed, table creation is wrapped in
     its own transaction; otherwise it joins the caller's transaction.
     """

     def run_deferred_sql():
         # South queues CREATE INDEX statements as deferred SQL; strip the
         # spurious "customforms". schema prefix before executing them.
         # (Previously this fix-up was duplicated in both branches below.)
         db.deferred_sql = [
             re.sub(r'^CREATE INDEX "customforms".', 'CREATE INDEX ', stmt)
             for stmt in db.deferred_sql
         ]
         db.execute_deferred_sql()

     if not self.field_list:
         self._getModelFieldList()

     if not transaction.is_managed():
         db.start_transaction()
         db.create_table(self._tname, tuple(self.field_list))
         run_deferred_sql()
         db.commit_transaction()
     else:
         db.create_table(self._tname, tuple(self.field_list))
         run_deferred_sql()
Example #37
0
def add_necessary_db_columns(model_class):
    '''
    Add any columns missing from the table backing model_class.

    Nothing is renamed or removed.  If a column cannot be added as-is
    (db.add_column raises ValueError), it is retried as a nullable column.
    All work happens inside one transaction.  This is available in case a
    database exception occurs.
    '''

    db.start_transaction()

    table = model_class._meta.db_table
    present = set(
        row[0] for row in connection.introspection.get_table_description(
            connection.cursor(), table))

    for name, field in _get_fields(model_class):
        if field.column in present:
            continue
        try:
            db.add_column(table, name, field)
        except ValueError:
            # Fall back to a nullable column so the addition can proceed.
            field.null = True
            db.add_column(table, name, field)

    # Run any deferred SQL collected by db.add_column() (e.g. indexes).
    db.execute_deferred_sql()

    db.commit_transaction()
Example #38
0
def add_necessary_db_columns(model_class):
    """Create ``model_class``'s table if needed and add any missing columns.

    Purely additive: no column or data is ever renamed or removed.  This is
    available in case a database exception occurs.
    """
    db.start_transaction()

    # The table itself may not exist yet.
    create_db_table(model_class)

    table = model_class._meta.db_table
    description = connection.introspection.get_table_description(
        connection.cursor(), table)
    known = set(col[0] for col in description)

    missing = [(name, field) for name, field in _get_fields(model_class)
               if field.column not in known]
    for name, field in missing:
        logger.debug("Adding field '%s' to table '%s'" %
                     (name, table))
        db.add_column(table, name, field)

    # Flush deferred SQL queued by db.add_column() (index creation etc.).
    db.execute_deferred_sql()

    db.commit_transaction()
Example #39
0
 def backwards(self, orm):
     """Revert the public-access migration on tardis_portal_experiment."""
     db.start_transaction()
     # Delete public access column
     db.delete_column('tardis_portal_experiment', 'public_access')
     # Renaming field 'Experiment.public' to 'Experiment.locked'
     # NOTE(review): arguments are (table, old, new), so this actually
     # renames 'locked' back to 'public' — confirm against the forwards
     # migration.
     db.rename_column('tardis_portal_experiment', 'locked', 'public')
     db.commit_transaction()
    def forwards(self, orm):
        """Add a unique ``name`` to BaseImage and relax Build.node uniqueness."""
        # Removing unique constraint on 'Build', fields ['node']
        db.delete_unique(u'firmware_build', ['node_id'])

        # Adding field 'BaseImage.name'
        db.add_column(u'firmware_baseimage',
                      'name',
                      self.gf('django.db.models.fields.CharField')(
                          default='baseimage', unique=False, max_length=256),
                      keep_default=False)

        # Dynamic value for field 'BaseImage.name': seed each row's name from
        # its image value before the unique constraint is added below.
        db.start_transaction()
        for base_image in orm['firmware.BaseImage'].objects.all():
            base_image.name = base_image.image
            base_image.save()
        db.commit_transaction()

        # Adding unique constraint to 'BaseImage.name'
        db.create_unique(u'firmware_baseimage', ['name'])

        # Changing field 'Build.node'
        db.alter_column(
            u'firmware_build', 'node_id',
            self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['nodes.Node']))
Example #41
0
 def test_alter_constraints(self):
     """
     Tests that going from a PostiveIntegerField to an IntegerField drops
     the constraint on the database.
     """
     # Only applies to databases that support CHECK constraints
     if not db.has_check_constraints:
         return
     # Make the test table
     db.create_table("test_alterc", [
         ('num', models.PositiveIntegerField()),
     ])
     # Add in some test values
     db.execute("INSERT INTO test_alterc (num) VALUES (1)")
     db.execute("INSERT INTO test_alterc (num) VALUES (2)")
     # Ensure that adding a negative number is bad.
     # Commit first so the expected failure below is isolated in its own
     # transaction (PostgreSQL aborts a transaction on error).
     db.commit_transaction()
     db.start_transaction()
     try:
         db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
     except:
         db.rollback_transaction()
     else:
         self.fail("Could insert a negative integer into a PositiveIntegerField.")
     # Alter it to a normal IntegerField
     db.alter_column("test_alterc", "num", models.IntegerField())
     # It should now work
     db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
     db.delete_table("test_alterc")
     # We need to match up for tearDown
     db.start_transaction()
Example #42
0
    def forwards(self, orm):
        """Collapse BadgeData down to a unique ``slug`` field."""

        # Removing unique constraint on 'BadgeData', fields ['type', 'name']
        #db.delete_unique('askbot_badgedata', ['type', 'name'])

        # Deleting field 'BadgeData.multiple'
        db.delete_column('askbot_badgedata', 'multiple')

        # Deleting field 'BadgeData.description'
        db.delete_column('askbot_badgedata', 'description')

        # Deleting field 'BadgeData.type'
        db.delete_column('askbot_badgedata', 'type')

        # Deleting field 'BadgeData.name'
        db.delete_column('askbot_badgedata', 'name')

        # Changing field 'BadgeData.slug'
        db.alter_column(
            'askbot_badgedata', 'slug',
            self.gf('django.db.models.fields.SlugField')(unique=True,
                                                         max_length=50))

        # Adding unique constraint on 'BadgeData', fields ['slug']
        try:  #work around the South 0.7.3 bug
            db.start_transaction()
            db.create_unique('askbot_badgedata', ['slug'])
            db.commit_transaction()
        except:
            # Best-effort: roll back and continue if the constraint cannot
            # be added (e.g. it already exists).
            db.rollback_transaction()
    def forwards(self, orm):
        """Move external-source tracking from Project onto Client.

        NOTE(review): the whole body is wrapped in ``except: pass`` — any
        failure, including a partially applied schema change, is silently
        ignored.
        """
        try:
            # Deleting model 'Log'
            db.delete_table('eff_log')

            db.add_column('eff_client', 'external_source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['eff.ExternalSource'], null=True), keep_default=False)

            db.add_column('eff_client', 'external_id', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True), keep_default=False)

            # Backfill each client's external id/source from its first
            # project (or a fixed default when it has none).
            db.start_transaction()
            for cli in Client.objects.all():
                cli.external_id = cli.name
                cli_proj = Project.objects.filter(client=cli)
                if cli_proj:
                    # NOTE(review): raw string interpolation into SQL — safe
                    # only because the interpolated value is an integer
                    # primary key, not user input.
                    ext_src_id = db.execute("select external_source_id from eff_project where id=%s" % cli_proj[0].id)[0][0]
                    cli.external_source = ExternalSource.objects.get(id=ext_src_id)
                else:
                    cli.external_source = ExternalSource.objects.get(name="DotprojectMachinalis")
                cli.save()
            db.commit_transaction()

            # Deleting field 'Project.external_source'
            db.delete_column('eff_project', 'external_source_id')

            # Deleting field 'Project.log'
            db.delete_column('eff_project', 'log_id')
        except:
            pass
Example #44
0
def drop_field(app_name, model_name, field_name):
    """
    Drop the given field from the app's model.

    Raises ImproperlyConfigured when South is not installed, and re-raises
    any DatabaseError after rolling the transaction back.
    """
    # Table is typically appname_modelname, but it could be different
    #   always best to be sure.
    from .fields import CategoryM2MField, CategoryFKField
    from .settings import FIELD_REGISTRY
    try:
        from south.db import db
    except ImportError:
        # BUG FIX: the placeholder previously lacked the trailing "s"
        # conversion ("%(dependency)"), which raises ValueError at format
        # time instead of producing the intended message.
        raise ImproperlyConfigured(
            _('%(dependency)s must be installed for this command to work') %
            {'dependency': 'South'})
    mdl = models.get_model(app_name, model_name)

    fld = '%s.%s.%s' % (app_name, model_name, field_name)

    if isinstance(FIELD_REGISTRY[fld], CategoryFKField):
        # BUG FIX: same missing "s" conversions as above.
        print(
            _('Dropping ForeignKey %(field_name)s from %(model_name)s') % {
                'field_name': field_name,
                'model_name': model_name
            })
        try:
            db.start_transaction()
            table_name = mdl._meta.db_table
            db.delete_column(table_name, field_name)
            db.commit_transaction()
        except DatabaseError:
            db.rollback_transaction()
            # Bare re-raise preserves the original traceback.
            raise
Example #45
0
def safe_add_column(table, column, column_data, keep_default=False):
    """Defensively add ``column`` to ``table``.

    When a user runs syncdb with askbot for the first time, auth_user is
    created together with the patched columns, so re-adding them here must
    not fail.  On MySQL/PostgreSQL the information schema is consulted
    first (this branch returns None); on other backends the add runs in its
    own transaction, returning True on success and False after a rollback.
    """
    if db.backend_name in ('mysql', 'postgres'):
        matches = db.execute(
            'select column_name from information_schema.columns where table_name=%s and column_name=%s',
            params=[table, column])
        if len(matches) == 0:
            db.add_column(table,
                          column,
                          column_data,
                          keep_default=keep_default)
    else:
        try:
            db.start_transaction()
            db.add_column(table,
                          column,
                          column_data,
                          keep_default=keep_default)
            db.commit_transaction()
        except:
            db.rollback_transaction()
            return False
        return True
Example #46
0
def add_necessary_db_columns(model_class):
    """ Creates a new table or relevant columns as necessary based on model_class.
        No columns or data are renamed or removed.
        This is available in case a database exception occurs.
    """
    db.start_transaction()

    # Create table if missing
    create_db_table(model_class)

    # Add field columns if missing
    table_name = model_class._meta.db_table
    fields = _get_fields(model_class)
    db_column_names = [row[0] for row in connection.introspection.get_table_description(connection.cursor(), table_name)]

    for field_name, field in fields:
        if field.column not in db_column_names:
            logger.debug("Adding field '%s' to table '%s'" % (field_name, table_name))
            db.add_column(table_name, field_name, field)


    # Some columns require deferred SQL to be run. This was collected
    # when running db.add_column().
    db.execute_deferred_sql()

    db.commit_transaction()
    def forwards(self, orm):
        """Add column-type metadata to Dataset and backfill existing rows.

        The schema changes are committed first so the data backfill runs in
        its own transaction (required by PostgreSQL when mixing DDL and
        data updates).
        """
        # Adding field 'Dataset.column_types'
        db.add_column('panda_dataset', 'column_types', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        # Adding field 'Dataset.typed_column_names'
        db.add_column('panda_dataset', 'typed_column_names', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        db.commit_transaction()     # Commit the first transaction
        db.start_transaction()      # Start the second, committed on completion

        if not db.dry_run:
            for dataset in orm.Dataset.objects.all():
                if dataset.initial_upload:
                    dataset.column_types = dataset.initial_upload.guessed_types

                    # Account for bug where columns sometimes were not copied across
                    if not dataset.columns:
                        dataset.columns = dataset.initial_upload.columns
                else:
                    # No upload to infer from: default every column to text.
                    dataset.column_types = ['unicode' for c in dataset.columns]

                dataset.typed_column_names = [None for c in dataset.columns]

                dataset.save()
Example #48
0
File: db.py Project: 10sr/hue
    def test_change_foreign_key_target(self):
        """Altering an FK column must repoint its constraint at the new table."""
        # Tables for FK to target
        User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
        db.create_table("test_fk_changed_target", [
            ('eggs', models.IntegerField(primary_key=True)),
        ])
        Egg = db.mock_model(model_name='Egg', db_table='test_fk_changed_target', db_tablespace='', pk_field_name='eggs', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
        # Table with a foreign key to the wrong table
        db.create_table("test_fk_changing", [
            ('egg', models.ForeignKey(User, null=True)),
        ])
        db.execute_deferred_sql()

        # Change foreign key pointing
        db.alter_column("test_fk_changing", "egg_id", models.ForeignKey(Egg, null=True))
        db.execute_deferred_sql()

        # Test that it is pointing at the right table now: pick an id that
        # does not exist in auth_user, so the inserts below succeed only if
        # the FK now targets test_fk_changed_target.
        try:
            non_user_id = db.execute("SELECT MAX(id) FROM auth_user")[0][0] + 1
        except (TypeError, IndexError):
            # Got a "None" or no records, treat as 0
            non_user_id = 17
        db.execute("INSERT INTO test_fk_changed_target (eggs) VALUES (%s)", [non_user_id])
        db.execute("INSERT INTO test_fk_changing (egg_id) VALUES (%s)", [non_user_id])
        db.commit_transaction()
        db.start_transaction()  # The test framework expects tests to end in transaction
Example #49
0
 def test_dry_rename(self):
     """
     Test column renaming while --dry-run is turned on (should do nothing)
     See ticket #65
     """
     cursor = connection.cursor()
     db.create_table("test_drn",
                     [('spam', models.BooleanField(default=False))])
     # Make sure we can select the column
     cursor.execute("SELECT spam FROM test_drn")
     # Rename it under dry-run: the rename must be a no-op.
     db.dry_run = True
     db.rename_column("test_drn", "spam", "eggs")
     db.dry_run = False
     cursor.execute("SELECT spam FROM test_drn")
     # Isolate the expected failure below in its own transaction.
     db.commit_transaction()
     db.start_transaction()
     try:
         cursor.execute("SELECT eggs FROM test_drn")
     except:
         pass
     else:
         self.fail("Dry-renamed new column could be selected!")
     db.rollback_transaction()
     db.delete_table("test_drn")
     db.start_transaction()
Example #50
0
 def deleteTable(self):
     """Drop the response table backing the current form."""
     table = self._tname
     db.start_transaction()
     db.delete_table(table)
     db.commit_transaction()
Example #51
0
def run_backwards(app, migrations, ignore=[], fake=False, db_dry_run=False, silent=False):
    """
    Runs the specified migrations backwards, in order, skipping those
    migrations in 'ignore'.

    Each migration runs inside its own transaction, rolled back on error.
    Unless db_dry_run is set, the matching MigrationHistory record is
    deleted afterwards.

    NOTE(review): the mutable default ``ignore=[]`` is shared between
    calls; safe only while callers never mutate it.
    """
    for migration in migrations:
        if migration not in ignore:
            app_name = get_app_name(app)
            if not silent:
                print " < %s: %s" % (app_name, migration)
            klass = get_migration(app, migration)
            if fake:
                # Record the migration as unapplied without running it.
                if not silent:
                    print "   (faked)"
            else:
                if db_dry_run:
                    db.dry_run = True

                db.start_transaction()
                try:
                    klass().backwards()
                    db.execute_deferred_sql()
                except:
                    # Roll back the partial migration, then re-raise.
                    db.rollback_transaction()
                    raise
                else:
                    db.commit_transaction()

            if not db_dry_run:
                # Record us as having not done this
                record = MigrationHistory.for_migration(app_name, migration)
                record.delete()
Example #52
0
    def forwards(self, orm):
        """Split FormsForm's response text into dedicated action tables.

        The existing form rows are snapshotted up front, the new action
        tables are created, and each old response is re-created as a
        FormsFormActionMessage linked through FormsFormAction.
        """
        db.start_transaction()
        # Snapshot existing forms before the schema changes below drop the
        # 'response' column.
        cursor = connection.cursor()
        cursor.execute('select * from zorna_forms_form')
        qs = cursor.fetchall()

        # Adding model 'FormsFormActionUrl'
        db.create_table('zorna_forms_form_action_urls', (
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('form', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['forms.FormsForm'], null=True)),
            ('url',
             self.gf('django.db.models.fields.CharField')(max_length=255)),
        ))
        db.send_create_signal('forms', ['FormsFormActionUrl'])

        # Adding model 'FormsFormAction'
        db.create_table('zorna_forms_form_actions', (
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('form', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['forms.FormsForm'], null=True)),
            ('content_type',
             self.gf('django.db.models.fields.related.ForeignKey')(
                 to=orm['contenttypes.ContentType'])),
            ('object_id', self.gf('django.db.models.fields.IntegerField')()),
        ))
        db.send_create_signal('forms', ['FormsFormAction'])

        # Adding model 'FormsFormActionMessage'
        db.create_table('zorna_forms_form_action_messages', (
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('form', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['forms.FormsForm'], null=True)),
            ('message', self.gf('django.db.models.fields.TextField')()),
        ))
        db.send_create_signal('forms', ['FormsFormActionMessage'])

        # Deleting field 'FormsForm.response'
        db.delete_column('zorna_forms_form', 'response')

        from django.contrib.contenttypes.management import update_contenttypes
        from django.db.models import get_app, get_models
        update_contenttypes(get_app('forms'), get_models())
        if not db.dry_run:
            # Commit the DDL before the data migration (PostgreSQL).
            db.commit_transaction()
            db.start_transaction()
            ct = orm['contenttypes.ContentType'].objects.get(
                app_label="forms", model="formsformactionmessage")
            # form[0] is the form id; form[13] presumably held the old
            # 'response' text — NOTE(review): confirm the column index
            # against the pre-migration table layout.
            for form in qs:
                rep = orm.FormsFormActionMessage(message=form[13],
                                                 form_id=form[0])
                rep.save()
                orm.FormsFormAction.objects.create(form_id=form[0],
                                                   object_id=rep.pk,
                                                   content_type=ct)
        db.commit_transaction()
Example #53
0
 def backwards(self, orm):
     """Remove new ci types"""
     # Rows referencing the new types (ids 11..999) are deleted before
     # cmdb_citype itself, presumably to satisfy foreign-key constraints.
     db.start_transaction()
     db.execute('DELETE FROM cmdb_cilayer_connected_types WHERE citype_id > 10 and citype_id < 1000;')
     db.execute('DELETE FROM cmdb_ciattribute_ci_types WHERE citype_id > 10 and citype_id < 1000;')
     db.execute('DELETE FROM cmdb_ci WHERE type_id > 10 and type_id < 1000;')
     db.execute('DELETE FROM cmdb_citype WHERE id > 10 and id < 1000;')
     db.commit_transaction()
Example #54
0
def rename_db_column(model_class, old_name, new_name):
    """Rename one column of ``model_class``'s table, inside a transaction."""
    table = model_class._meta.db_table
    db.start_transaction()
    db.rename_column(table, old_name, new_name)
    message = "Renamed column '%s' to '%s' on %s" % (old_name, new_name, table)
    logger.debug(message)
    db.commit_transaction()
Example #55
0
def delete_db_table(model_class):
    '''
    Drops the database table backing the given Django model class.
    '''
    db.start_transaction()
    db.delete_table(model_class._meta.db_table)
    db.commit_transaction()
 def forwards(self, orm):
     """Deduplicate tokens, then make UserToken.token unique."""
     if not db.dry_run:
         # Keep the lowest-id row for each token value; delete the rest.
         for token in orm['facebook_auth.UserToken'].objects.all():
             orm['facebook_auth.UserToken'].objects.filter(token=token.token, id__gt=token.id).delete()
     # Commit the data cleanup before the DDL so the two run in separate
     # transactions.
     db.commit_transaction()
     db.start_transaction()
     # Adding unique constraint on 'UserToken', fields ['token']
     db.create_unique(u'facebook_auth_usertoken', ['token'])
 def backwards(self, orm):
     """Drop the unique email constraint and restore username's previous definition."""
     db.start_transaction()
     USER_TABLE = orm['auth.user']._meta.db_table
     db.delete_unique(USER_TABLE, ['email'])
     # Restore username as a unique CharField of max_length 75.
     db.alter_column(USER_TABLE, 'username', models.CharField(_('username'), max_length=75, unique=True,
                    help_text=_('Required. 30 characters or fewer. Letters, numbers and '
                                '@/./+/-/_ characters')))
     db.commit_transaction()
 def forwards(self, orm):
     # Adding index on 'Group', fields ['project', 'first_release']
     # On PostgreSQL, CREATE INDEX CONCURRENTLY cannot run inside a
     # transaction block, so commit the current one first and reopen a
     # transaction afterwards for South's bookkeeping.
     if is_postgres():
         db.commit_transaction()
         db.execute("CREATE INDEX CONCURRENTLY sentry_groupedmessage_project_id_31335ae34c8ef983 ON sentry_groupedmessage (project_id, first_release_id)")
         db.start_transaction()
     else:
         db.create_index('sentry_groupedmessage', ['project_id', 'first_release_id'])