Code example #1
File: db.py  Project: 10sr/hue
    def test_change_foreign_key_target(self):
        # Tables for FK to target
        User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
        db.create_table("test_fk_changed_target", [
            ('eggs', models.IntegerField(primary_key=True)),
        ])
        Egg = db.mock_model(model_name='Egg', db_table='test_fk_changed_target', db_tablespace='', pk_field_name='eggs', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
        # Table with a foreign key to the wrong table
        db.create_table("test_fk_changing", [
            ('egg', models.ForeignKey(User, null=True)),
        ])
        db.execute_deferred_sql()

        # Change foreign key pointing
        db.alter_column("test_fk_changing", "egg_id", models.ForeignKey(Egg, null=True))
        db.execute_deferred_sql()

        # Test that it is pointing at the right table now
        try:
            non_user_id = db.execute("SELECT MAX(id) FROM auth_user")[0][0] + 1
        except (TypeError, IndexError):
            # Got a "None" or no records, treat as 0
            non_user_id = 17
        db.execute("INSERT INTO test_fk_changed_target (eggs) VALUES (%s)", [non_user_id])
        db.execute("INSERT INTO test_fk_changing (egg_id) VALUES (%s)", [non_user_id])
        db.commit_transaction()
        db.start_transaction()  # The test framework expects tests to end in transaction
Code example #2
File: utils.py  Project: rsandrini/dynamic-models
def rename_db_column(model_class, old_name, new_name):
    """ Renomear uma coluna de banco de dados do sensor. """
    table_name = model_class._meta.db_table
    db.start_transaction()
    db.rename_column(table_name, old_name, new_name) 
    logger.debug("Renamed column '%s' to '%s' on %s" % (old_name, new_name, table_name))
    db.commit_transaction()
Code example #3
File: migration.py  Project: 9gix/django-categories
def drop_field(app_name, model_name, field_name):
    """
    Drop the given field from the app's model
    """
    # Table is typically appname_modelname, but it could be different
    #   always best to be sure.
    from .fields import CategoryM2MField, CategoryFKField
    from .settings import FIELD_REGISTRY
    try:
        from south.db import db
    except ImportError:
        raise ImproperlyConfigured(_('%(dependency)s must be installed for this command to work') %
                                   {'dependency' : 'South'})
    mdl = models.get_model(app_name, model_name)

    fld = '%s.%s.%s' % (app_name, model_name, field_name)

    if isinstance(FIELD_REGISTRY[fld], CategoryFKField):
        print (_('Dropping ForeignKey %(field_name)s from %(model_name)s') %
               {'field_name' : field_name, 'model_name' : model_name})
        try:
            db.start_transaction()
            table_name = mdl._meta.db_table
            db.delete_column(table_name, field_name)
            db.commit_transaction()
        except DatabaseError, e:
            db.rollback_transaction()
            raise e
Code example #4
    def backwards(self, orm):
        
        # Adding field 'Dataset.typed_columns'
        db.add_column('panda_dataset', 'typed_columns', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        # Adding field 'Dataset.column_types'
        db.add_column('panda_dataset', 'column_types', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        # Adding field 'Dataset.typed_column_names'
        db.add_column('panda_dataset', 'typed_column_names', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        # Adding field 'Dataset.columns'
        db.add_column('panda_dataset', 'columns', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        db.commit_transaction()     # Commit the first transaction
        db.start_transaction()      # Start the second, committed on completion

        if not db.dry_run:
            for dataset in orm.Dataset.objects.all():
                columns = []
                typed_columns = []
                column_types = []
                typed_column_names = []

                for schema in dataset.column_schema:
                    columns.append(schema['name'])
                    typed_columns.append(schema['indexed'])
                    column_types.append(schema['type'])
                    typed_column_names.append(schema['indexed_name'])

                dataset.columns = columns
                dataset.typed_columns = typed_columns
                dataset.column_types = column_types
                dataset.typed_column_names = typed_column_names
                dataset.save()
Code example #5
File: db.py  Project: TradeHill2011/south
 def test_dry_rename(self):
     """
     Test column renaming while --dry-run is turned on (should do nothing)
     See ticket #65
     """
     cursor = connection.cursor()
     db.create_table("test_drn", [('spam', models.BooleanField(default=False))])
     # Make sure we can select the column
     cursor.execute("SELECT spam FROM test_drn")
     # Rename it
     db.dry_run = True
     db.rename_column("test_drn", "spam", "eggs")
     db.dry_run = False
     cursor.execute("SELECT spam FROM test_drn")
     db.commit_transaction()
     db.start_transaction()
     try:
         cursor.execute("SELECT eggs FROM test_drn")
     except:
         pass
     else:
         self.fail("Dry-renamed new column could be selected!")
     db.rollback_transaction()
     db.delete_table("test_drn")
     db.start_transaction()
Code example #6
    def forwards(self, orm):
        db.start_transaction()
        # geom becomes geom_3d
        db.rename_column('l_t_troncon', 'geom', 'geom_3d')
        db.execute("ALTER TABLE l_t_troncon ALTER COLUMN geom_3d SET DEFAULT NULL;")
        # Create 2D topology
        db.add_column('l_t_troncon', 'geom',
                      self.gf('django.contrib.gis.db.models.fields.LineStringField')(srid=settings.SRID, default=GEOSGeometry('LINESTRING EMPTY'), spatial_index=False),
                      keep_default=False)
        # geom becomes geom_3d
        db.rename_column('e_t_evenement', 'geom', 'geom_3d')
        db.execute("ALTER TABLE e_t_evenement ALTER COLUMN geom_3d SET DEFAULT NULL;")
        # Create 2D topology
        db.add_column('e_t_evenement', 'geom',
                      self.gf('django.contrib.gis.db.models.fields.GeometryField')(srid=settings.SRID, null=True, default=None, spatial_index=False))
        # Switch cadastre to 2D
        db.alter_column('l_t_troncon', 'geom_cadastre', self.gf('django.contrib.gis.db.models.fields.LineStringField')(srid=settings.SRID, null=True, spatial_index=False))
        db.commit_transaction()

        #
        # Data migration
        #
        db.start_transaction()
        db.execute("UPDATE l_t_troncon SET geom = ST_force_2D(geom_3d);")
        db.execute("UPDATE e_t_evenement SET geom = ST_force_2D(geom_3d);")
        db.commit_transaction()
Code example #7
File: db.py  Project: TradeHill2011/south
 def test_alter_constraints(self):
     """
     Tests that going from a PositiveIntegerField to an IntegerField drops
     the constraint on the database.
     """
     # Only applies to databases that support CHECK constraints
     if not db.has_check_constraints:
         return
     # Make the test table
     db.create_table("test_alterc", [
         ('num', models.PositiveIntegerField()),
     ])
     # Add in some test values
     db.execute("INSERT INTO test_alterc (num) VALUES (1)")
     db.execute("INSERT INTO test_alterc (num) VALUES (2)")
     # Ensure that adding a negative number is bad
     db.commit_transaction()
     db.start_transaction()
     try:
         db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
     except:
         db.rollback_transaction()
     else:
         self.fail("Could insert a negative integer into a PositiveIntegerField.")
     # Alter it to a normal IntegerField
     db.alter_column("test_alterc", "num", models.IntegerField())
     # It should now work
     db.execute("INSERT INTO test_alterc (num) VALUES (-3)")
     db.delete_table("test_alterc")
     # We need to match up for tearDown
     db.start_transaction()
Code example #8
    def forwards(self, orm):
        # If there are duplicated documents, we'll have an error when we try to
        # create this index. So to protect against that, we should delete those
        # documents before we create the index.

        # We need to wrap the data migration and alter operation in separate transactions for PostgreSQL
        # See: http://south.readthedocs.org/en/latest/migrationstructure.html#transactions
        try:
            db.start_transaction()
            duplicated_records = Document.objects \
                .values('content_type_id', 'object_id') \
                .annotate(id_count=models.Count('id')) \
                .filter(id_count__gt=1)

            # Delete all but the first document.
            for record in duplicated_records:
                docs = Document.objects \
                    .values_list('id', flat=True) \
                    .filter(
                        content_type_id=record['content_type_id'],
                        object_id=record['object_id'],
                    )[1:]

                docs = list(docs)

                logging.warn('Deleting documents %s' % docs)

                Document.objects.filter(id__in=docs).delete()
            db.commit_transaction()
        except Exception, e:
            db.rollback_transaction()
            raise e
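The comment in the example above points at a general South-on-PostgreSQL rule: a failed statement aborts the whole transaction, so the data repair and the schema change that depends on it have to run in separate transactions. A minimal sketch of that pattern, with a hypothetical myapp_document table and a unique constraint standing in for the follow-up schema operation (assumes "from south.db import db", as in the surrounding examples):

    def forwards(self, orm):
        # Transaction 1: repair the data so the constraint can be satisfied.
        db.start_transaction()
        try:
            db.execute(
                "DELETE FROM myapp_document WHERE id NOT IN "
                "(SELECT MIN(id) FROM myapp_document "
                "GROUP BY content_type_id, object_id)")
            db.commit_transaction()
        except Exception:
            db.rollback_transaction()
            raise

        # Transaction 2: add the constraint the cleanup made possible.
        db.start_transaction()
        db.create_unique('myapp_document', ['content_type_id', 'object_id'])
        db.commit_transaction()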
Code example #9
    def forwards(self, orm):
        "Write your forwards methods here."
        db.start_transaction()
        self.set_foreign_key_checks(False)

        # asset
        for model in orm.AssetModel.objects.all():
            new_model = orm.PricingObjectModel.objects.create(
                model_id=model.model_id,
                name=model.name,
                manufacturer=model.manufacturer,
                category=model.category,
                type_id=1,
            )
            model.assetinfo_set.update(model=new_model)

        # tenant
        for model in orm.TenantGroup.objects.all():
            new_model = orm.PricingObjectModel.objects.create(
                name=model.name,
                model_id=model.group_id,
                type_id=3,
            )
            model.tenants.update(model=new_model)

        # move base usages over 100
        self.bumped_auto_increment(101 + orm.BaseUsage.objects.count())
        self.update_usage_id()

        self.set_foreign_key_checks(True)
        db.commit_transaction()
Code example #10
File: utils.py  Project: rsandrini/dynamic-models
def add_necessary_db_columns(model_class):
    """ Cria nova tabela ou colunas pertinentes, se necessário com base no model_class.
         Sem colunas ou dados são renomeados ou removidos.
         Esta opção está disponível no caso de uma exceção de banco de dados ocorre.
    """
    db.start_transaction()

    # Create table if missing
    create_db_table(model_class)

    # Add field columns if missing
    table_name = model_class._meta.db_table
    fields = _get_fields(model_class)
    db_column_names = [row[0] for row in connection.introspection.get_table_description(connection.cursor(), table_name)]

    for field_name, field in fields:
        if field.column not in db_column_names:
            logger.debug("Adding field '%s' to table '%s'" % (field_name, table_name))
            db.add_column(table_name, field_name, field)


    # Some columns require deferred SQL to be run. This was collected
    # when running db.add_column().
    db.execute_deferred_sql()

    db.commit_transaction()
Code example #11
File: utils.py  Project: DarioGT/dynamic-models
def add_necessary_db_columns(model_class):
    """ Creates new table or relevant columns as necessary based on the model_class.
        No columns or data are renamed or removed.
        This is available in case a database exception occurs.
    """
    db.start_transaction()

    # Create table if missing
    create_db_table(model_class)

    # Add field columns if missing
    table_name = model_class._meta.db_table
    fields = _get_fields(model_class)
    db_column_names = [row[0] for row in connection.introspection.get_table_description(connection.cursor(), table_name)]

    for field_name, field in fields:
        if field.column not in db_column_names:
            logger.debug("Adding field '%s' to table '%s'" % (field_name, table_name))
            db.add_column(table_name, field_name, field)


    # Some columns require deferred SQL to be run. This was collected 
    # when running db.add_column().
    db.execute_deferred_sql()

    db.commit_transaction()
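For context on how a helper like this is usually driven: the dynamic-models pattern builds the model class at runtime and then asks the utility to bring the schema up to date. A minimal sketch under that assumption; the Reading class, its fields, and the 'myapp.models' module path are illustrative only:

from django.db import models

# Build a model class at runtime instead of declaring it in models.py.
Reading = type('Reading', (models.Model,), {
    '__module__': 'myapp.models',              # hypothetical app module
    'value': models.FloatField(null=True),
    'recorded_at': models.DateTimeField(null=True),
})

# Creates the table if it is missing (via create_db_table) and then adds
# any columns that have appeared since the table was first created.
add_necessary_db_columns(Reading)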
Code example #12
File: handlers.py  Project: das-10/django-dynamo
    def _class_prepared_handler(sender, **kwargs):
        """ Signal handler for class_prepared. 
            This will be run for every model, looking for the moment when all
            dependent models are prepared for the first time. It will then run
            the given function, only once.
        """

        sender_app=sender._meta.app_label.lower()+'.'+sender._meta.object_name
        already_prepared=set([sender_app])
        for app,models in app_cache.app_models.items():
            for model_name,model in models.items():
                already_prepared.add(app.lower()+'.'+model_name)
                
        if all([x in already_prepared for x in dependencies]):
            db.start_transaction()
            try:
                # We need to disconnect, otherwise each new dynamo model generation
                # will trigger it and cause a "maximum recursion error"
                class_prepared.disconnect(_class_prepared_handler,weak=False)                
                fn()
            except DatabaseError, message:
                # If tables are missing altogether, not much we can do
                # until syncdb/migrate is run. "The code must go on" in this 
                # case, without running our function completely. At least
                # database operations will be rolled back.
                db.rollback_transaction()
                # Better connect again
                if message != 'no such table: dynamo_metamodel':
                    class_prepared.connect(_class_prepared_handler, weak=False)
                else:
                    raise
            else:
                db.commit_transaction()
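The handler above closes over dependencies (a list of 'app.Model' labels) and fn, both supplied by an enclosing helper that finally wires the handler to the class_prepared signal. A rough sketch of what that wrapper might look like; the name when_classes_prepared and its exact signature are assumptions, not django-dynamo's documented API:

from django.db.models.signals import class_prepared

def when_classes_prepared(dependencies, fn):
    """ Run fn once, inside a transaction, as soon as every model listed
        in dependencies (e.g. 'dynamo.MetaModel') has been prepared. """
    def _class_prepared_handler(sender, **kwargs):
        # ... body exactly as in the example above ...
        pass
    # weak=False keeps the closure referenced until it disconnects itself.
    class_prepared.connect(_class_prepared_handler, weak=False)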
Code example #13
File: utils.py  Project: das-10/django-dynamo
def add_necessary_db_columns(model_class):
    '''
    Takes a Django model class and creates relevant columns as necessary based
    on the model_class. No columns or data are renamed or removed.
    This is available in case a database exception occurs.
    '''

    db.start_transaction()

    # Add field columns if missing
    table_name = model_class._meta.db_table
    fields = _get_fields(model_class)
    db_column_names = [row[0] for row in connection.introspection.get_table_description(connection.cursor(), table_name)]
    for field_name, field in fields:
        if field.column not in db_column_names:
            try:
                db.add_column(table_name, field_name, field)
            except ValueError:
                field.null=True
                db.add_column(table_name, field_name, field)


    # Some columns require deferred SQL to be run. This was collected 
    # when running db.add_column().
    db.execute_deferred_sql()

    db.commit_transaction()
Code example #14
    def forwards(self, orm):
        
        # Removing index on 'CaseActionData', fields ['action_type']
        db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_action_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_case_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseactiondata_user_id_like")

        db.commit_transaction()
        db.start_transaction()

        db.execute("DROP INDEX IF EXISTS sofabed_casedata_case_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_closed_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_doc_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_domain_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_modified_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_opened_by_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_owner_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_type_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_user_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_casedata_version_like")

        db.commit_transaction()
        db.start_transaction()

        db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_case_id_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_identifier_like")
        db.execute("DROP INDEX IF EXISTS sofabed_caseindexdata_referenced_type_like")
Code example #15
File: db.py  Project: pombredanne/django-dymo
def create_auto_m2m_tables(model_class):
    " Create tables for ManyToMany fields "
    for f in model_class._meta.many_to_many:
        if f.rel.through:
            try:
                # Django 1.2+
                through = f.rel.through
            except AttributeError:
                # Django 1.1 and below
                through = f.rel.through_model

        if (not f.rel.through) or getattr(through._meta, "auto_created", None):

            # Create the standard implied M2M table
            m2m_table_name = f.m2m_db_table()
            if (connection.introspection.table_name_converter(m2m_table_name) 
                        not in connection.introspection.table_names()):

                db.start_transaction()
                m2m_column_name = f.m2m_column_name()[:-3] # without "_id"
                m2m_reverse_name = f.m2m_reverse_name()[:-3] # without "_id"
                db.create_table(f.m2m_db_table(), (
                    ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
                    (m2m_column_name, models.ForeignKey(model_class, null=False)),
                    (m2m_reverse_name, models.ForeignKey(f.rel.to, null=False))
                ))
                db.create_unique(f.m2m_db_table(), [f.m2m_column_name(), f.m2m_reverse_name()])
                #db.execute_deferred_sql()
                db.commit_transaction()
                logger.debug("Created table '%s'" % m2m_table_name)
Code example #16
    def forwards(self, orm):

        # move some models from maps to layers app
        
        # 0. add some missing fields (not for wfp)
        db.start_transaction()
        try:
            # Adding field 'Layer.bbox_top'
            db.add_column('maps_layer', 'bbox_top', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
            # Adding field 'Layer.bbox_bottom'
            db.add_column('maps_layer', 'bbox_bottom', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
            # Adding field 'Layer.bbox_left'
            db.add_column('maps_layer', 'bbox_left', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
            # Adding field 'Layer.bbox_right'
            db.add_column('maps_layer', 'bbox_right', self.gf('django.db.models.fields.FloatField')(null=True, blank=True), keep_default=False)
            db.commit_transaction()
        except:
            print 'No need to create the fields, they are already there'
            db.rollback_transaction()
            
        # 1. layers_layer moved from maps_layer
        db.rename_table('maps_layer', 'layers_layer') 
        if not db.dry_run:
            orm['contenttypes.contenttype'].objects.filter(app_label='maps', model='layer').update(app_label='layers')
            
        # 2. layers_contactrole moved from maps_contactrole
        db.rename_table('maps_contactrole', 'layers_contactrole') 
        if not db.dry_run:
            orm['contenttypes.contenttype'].objects.filter(app_label='maps', model='contactrole').update(app_label='layers')
Code example #17
    def forwards(self, orm):
        db.start_transaction()
        db.clear_table('lizard_rainapp_rainvalue')
        db.clear_table('lizard_rainapp_completerainvalue')
        db.clear_table('lizard_rainapp_geoobject')
        db.commit_transaction()

        # Adding model 'RainappConfig'
        db.create_table('lizard_rainapp_rainappconfig', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=128)),
            ('jdbcsource', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['lizard_fewsjdbc.JdbcSource'])),
            ('filter_id', self.gf('django.db.models.fields.CharField')(max_length=128)),
        ))
        db.send_create_signal('lizard_rainapp', ['RainappConfig'])

        # Adding field 'RainValue.config'
        db.add_column('lizard_rainapp_rainvalue', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)

        # Adding field 'CompleteRainValue.config'
        db.add_column('lizard_rainapp_completerainvalue', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)

        # Deleting field 'GeoObject.filterkey'
        db.delete_column('lizard_rainapp_geoobject', 'filterkey')

        # Adding field 'GeoObject.config'
        db.add_column('lizard_rainapp_geoobject', 'config', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['lizard_rainapp.RainappConfig']), keep_default=False)
Code example #18
    def test_alter_column_postgres_multiword(self):
        """
        Tests altering columns with multiple words in Postgres types (issue #125)
        e.g. 'datetime with time zone', look at django/db/backends/postgresql/creation.py
        """
        db.create_table("test_multiword", [
            ('col_datetime', models.DateTimeField(null=True)),
            ('col_integer', models.PositiveIntegerField(null=True)),
            ('col_smallint', models.PositiveSmallIntegerField(null=True)),
            ('col_float', models.FloatField(null=True)),
        ])
        
        # test if 'double precision' is preserved
        db.alter_column('test_multiword', 'col_float', models.FloatField('float', null=True))

        # test if 'CHECK ("%(column)s" >= 0)' is stripped
        db.alter_column('test_multiword', 'col_integer', models.PositiveIntegerField(null=True))
        db.alter_column('test_multiword', 'col_smallint', models.PositiveSmallIntegerField(null=True))

        # test if 'with timezone' is preserved
        if db.backend_name == "postgres":
            db.start_transaction()
            db.execute("INSERT INTO test_multiword (col_datetime) VALUES ('2009-04-24 14:20:55+02')")
            db.alter_column('test_multiword', 'col_datetime', models.DateTimeField(auto_now=True))
            assert db.execute("SELECT col_datetime = '2009-04-24 14:20:55+02' FROM test_multiword")[0][0]
            db.rollback_transaction()

        
        db.delete_table("test_multiword")
Code example #19
    def forwards(self, orm):

        # Removing unique constraint on 'BadgeData', fields ['type', 'name']
        db.delete_unique('askbot_badgedata', ['type', 'name'])

        # Deleting field 'BadgeData.multiple'
        db.delete_column('askbot_badgedata', 'multiple')

        # Deleting field 'BadgeData.description'
        db.delete_column('askbot_badgedata', 'description')


        # Deleting field 'BadgeData.type'
        db.delete_column('askbot_badgedata', 'type')

        # Deleting field 'BadgeData.name'
        db.delete_column('askbot_badgedata', 'name')

        # Changing field 'BadgeData.slug'
        db.alter_column('askbot_badgedata', 'slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50))
        # Adding unique constraint on 'BadgeData', fields ['slug']
        return
        try:  # work around the South 0.7.3 bug
            db.start_transaction()
            db.create_unique('askbot_badgedata', ['slug'])
            db.commit_transaction()
        except:
            db.rollback_transaction()
Code example #20
    def forwards(self, orm):
        
        try:
            # Deleting model 'Log'
            db.delete_table('eff_log')
        
            db.add_column('eff_client', 'external_source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['eff.ExternalSource'], null=True), keep_default=False)

            db.add_column('eff_client', 'external_id', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True), keep_default=False)

            db.start_transaction()
            for cli in Client.objects.all():
                cli.external_id = cli.name
                cli_proj = Project.objects.filter(client=cli)
                if cli_proj:
                    ext_src_id = db.execute("select external_source_id from eff_project where id=%s" % cli_proj[0].id)[0][0]
                    cli.external_source = ExternalSource.objects.get(id=ext_src_id)
                else:
                    cli.external_source = ExternalSource.objects.get(name="DotprojectMachinalis")
                cli.save()
            db.commit_transaction()

            # Deleting field 'Project.external_source'
            db.delete_column('eff_project', 'external_source_id')

            # Deleting field 'Project.log'
            db.delete_column('eff_project', 'log_id')
        except:
            pass
Code example #21
File: migration.py  Project: akaihola/django-south
def run_forwards(app, migrations, fake=False, db_dry_run=False, silent=False):
    """
    Runs the specified migrations forwards, in order.
    """
    for migration in migrations:
        app_name = get_app_name(app)
        if not silent:
            print " > %s: %s" % (app_name, migration)
        klass = get_migration(app, migration)

        if fake:
            if not silent:
                print "   (faked)"
        else:
            if db_dry_run:
                db.dry_run = True
                
            db.start_transaction()
            try:
                klass().forwards()
                db.execute_deferred_sql()
            except:
                db.rollback_transaction()
                raise
            else:
                db.commit_transaction()

        if not db_dry_run:
            # Record us as having done this
            record = MigrationHistory.for_migration(app_name, migration)
            record.applied = datetime.datetime.utcnow()
            record.save()
Code example #22
 def drop_db_model(self, django_class):
     """ Migrate the DB to remove a single model. """
     # Drop the table. Also force a commit, or we'll have trouble with pending triggers in future operations.
     table_name = django_class._meta.db_table
     db.start_transaction()
     db.delete_table(table_name)
     db.commit_transaction()
Code example #23
File: migration.py  Project: akaihola/django-south
def run_backwards(app, migrations, ignore=[], fake=False, db_dry_run=False, silent=False):
    """
    Runs the specified migrations backwards, in order, skipping those
    migrations in 'ignore'.
    """
    for migration in migrations:
        if migration not in ignore:
            app_name = get_app_name(app)
            if not silent:
                print " < %s: %s" % (app_name, migration)
            klass = get_migration(app, migration)
            if fake:
                if not silent:
                    print "   (faked)"
            else:
                if db_dry_run:
                    db.dry_run = True
                
                db.start_transaction()
                try:
                    klass().backwards()
                    db.execute_deferred_sql()
                except:
                    db.rollback_transaction()
                    raise
                else:
                    db.commit_transaction()

            if not db_dry_run:
                # Record us as having not done this
                record = MigrationHistory.for_migration(app_name, migration)
                record.delete()
Code example #24
    def forwards(self, orm):
        
        # Adding field 'Dataset.column_types'
        db.add_column('panda_dataset', 'column_types', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        # Adding field 'Dataset.typed_column_names'
        db.add_column('panda_dataset', 'typed_column_names', self.gf('panda.fields.JSONField')(default=None, null=True), keep_default=False)

        db.commit_transaction()     # Commit the first transaction
        db.start_transaction()      # Start the second, committed on completion

        if not db.dry_run:
            for dataset in orm.Dataset.objects.all():
                if dataset.initial_upload:
                    dataset.column_types = dataset.initial_upload.guessed_types

                    # Account for bug where columns sometimes were not copied across
                    if not dataset.columns:
                        dataset.columns = dataset.initial_upload.columns
                else:
                    dataset.column_types = ['unicode' for c in dataset.columns]

                dataset.typed_column_names = [None for c in dataset.columns]

                dataset.save()
Code example #25
File: __init__.py  Project: Code4SA/pombola
    def forwards(self, orm):

        # Do the deletes in a separate transaction, as database errors when
        # deleting a table that does not exist would cause a transaction to be
        # rolled back
        db.start_transaction()

        ContentType.objects.filter(app_label=self.app_name).delete()

        # Remove the entries from South's tables as we don't want to leave
        # incorrect entries in there.
        MigrationHistory.objects.filter(app_name=self.app_name).delete()

        # Commit the deletes to the various tables.
        db.commit_transaction()

        for table in self.tables:

            # Check to see if this table exists. db.execute will return
            # something like [(n, )] where n is the count(*)
            table_exists = db.execute(
                "SELECT count(*) from pg_tables where tablename = '{0}'".format(table)
            )
            if table_exists and table_exists[0][0]:
                db.delete_table(table)
Code example #26
File: utils.py  Project: DarioGT/dynamic-models
def rename_db_column(model_class, old_name, new_name):
    """ Rename a sensor's database column. """
    table_name = model_class._meta.db_table
    db.start_transaction()
    db.rename_column(table_name, old_name, new_name) 
    logger.debug("Renamed column '%s' to '%s' on %s" % (old_name, new_name, table_name))
    db.commit_transaction()
Code example #27
 def forwards(self, orm):
     if not db.dry_run:
         for token in orm['facebook_auth.UserToken'].objects.all():
             orm['facebook_auth.UserToken'].objects.filter(token=token.token, id__gt=token.id).delete()
     db.commit_transaction()
     db.start_transaction()
     # Adding unique constraint on 'UserToken', fields ['token']
     db.create_unique(u'facebook_auth_usertoken', ['token'])
Code example #28
 def forwards(self, orm):
     # Adding index on 'Group', fields ['project', 'first_release']
     if is_postgres():
         db.commit_transaction()
         db.execute("CREATE INDEX CONCURRENTLY sentry_groupedmessage_project_id_31335ae34c8ef983 ON sentry_groupedmessage (project_id, first_release_id)")
         db.start_transaction()
     else:
         db.create_index('sentry_groupedmessage', ['project_id', 'first_release_id'])
Code example #29
File: utils.py  Project: das-10/django-dynamo
def delete_db_table(model_class):
    '''
    Takes a Django model class and deletes the database table.
    '''
    table_name = model_class._meta.db_table
    db.start_transaction()
    db.delete_table(table_name)
    db.commit_transaction()
Code example #30
File: 0024_bumped_id_ci_type.py  Project: 4i60r/ralph
 def backwards(self, orm):
     """Remove new ci types"""
     db.start_transaction()
     db.execute('DELETE FROM cmdb_cilayer_connected_types WHERE citype_id > 10 and citype_id < 1000;')
     db.execute('DELETE FROM cmdb_ciattribute_ci_types WHERE citype_id > 10 and citype_id < 1000;')
     db.execute('DELETE FROM cmdb_ci WHERE type_id > 10 and type_id < 1000;')
     db.execute('DELETE FROM cmdb_citype WHERE id > 10 and id < 1000;')
     db.commit_transaction()
Code example #31
File: 0008_copy_objects.py  Project: fossabot/noc
    def forwards(self):
        def qget(map, key):
            if key is None:
                return None
            return map[key]

        db.start_transaction()
        # Fill administrative domains
        location2domain = {}
        for id, name, description in db.execute(
                "SELECT id,name,description FROM cm_objectlocation"):
            db.execute(
                "INSERT INTO sa_administrativedomain(name,description) VALUES(%s,%s)",
                [name, description])
            location2domain[id] = db.execute(
                "SELECT id FROM sa_administrativedomain WHERE name=%s",
                [name])[0][0]
        # Fill groups
        category2group = {}
        for id, name, description in db.execute(
                "SELECT id,name,description FROM cm_objectcategory"):
            db.execute(
                "INSERT INTO sa_objectgroup(name,description) VALUES(%s,%s)",
                [name, description])
            category2group[id] = db.execute(
                "SELECT id FROM sa_objectgroup WHERE name=%s", [name])[0][0]
        #
        ManagedObject = db.mock_model(model_name='ManagedObject',
                                      db_table='sa_managedobject',
                                      db_tablespace='',
                                      pk_field_name='id',
                                      pk_field_type=models.AutoField)
        db.add_column("cm_config", "managed_object",
                      models.ForeignKey(ManagedObject, null=True))

        # Move objects
        for id,repo_path,activator_id,profile_name,scheme,address,port,user,password,super_password,remote_path,location_id,trap_source_ip,trap_community\
            in db.execute("SELECT id,repo_path,activator_id,profile_name,scheme,address,port,\"user\",password,super_password,remote_path,location_id,trap_source_ip,trap_community FROM cm_config"):
            name = os.path.basename(repo_path)
            db.execute(
                "INSERT INTO sa_managedobject(name,repo_path,activator_id,profile_name,scheme,address,port,\"user\",password,super_password,remote_path,administrative_domain_id,trap_source_ip,trap_community) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                [
                    name, repo_path, activator_id, profile_name, scheme,
                    address, port, user, password, super_password, remote_path,
                    location2domain[location_id], trap_source_ip,
                    trap_community
                ])
            new_id = db.execute(
                "SELECT id FROM sa_managedobject WHERE name=%s", [name])[0][0]
            for object_id, objectcategory_id in db.execute(
                    "SELECT object_id,objectcategory_id FROM cm_object_categories WHERE object_id=%s",
                [id]):
                db.execute(
                    "INSERT INTO sa_managedobject_groups(manageobject_id,objectgroup_id) VALUES(%s,%s)",
                    [new_id, category2group[objectcategory_id]])
            db.execute("UPDATE cm_config SET managed_object_id=%s WHERE id=%s",
                       [new_id, id])
        # Move user access
        for category_id, location_id, user_id in db.execute(
                "SELECT category_id,location_id,user_id FROM cm_objectaccess"):
            db.execute(
                "INSERT INTO sa_useraccess(user_id,administrative_domain_id,group_id) VALUES(%s,%s,%s)",
                [
                    user_id,
                    qget(location2domain, location_id),
                    qget(category2group, category_id)
                ])
        db.execute(
            "ALTER TABLE cm_config ALTER managed_object_id SET NOT NULL")

        # Migrate ObjectNotify
        ObjectGroup = db.mock_model(model_name='ObjectGroup',
                                    db_table='sa_objectgroup',
                                    db_tablespace='',
                                    pk_field_name='id',
                                    pk_field_type=models.AutoField)
        AdministrativeDomain = db.mock_model(
            model_name='AdministrativeDomain',
            db_table='sa_administrativedomain',
            db_tablespace='',
            pk_field_name='id',
            pk_field_type=models.AutoField)

        db.add_column(
            "cm_objectnotify", "administrative_domain",
            models.ForeignKey(AdministrativeDomain,
                              verbose_name="Administrative Domain",
                              blank=True,
                              null=True))
        db.add_column(
            "cm_objectnotify", "group",
            models.ForeignKey(ObjectGroup,
                              verbose_name="Group",
                              blank=True,
                              null=True))
        for id, category_id, location_id in db.execute(
                "SELECT id,category_id,location_id FROM cm_objectnotify"):
            db.execute(
                "UPDATE cm_objectnotify SET administrative_domain_id=%s,group_id=%s WHERE id=%s",
                [
                    qget(location2domain, location_id),
                    qget(category2group, category_id), id
                ])
        db.commit_transaction()
Code example #32
    fld = '%s.%s.%s' % (app_name, model_name, field_name)

    if isinstance(registry._field_registry[fld], CategoryFKField):
        print(
            _('Dropping ForeignKey %(field_name)s from %(model_name)s') % {
                'field_name': field_name,
                'model_name': model_name
            })
        try:
            db.start_transaction()
            table_name = mdl._meta.db_table
            db.delete_column(table_name, field_name)
            db.commit_transaction()
        except DatabaseError, e:
            db.rollback_transaction()
            raise e
    elif isinstance(registry._field_registry[fld], CategoryM2MField):
        print(
            _('Dropping Many2Many table between %(model_name)s and %(category_table)s'
              ) % {
                  'model_name': model_name,
                  'category_table': 'category'
              })
        try:
            db.start_transaction()
            db.delete_table(table_name, cascade=False)
            db.commit_transaction()
        except DatabaseError, e:
            db.rollback_transaction()
            raise e
Code example #33
def migrate_app(sender,
                app,
                created_models=None,
                verbosity=False,
                *args,
                **kwargs):
    """
    Migrate all models of this app registered
    """
    from .fields import CategoryM2MField, CategoryFKField
    from .models import Category
    from .registration import registry
    import sys
    import StringIO

    org_stderror = sys.stderr
    sys.stderr = StringIO.StringIO()  # south will print out errors to stderr
    try:
        from south.db import db
    except ImportError:
        raise ImproperlyConfigured(
            _('%(dependency)s must be installed for this command to work') %
            {'dependency': 'South'})
    # pull the information from the registry
    if isinstance(app, basestring):
        app_name = app
    else:
        app_name = app.__name__.split('.')[-2]

    fields = [
        fld for fld in registry._field_registry.keys()
        if fld.startswith(app_name)
    ]
    # call the south commands to add the fields/tables
    for fld in fields:
        app_name, model_name, field_name = fld.split('.')

        # Table is typically appname_modelname, but it could be different
        #   always best to be sure.
        mdl = models.get_model(app_name, model_name)

        if isinstance(registry._field_registry[fld], CategoryFKField):
            try:
                db.start_transaction()
                table_name = mdl._meta.db_table
                registry._field_registry[fld].default = -1
                db.add_column(table_name,
                              field_name,
                              registry._field_registry[fld],
                              keep_default=False)
                db.commit_transaction()
                if verbosity:
                    print(
                        _('Added ForeignKey %(field_name)s to %(model_name)s')
                        % {
                            'field_name': field_name,
                            'model_name': model_name
                        })
            except DatabaseError, e:
                db.rollback_transaction()
                if "already exists" in str(e):
                    if verbosity > 1:
                        print(
                            _('ForeignKey %(field_name)s to %(model_name)s already exists'
                              ) % {
                                  'field_name': field_name,
                                  'model_name': model_name
                              })
                else:
                    sys.stderr = org_stderror
                    raise e
        elif isinstance(registry._field_registry[fld], CategoryM2MField):
            table_name = '%s_%s' % (mdl._meta.db_table, 'categories')
            try:
                db.start_transaction()
                db.create_table(
                    table_name,
                    (('id',
                      models.AutoField(verbose_name='ID',
                                       primary_key=True,
                                       auto_created=True)),
                     (model_name, models.ForeignKey(mdl, null=False)),
                     ('category', models.ForeignKey(Category, null=False))))
                db.create_unique(table_name,
                                 ['%s_id' % model_name, 'category_id'])
                db.commit_transaction()
                if verbosity:
                    print(
                        _('Added Many2Many table between %(model_name)s and %(category_table)s'
                          ) % {
                              'model_name': model_name,
                              'category_table': 'category'
                          })
            except DatabaseError, e:
                db.rollback_transaction()
                if "already exists" in str(e):
                    if verbosity > 1:
                        print(
                            _('Many2Many table between %(model_name)s and %(category_table)s already exists'
                              ) % {
                                  'model_name': model_name,
                                  'category_table': 'category'
                              })
                else:
                    sys.stderr = org_stderror
                    raise e
Code example #34
File: migration.py  Project: dossec/myblog-1
def run_migrations(toprint,
                   torun,
                   recorder,
                   app,
                   migrations,
                   fake=False,
                   db_dry_run=False,
                   verbosity=0):
    """
    Runs the specified migrations forwards/backwards, in order.
    """
    for migration in migrations:
        app_name = get_app_name(app)
        if verbosity:
            print toprint % (app_name, migration)

        # Get migration class
        klass = get_migration(app, migration)
        # Find its predecessor, and attach the ORM from that as prev_orm.
        all_names = get_migration_names(app)
        idx = all_names.index(migration)
        # First migration? The 'previous ORM' is empty.
        if idx == 0:
            klass.prev_orm = FakeORM(None, app)
        else:
            klass.prev_orm = get_migration(app, all_names[idx - 1]).orm

        # If this is a 'fake' migration, do nothing.
        if fake:
            if verbosity:
                print "   (faked)"

        # OK, we should probably do something then.
        else:
            runfunc = getattr(klass(), torun)
            args = inspect.getargspec(runfunc)

            # Get the correct ORM.
            if torun == "forwards":
                orm = klass.orm
            else:
                orm = klass.prev_orm

            db.current_orm = orm

            # If the database doesn't support running DDL inside a transaction
            # *cough*MySQL*cough* then do a dry run first.
            if not db.has_ddl_transactions or db_dry_run:
                if not (hasattr(klass, "no_dry_run") and klass.no_dry_run):
                    db.dry_run = True
                    # Only hide SQL if this is an automatic dry run.
                    if not db.has_ddl_transactions:
                        db.debug, old_debug = False, db.debug
                    pending_creates = db.get_pending_creates()
                    db.start_transaction()
                    try:
                        if len(args[0]) == 1:  # They don't want an ORM param
                            runfunc()
                        else:
                            runfunc(orm)
                        db.rollback_transactions_dry_run()
                    except:
                        traceback.print_exc()
                        print " ! Error found during dry run of migration! Aborting."
                        return False
                    if not db.has_ddl_transactions:
                        db.debug = old_debug
                    db.clear_run_data(pending_creates)
                    db.dry_run = False
                elif db_dry_run:
                    print " - Migration '%s' is marked for no-dry-run." % migration
                # If they really wanted to dry-run, then quit!
                if db_dry_run:
                    return

            if db.has_ddl_transactions:
                db.start_transaction()
            try:
                if len(args[0]) == 1:  # They don't want an ORM param
                    runfunc()
                else:
                    runfunc(orm)
                db.execute_deferred_sql()
            except:
                if db.has_ddl_transactions:
                    db.rollback_transaction()
                    raise
                else:
                    traceback.print_exc()
                    print " ! Error found during real run of migration! Aborting."
                    print
                    print " ! Since you have a database that does not support running"
                    print " ! schema-altering statements in transactions, we have had to"
                    print " ! leave it in an interim state between migrations."
                    if torun == "forwards":
                        print
                        print " ! You *might* be able to recover with:"
                        db.debug = db.dry_run = True
                        if len(args[0]) == 1:
                            klass().backwards()
                        else:
                            klass().backwards(klass.prev_orm)
                    print
                    print " ! The South developers regret this has happened, and would"
                    print " ! like to gently persuade you to consider a slightly"
                    print " ! easier-to-deal-with DBMS."
                    return False
            else:
                if db.has_ddl_transactions:
                    db.commit_transaction()

        if not db_dry_run:
            # Record us as having done this
            recorder(app_name, migration)
            if not fake:
                # Send a signal saying it ran
                # Actually, don't - we're implementing this properly in 0.7
                #ran_migration.send(None, app=app_name, migration=migration, method=torun)
                pass
Code example #35
File: db.py  Project: buypolarbear/childcare-2
    def test_unique(self):
        """
        Tests creating/deleting unique constraints.
        """
        db.create_table("test_unique2", [
            ('id', models.AutoField(primary_key=True)),
        ])
        db.create_table("test_unique", [
            ('spam', models.BooleanField(default=False)),
            ('eggs', models.IntegerField()),
            ('ham', models.ForeignKey(db.mock_model('Unique2', 'test_unique2'))),
        ])
        db.execute_deferred_sql()
        # Add a constraint
        db.create_unique("test_unique", ["spam"])
        db.execute_deferred_sql()
        # Shouldn't do anything during dry-run
        db.dry_run = True
        db.delete_unique("test_unique", ["spam"])
        db.dry_run = False
        db.delete_unique("test_unique", ["spam"])
        db.create_unique("test_unique", ["spam"])
        # Special preparations for Sql Server
        if db.backend_name == "pyodbc":
            db.execute("SET IDENTITY_INSERT test_unique2 ON;")
        db.execute("INSERT INTO test_unique2 (id) VALUES (1)")
        db.execute("INSERT INTO test_unique2 (id) VALUES (2)")
        db.commit_transaction()
        db.start_transaction()

        
        # Test it works
        TRUE = (True,)
        FALSE = (False,)
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE)
        try:
            db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 2, 1)", FALSE)
        except:
            db.rollback_transaction()
        else:
            self.fail("Could insert non-unique item.")
        
        # Drop that, add one only on eggs
        db.delete_unique("test_unique", ["spam"])
        db.execute("DELETE FROM test_unique")
        db.create_unique("test_unique", ["eggs"])
        db.start_transaction()
        
        # Test similarly
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE)
        try:
            db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", TRUE)
        except:
            db.rollback_transaction()
        else:
            self.fail("Could insert non-unique item.")
        
        # Drop those, test combined constraints
        db.delete_unique("test_unique", ["eggs"])
        db.execute("DELETE FROM test_unique")
        db.create_unique("test_unique", ["spam", "eggs", "ham_id"])
        db.start_transaction()
        # Test similarly
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", FALSE)
        try:
            db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
        except:
            db.rollback_transaction()
        else:
            self.fail("Could insert non-unique pair.")
        db.delete_unique("test_unique", ["spam", "eggs", "ham_id"])
        db.start_transaction()
Code example #36
File: db.py  Project: lmorchard/whuru
 def setUp(self):
     db.debug = False
     db.clear_deferred_sql()
     db.start_transaction()
Code example #37
    def forwards(self, orm):
        """
        Added custom transaction processing for transactional DBMS.
        If a DDL operation fails, the entire transaction fails and all future commands are ignored.
        """

        # Adding model 'OozieStreamingAction'
        db.create_table('jobsub_ooziestreamingaction', (
            ('oozieaction_ptr',
             self.gf('django.db.models.fields.related.OneToOneField')(
                 to=orm['jobsub.OozieAction'], unique=True, primary_key=True)),
            ('files', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('mapper',
             self.gf('django.db.models.fields.CharField')(max_length=512)),
            ('reducer',
             self.gf('django.db.models.fields.CharField')(max_length=512)),
            ('job_properties',
             self.gf('django.db.models.fields.TextField')(default='[]')),
            ('archives', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
        ))
        db.send_create_signal('jobsub', ['OozieStreamingAction'])

        # Adding model 'OozieAction'
        db.create_table('jobsub_oozieaction', (
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('action_type',
             self.gf('django.db.models.fields.CharField')(max_length=64)),
        ))
        db.send_create_signal('jobsub', ['OozieAction'])

        # Adding model 'OozieDesign'
        db.create_table('jobsub_ooziedesign', (
            ('description', self.gf('django.db.models.fields.CharField')(
                max_length=1024, blank=True)),
            ('last_modified', self.gf('django.db.models.fields.DateTimeField')(
                auto_now=True, blank=True)),
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['auth.User'])),
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('root_action',
             self.gf('django.db.models.fields.related.ForeignKey')(
                 to=orm['jobsub.OozieAction'])),
            ('name',
             self.gf('django.db.models.fields.CharField')(max_length=64)),
        ))
        db.send_create_signal('jobsub', ['OozieDesign'])

        # Adding model 'JobHistory'
        db.create_table('jobsub_jobhistory', (
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['auth.User'])),
            ('submission_date',
             self.gf('django.db.models.fields.DateTimeField')(auto_now=True,
                                                              blank=True)),
            ('design', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['jobsub.OozieDesign'])),
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('job_id',
             self.gf('django.db.models.fields.CharField')(max_length=128)),
        ))
        db.send_create_signal('jobsub', ['JobHistory'])

        # Adding model 'OozieMapreduceAction'
        db.create_table('jobsub_ooziemapreduceaction', (
            ('oozieaction_ptr',
             self.gf('django.db.models.fields.related.OneToOneField')(
                 to=orm['jobsub.OozieAction'], unique=True, primary_key=True)),
            ('files', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('jar_path',
             self.gf('django.db.models.fields.CharField')(max_length=512)),
            ('archives', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('job_properties',
             self.gf('django.db.models.fields.TextField')(default='[]')),
        ))
        db.send_create_signal('jobsub', ['OozieMapreduceAction'])

        # Adding model 'OozieJavaAction'
        db.create_table('jobsub_ooziejavaaction', (
            ('oozieaction_ptr',
             self.gf('django.db.models.fields.related.OneToOneField')(
                 to=orm['jobsub.OozieAction'], unique=True, primary_key=True)),
            ('files', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('jar_path',
             self.gf('django.db.models.fields.CharField')(max_length=512)),
            ('java_opts', self.gf('django.db.models.fields.CharField')(
                max_length=256, blank=True)),
            ('args', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('job_properties',
             self.gf('django.db.models.fields.TextField')(default='[]')),
            ('archives', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('main_class',
             self.gf('django.db.models.fields.CharField')(max_length=256)),
        ))
        db.send_create_signal('jobsub', ['OozieJavaAction'])

        # Adding field 'CheckForSetup.setup_level'
        db.add_column(
            'jobsub_checkforsetup',
            'setup_level',
            self.gf('django.db.models.fields.IntegerField')(default=0),
            keep_default=False)

        # South commits the transaction at the end of a forwards migration.
        db.start_transaction()
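
The pattern above (commit mid-migration, then leave a fresh transaction open) follows South's contract that the runner commits whatever transaction is open when forwards() returns. A minimal sketch of that shape, where forwards_sketch and the 'legacy_table' name are purely illustrative and not part of the examples on this page:

from south.db import db

def forwards_sketch(orm):
    # ... schema changes that must succeed go here ...

    # Commit the work so far, so a failing statement below cannot poison it.
    db.commit_transaction()

    # Risky, optional DDL runs in its own transaction.
    db.start_transaction()
    try:
        db.delete_table('legacy_table')  # illustrative table name
        db.commit_transaction()
    except Exception:
        db.rollback_transaction()

    # Leave a transaction open; South commits it after forwards() returns.
    db.start_transaction()
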
Code example #38
def enable_triggers():
    from south.db import db
    if db.backend_name == "postgres":
        db.start_transaction()
        db.execute_many(PG_ENABLE_TRIGGERS)
        db.commit_transaction()
Code example #39
def reset_sequences():
    from south.db import db
    if db.backend_name == "postgres":
        db.start_transaction()
        db.execute_many(PG_SEQUENCE_RESETS)
        db.commit_transaction()
Code example #40
def reindex_fts():
    from south.db import db
    if db.backend_name == "postgres":
        db.start_transaction()
        db.execute_many("UPDATE forum_noderevision set id = id WHERE TRUE;")
        db.commit_transaction()
Code example #41
def enable_triggers():
    if db.backend_name == "postgres":
        db.start_transaction()
        db.execute_many(commands.PG_ENABLE_TRIGGERS)
        db.commit_transaction()
Code example #42
def reset_sequences():
    if db.backend_name == "postgres":
        db.start_transaction()
        db.execute_many(commands.PG_SEQUENCE_RESETS)
        db.commit_transaction()
Code example #43
def start_import(fname, tag_merge, user):

    start_time = datetime.datetime.now()
    steps = [s for s in FILE_HANDLERS]

    with open(os.path.join(TMP_FOLDER, 'backup.inf'), 'r') as inffile:
        inf = ConfigParser.SafeConfigParser()
        inf.readfp(inffile)

        state = dict([(s['id'], {
            'status': _('Queued'),
            'count': int(inf.get(META_INF_SECTION, s['id'])),
            'parsed': 0
        }) for s in steps] +
                     [('overall', {
                         'status': _('Starting'),
                         'count': int(inf.get(META_INF_SECTION, 'overall')),
                         'parsed': 0
                     })])

    full_state = dict(running=True, state=state, time_started="")

    def set_state():
        full_state['time_started'] = diff_date(start_time)
        cache.set(CACHE_KEY, full_state)

    set_state()

    def ping_state(name):
        state[name]['parsed'] += 1
        state['overall']['parsed'] += 1
        set_state()

    data = {'is_merge': True, 'tag_merge': tag_merge}

    def run(fn, name):
        def ping():
            ping_state(name)

        # ``name`` is the step id; look up the step's display name in ``steps``
        # (the original referenced the leaked comprehension variable ``s``).
        step_name = [s['name'] for s in steps if s['id'] == name][0]
        state['overall']['status'] = _('Importing %s') % step_name
        state[name]['status'] = _('Importing')

        fn(TMP_FOLDER, user, ping, data)

        state[name]['status'] = _('Done')

        set_state()

        return fname

    #dump = tarfile.open(fname, 'r')
    #dump.extractall(TMP_FOLDER)

    try:

        disable_triggers()
        db.start_transaction()

        for h in FILE_HANDLERS:
            run(h['fn'], h['id'])

        db.commit_transaction()
        enable_triggers()

        settings.MERGE_MAPPINGS.set_value(
            dict(merged_nodes=data['nodes_map'],
                 merged_users=data['users_map']))

        reset_sequences()
    except Exception, e:
        full_state['running'] = False
        full_state['errors'] = "%s: %s" % (e.__class__.__name__, unicode(e))
        set_state()

        import traceback
        logging.error("Error executing xml import: \n %s" %
                      (traceback.format_exc()))
Code example #44
    def test_unique(self):
        """
        Tests creating/deleting unique constraints.
        """

        # SQLite backend doesn't support this yet.
        if db.backend_name == "sqlite3":
            return

        db.create_table("test_unique2", [
            ('id', models.AutoField(primary_key=True)),
        ])
        db.create_table("test_unique", [
            ('spam', models.BooleanField(default=False)),
            ('eggs', models.IntegerField()),
            ('ham', models.ForeignKey(db.mock_model('Unique2', 'test_unique2'))),
        ])
        # Add a constraint
        db.create_unique("test_unique", ["spam"])
        # Shouldn't do anything during dry-run
        db.dry_run = True
        db.delete_unique("test_unique", ["spam"])
        db.dry_run = False
        db.delete_unique("test_unique", ["spam"])
        db.create_unique("test_unique", ["spam"])
        db.commit_transaction()
        db.start_transaction()

        # Test it works
        db.execute("INSERT INTO test_unique2 (id) VALUES (1)")
        db.execute("INSERT INTO test_unique2 (id) VALUES (2)")
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (false, 1, 2)")
        try:
            db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 2, 1)")
        except:
            db.rollback_transaction()
        else:
            self.fail("Could insert non-unique item.")

        # Drop that, add one only on eggs
        db.delete_unique("test_unique", ["spam"])
        db.execute("DELETE FROM test_unique")
        db.create_unique("test_unique", ["eggs"])
        db.start_transaction()

        # Test similarly
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (false, 1, 2)")
        try:
            db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 1, 1)")
        except:
            db.rollback_transaction()
        else:
            self.fail("Could insert non-unique item.")

        # Drop those, test combined constraints
        db.delete_unique("test_unique", ["eggs"])
        db.execute("DELETE FROM test_unique")
        db.create_unique("test_unique", ["spam", "eggs", "ham_id"])
        db.start_transaction()
        # Test similarly
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (false, 1, 1)")
        try:
            db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (true, 0, 1)")
        except:
            db.rollback_transaction()
        else:
            self.fail("Could insert non-unique pair.")
        db.delete_unique("test_unique", ["spam", "eggs", "ham_id"])
        db.start_transaction()
Code example #45
    def forwards(self, orm):
        if orm.CourseSection.objects.exists():
            raise Exception('This migration must only be run on a legacy ' \
                'database that has no existing CourseSections!')

        db.start_transaction()
        courses = orm.Course.objects.all()
        for course in courses:
            if course.credits is None:
                course.credits = 0
            course_section = orm.CourseSection()
            # If the new CourseSection's PK is the same, it makes it much easier
            # to migrate other models that reference Course
            course_section.pk = course.pk
            course_section.course = course
            course.is_active = course.active
            course_section.is_active = course.is_active
            course_section.name = course.fullname
            course_section.last_grade_submission = course.last_grade_submission
            course_section.save()  # before we can do M2M stuff
            # South doesn't seem to like this; throws up FieldErrors
            # for marking_period in course.marking_period.all():
            for course_marking_period in course.marking_period.through.objects.filter(
                    course=course):
                marking_period = orm.MarkingPeriod.objects.get(
                    pk=course_marking_period.markingperiod_id)
                course_section.marking_period.add(marking_period)
            for course_meet in course.coursemeet_set.all():
                course_meet.course_section = course_section
                course_meet.save()
            if course.teacher is not None:
                orm.CourseSectionTeacher.objects.get_or_create(
                    course_section=course_section,
                    is_primary=True,
                    teacher=course.teacher)
            # South doesn't seem to like this either
            #for secondary_teacher in course.secondary_teachers.all():
            for course_secondary_teacher in course.secondary_teachers.through.objects.filter(
                    course=course):
                secondary_teacher = orm['sis.Faculty'].objects.get(
                    pk=course_secondary_teacher.faculty_id)
                obj, created = orm.CourseSectionTeacher.objects.get_or_create(
                    course_section=course_section, teacher=secondary_teacher)
                # try to handle contradictory duplicates gracefully
                if created:
                    obj.is_primary = False
                    obj.save()
                else:
                    print '{} ({}) / {} ({}) appears to be both a primary ' \
                        'and secondary teacher for course {} ({})!'.format(
                            secondary_teacher.username,
                            secondary_teacher.pk,
                            course.teacher.username,
                            course.teacher.pk, course.fullname,
                            course.pk
                    )
            course.courseenrollment_set.filter(role__iexact='Teacher').delete()
            for course_enrollment in course.courseenrollment_set.all():
                course_enrollment.course_section = course_section
                course_enrollment.save()
            course_section.save()
            course.save()
        db.commit_transaction()

        # Now that all Faculty CourseEnrollments have been migrated, we can
        # restrict CourseEnrollment to Students
        # Changing field 'CourseEnrollment.user'
        db.alter_column(
            u'schedule_courseenrollment', 'user_id',
            self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['sis.Student']))
Code example #46
    def forwards(self, orm):
        """
        This migration has been customized to support upgrades from Cloudera
        Enterprise 3.5, as well as Hue 1.2
        """
        # These will be removed if upgrading from a previous version of
        # Cloudera Enterprise
        if 'userman_groupadministrator' in connection.introspection.table_names():
            db.delete_table('userman_groupadministrator')
        if 'userman_grouprelations' in connection.introspection.table_names():
            db.delete_table('userman_grouprelations')

        if 'userman_userprofile' in connection.introspection.table_names():
            db.rename_table('userman_userprofile', 'useradmin_userprofile')
            db.delete_column('useradmin_userprofile', 'primary_group_id')
            db.create_index('useradmin_userprofile', ['user_id'])

            db.alter_column(
                'useradmin_userprofile', 'creation_method',
                models.CharField(editable=True,
                                 null=False,
                                 max_length=64,
                                 default=UserProfile.CreationMethod.HUE))
            for up in UserProfile.objects.all():
                # From when CreationMethod was not an Enum
                # LDAP == 1
                # HUE == 0
                if up.creation_method == '1':
                    up.creation_method = UserProfile.CreationMethod.EXTERNAL.name
                elif up.creation_method == '0':
                    up.creation_method = UserProfile.CreationMethod.HUE
                up.save()
        else:
            # Adding model 'UserProfile'
            db.create_table('useradmin_userprofile', (
                ('home_directory',
                 self.gf('django.db.models.fields.CharField')(max_length=1024,
                                                              null=True)),
                ('id', self.gf('django.db.models.fields.AutoField')(
                    primary_key=True)),
                ('user', self.gf('django.db.models.fields.related.ForeignKey')(
                    to=orm['auth.User'], unique=True)),
            ))
            db.commit_transaction()
            db.start_transaction()
            db.send_create_signal('useradmin', ['UserProfile'])

        if 'userman_grouppermission' in connection.introspection.table_names():
            db.rename_table('userman_grouppermission',
                            'useradmin_grouppermission')
            db.rename_column('useradmin_grouppermission',
                             'desktop_permission_id', 'hue_permission_id')
            db.create_index('useradmin_grouppermission', ['group_id'])
            db.create_index('useradmin_grouppermission', ['hue_permission_id'])
        else:
            # Adding model 'GroupPermission'
            db.create_table('useradmin_grouppermission', (
                ('hue_permission',
                 self.gf('django.db.models.fields.related.ForeignKey')(
                     to=orm['useradmin.HuePermission'])),
                ('group',
                 self.gf('django.db.models.fields.related.ForeignKey')(
                     to=orm['auth.Group'])),
                ('id', self.gf('django.db.models.fields.AutoField')(
                    primary_key=True)),
            ))
            db.commit_transaction()
            db.start_transaction()
            db.send_create_signal('useradmin', ['GroupPermission'])

        if 'userman_desktoppermission' in connection.introspection.table_names():
            db.rename_table('userman_desktoppermission',
                            'useradmin_huepermission')
        else:
            # Adding model 'HuePermission'
            db.create_table('useradmin_huepermission', (
                ('action',
                 self.gf('django.db.models.fields.CharField')(max_length=100)),
                ('app',
                 self.gf('django.db.models.fields.CharField')(max_length=30)),
                ('id', self.gf('django.db.models.fields.AutoField')(
                    primary_key=True)),
                ('description',
                 self.gf('django.db.models.fields.CharField')(max_length=255)),
            ))
            db.commit_transaction()
            db.start_transaction()
            db.send_create_signal('useradmin', ['HuePermission'])
Code example #47
File: db.py Project: buypolarbear/childcare-2
    def test_alter_unique(self):
        """
        Tests that unique constraints are not affected when
        altering columns (that's handled by create_/delete_unique)
        """
        db.create_table("test_alter_unique", [
            ('spam', models.IntegerField()),
            ('eggs', models.IntegerField(unique=True)),
        ])
        db.execute_deferred_sql()
        
        # Make sure the unique constraint is created
        db.execute('INSERT INTO test_alter_unique (spam, eggs) VALUES (0, 42)')
        db.commit_transaction()
        db.start_transaction()
        try:
            db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)")
        except:
            pass
        else:
            self.fail("Could insert the same integer twice into a unique field.")
        db.rollback_transaction()

        # Alter without unique=True (should not affect anything)
        db.alter_column("test_alter_unique", "eggs", models.IntegerField())

        # Insertion should still fail
        db.start_transaction()
        try:
            db.execute("INSERT INTO test_alter_unique (spam, eggs) VALUES (1, 42)")
        except:
            pass
        else:
            self.fail("Could insert the same integer twice into a unique field after alter_column with unique=False.")
        db.rollback_transaction()
        
        # Delete the unique index/constraint
        if db.backend_name != "sqlite3":
            db.delete_unique("test_alter_unique", ["eggs"])
        db.delete_table("test_alter_unique")
        db.start_transaction()

        # Test multi-field constraint
        db.create_table("test_alter_unique2", [
            ('spam', models.IntegerField()),
            ('eggs', models.IntegerField()),
        ])
        db.create_unique('test_alter_unique2', ('spam', 'eggs'))
        db.execute_deferred_sql()
        db.execute('INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)')
        db.commit_transaction()
        # Verify that constraint works
        db.start_transaction()
        try:
            db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (1, 42)")
        except:
            self.fail("Looks like multi-field unique constraint applied to only one field.")
        db.start_transaction()
        db.rollback_transaction()
        try:
            db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 43)")
        except:
            self.fail("Looks like multi-field unique constraint applied to only one field.")
        db.rollback_transaction()
        db.start_transaction()
        try:
            db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)")
        except:
            pass
        else:
            self.fail("Could insert the same integer twice into a unique field.")
        db.rollback_transaction()
        # Altering one column should not drop or modify multi-column constraint
        db.alter_column("test_alter_unique2", "eggs", models.CharField(max_length=10))
        db.start_transaction()
        try:
            db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (1, 42)")
        except:
            self.fail("Altering one column broken multi-column unique constraint.")
        db.start_transaction()
        db.rollback_transaction()
        try:
            db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 43)")
        except:
            self.fail("Altering one column broken multi-column unique constraint.")
        db.rollback_transaction()
        db.start_transaction()
        try:
            db.execute("INSERT INTO test_alter_unique2 (spam, eggs) VALUES (0, 42)")
        except:
            pass
        else:
            self.fail("Could insert the same integer twice into a unique field after alter_column with unique=False.")
        db.rollback_transaction()
        db.delete_table("test_alter_unique2")
        db.start_transaction()
Code example #48
class Migration(SchemaMigration):
    def forwards(self, orm):
        """
        Added custom transaction processing for transactional DBMS.
        If a DDL operation fails, the entire transaction fails and all future commands are ignored.
        """

        # Adding model 'OozieStreamingAction'
        db.create_table('jobsub_ooziestreamingaction', (
            ('oozieaction_ptr',
             self.gf('django.db.models.fields.related.OneToOneField')(
                 to=orm['jobsub.OozieAction'], unique=True, primary_key=True)),
            ('files', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('mapper',
             self.gf('django.db.models.fields.CharField')(max_length=512)),
            ('reducer',
             self.gf('django.db.models.fields.CharField')(max_length=512)),
            ('job_properties',
             self.gf('django.db.models.fields.TextField')(default='[]')),
            ('archives', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
        ))
        db.send_create_signal('jobsub', ['OozieStreamingAction'])

        # Adding model 'OozieAction'
        db.create_table('jobsub_oozieaction', (
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('action_type',
             self.gf('django.db.models.fields.CharField')(max_length=64)),
        ))
        db.send_create_signal('jobsub', ['OozieAction'])

        # Adding model 'OozieDesign'
        db.create_table('jobsub_ooziedesign', (
            ('description', self.gf('django.db.models.fields.CharField')(
                max_length=1024, blank=True)),
            ('last_modified', self.gf('django.db.models.fields.DateTimeField')(
                auto_now=True, blank=True)),
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['auth.User'])),
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('root_action',
             self.gf('django.db.models.fields.related.ForeignKey')(
                 to=orm['jobsub.OozieAction'])),
            ('name',
             self.gf('django.db.models.fields.CharField')(max_length=64)),
        ))
        db.send_create_signal('jobsub', ['OozieDesign'])

        # Adding model 'JobHistory'
        db.create_table('jobsub_jobhistory', (
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['auth.User'])),
            ('submission_date',
             self.gf('django.db.models.fields.DateTimeField')(auto_now=True,
                                                              blank=True)),
            ('design', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['jobsub.OozieDesign'])),
            ('id',
             self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('job_id',
             self.gf('django.db.models.fields.CharField')(max_length=128)),
        ))
        db.send_create_signal('jobsub', ['JobHistory'])

        # Adding model 'OozieMapreduceAction'
        db.create_table('jobsub_ooziemapreduceaction', (
            ('oozieaction_ptr',
             self.gf('django.db.models.fields.related.OneToOneField')(
                 to=orm['jobsub.OozieAction'], unique=True, primary_key=True)),
            ('files', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('jar_path',
             self.gf('django.db.models.fields.CharField')(max_length=512)),
            ('archives', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('job_properties',
             self.gf('django.db.models.fields.TextField')(default='[]')),
        ))
        db.send_create_signal('jobsub', ['OozieMapreduceAction'])

        # Adding model 'OozieJavaAction'
        db.create_table('jobsub_ooziejavaaction', (
            ('oozieaction_ptr',
             self.gf('django.db.models.fields.related.OneToOneField')(
                 to=orm['jobsub.OozieAction'], unique=True, primary_key=True)),
            ('files', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('jar_path',
             self.gf('django.db.models.fields.CharField')(max_length=512)),
            ('java_opts', self.gf('django.db.models.fields.CharField')(
                max_length=256, blank=True)),
            ('args', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('job_properties',
             self.gf('django.db.models.fields.TextField')(default='[]')),
            ('archives', self.gf('django.db.models.fields.CharField')(
                default='[]', max_length=512)),
            ('main_class',
             self.gf('django.db.models.fields.CharField')(max_length=256)),
        ))
        db.send_create_signal('jobsub', ['OozieJavaAction'])

        # Adding field 'CheckForSetup.setup_level'
        db.add_column(
            'jobsub_checkforsetup',
            'setup_level',
            self.gf('django.db.models.fields.IntegerField')(default=0),
            keep_default=False)

        # The next statements may fail, so each runs in its own transaction.
        db.commit_transaction()

        # Delete legacy tables. Note that this only applies to Hue 1.x installations
        db.start_transaction()
        try:
            db.delete_table('jobsub_submission')
            remove_content_type('jobsub', 'submission')
            db.commit_transaction()
        except Exception, ex:
            db.rollback_transaction()

        db.start_transaction()
        try:
            db.delete_table('jobsub_serversubmissionstate')
            remove_content_type('jobsub', 'serversubmissionstate')
            db.commit_transaction()
        except Exception, ex:
            db.rollback_transaction()
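
The two nearly identical blocks above could be factored into a small helper. A possible sketch, where drop_legacy_table is not part of the original migration and remove_content_type is the helper already used above:

from south.db import db

def drop_legacy_table(table_name, app_label, model_name):
    # Drop an optional legacy table in its own transaction so a failure
    # cannot abort the rest of the migration.
    db.start_transaction()
    try:
        db.delete_table(table_name)
        remove_content_type(app_label, model_name)  # helper used above
        db.commit_transaction()
    except Exception:
        db.rollback_transaction()

With that in place, the tail of forwards() would reduce to calls such as drop_legacy_table('jobsub_submission', 'jobsub', 'submission').
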
Code example #49
    def forwards(self, orm):

        try:
            # if old "roles" app tables are found, rename them to "hr" tables
            db.rename_table('roles_member', 'hr_member')
            db.rename_table('roles_memberdiff', 'hr_memberdiff')

            db.rename_table('roles_roletype', 'hr_roletype')
            db.rename_table('roles_role', 'hr_role')
            db.rename_table('roles_rolemembership', 'hr_rolemembership')
            db.rename_table('roles_rolememberdiff', 'hr_rolememberdiff')

            db.rename_table('roles_title', 'hr_title')
            db.rename_table('roles_titlemembership', 'hr_titlemembership')
            db.rename_table('roles_titlecomposition', 'hr_titlecomposition')
            db.rename_table('roles_titlecompodiff', 'hr_titlecompodiff')
            db.rename_table('roles_titlememberdiff', 'hr_titlememberdiff')

            return
        except:
            # if tables not found, do the standard migration
            db.rollback_transaction()
            db.start_transaction()
        # Adding model 'Member'
        db.create_table('hr_member', (
            ('characterID', self.gf('django.db.models.fields.BigIntegerField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)),
            ('nickname', self.gf('django.db.models.fields.CharField')(default='', max_length=256)),
            ('baseID', self.gf('django.db.models.fields.BigIntegerField')(default=0)),
            ('corpDate', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 10, 22, 12, 40, 4, 203014))),
            ('lastLogin', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 10, 22, 12, 40, 4, 203061))),
            ('lastLogoff', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 10, 22, 12, 40, 4, 203094))),
            ('locationID', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
            ('location', self.gf('django.db.models.fields.CharField')(default='???', max_length=256, null=True, blank=True)),
            ('ship', self.gf('django.db.models.fields.CharField')(default='???', max_length=128)),
            ('accessLvl', self.gf('django.db.models.fields.BigIntegerField')(default=0)),
            ('corped', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('owner', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='characters', null=True, to=orm['auth.User'])),
            ('notes', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ))
        db.send_create_signal('hr', ['Member'])

        # Adding model 'MemberDiff'
        db.create_table('hr_memberdiff', (
            ('id', self.gf('ecm.lib.bigintpatch.BigAutoField')(primary_key=True)),
            ('member', self.gf('django.db.models.fields.related.ForeignKey')(related_name='diffs', to=orm['hr.Member'])),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100, db_index=True)),
            ('nickname', self.gf('django.db.models.fields.CharField')(max_length=256, db_index=True)),
            ('new', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)),
            ('date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 10, 22, 12, 40, 4, 204283), db_index=True)),
        ))
        db.send_create_signal('hr', ['MemberDiff'])

        # Adding model 'RoleType'
        db.create_table('hr_roletype', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('typeName', self.gf('django.db.models.fields.CharField')(unique=True, max_length=64)),
            ('dispName', self.gf('django.db.models.fields.CharField')(max_length=64)),
        ))
        db.send_create_signal('hr', ['RoleType'])

        # Adding model 'Role'
        db.create_table('hr_role', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('roleType', self.gf('django.db.models.fields.related.ForeignKey')(related_name='roles', to=orm['hr.RoleType'])),
            ('roleID', self.gf('django.db.models.fields.BigIntegerField')()),
            ('roleName', self.gf('django.db.models.fields.CharField')(max_length=64)),
            ('dispName', self.gf('django.db.models.fields.CharField')(max_length=64)),
            ('description', self.gf('django.db.models.fields.CharField')(max_length=256)),
            ('hangar', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['corp.Hangar'], null=True, blank=True)),
            ('wallet', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['corp.Wallet'], null=True, blank=True)),
            ('accessLvl', self.gf('django.db.models.fields.BigIntegerField')(default=0)),
        ))
        db.send_create_signal('hr', ['Role'])

        # Adding model 'RoleMembership'
        db.create_table('hr_rolemembership', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('member', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Member'])),
            ('role', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Role'])),
        ))
        db.send_create_signal('hr', ['RoleMembership'])

        # Adding model 'RoleMemberDiff'
        db.create_table('hr_rolememberdiff', (
            ('id', self.gf('ecm.lib.bigintpatch.BigAutoField')(primary_key=True)),
            ('member', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Member'])),
            ('role', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Role'])),
            ('new', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)),
            ('date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 10, 22, 12, 40, 4, 210650), db_index=True)),
        ))
        db.send_create_signal('hr', ['RoleMemberDiff'])

        # Adding model 'Title'
        db.create_table('hr_title', (
            ('titleID', self.gf('django.db.models.fields.BigIntegerField')(primary_key=True)),
            ('titleName', self.gf('django.db.models.fields.CharField')(max_length=256)),
            ('tiedToBase', self.gf('django.db.models.fields.BigIntegerField')(default=0)),
            ('accessLvl', self.gf('django.db.models.fields.BigIntegerField')(default=0)),
        ))
        db.send_create_signal('hr', ['Title'])

        # Adding model 'TitleMembership'
        db.create_table('hr_titlemembership', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('member', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Member'])),
            ('title', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Title'])),
        ))
        db.send_create_signal('hr', ['TitleMembership'])

        # Adding model 'TitleComposition'
        db.create_table('hr_titlecomposition', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Title'])),
            ('role', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Role'])),
        ))
        db.send_create_signal('hr', ['TitleComposition'])

        # Adding model 'TitleCompoDiff'
        db.create_table('hr_titlecompodiff', (
            ('id', self.gf('ecm.lib.bigintpatch.BigAutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Title'])),
            ('role', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Role'])),
            ('new', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)),
            ('date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 10, 22, 12, 40, 4, 216463), db_index=True)),
        ))
        db.send_create_signal('hr', ['TitleCompoDiff'])

        # Adding model 'TitleMemberDiff'
        db.create_table('hr_titlememberdiff', (
            ('id', self.gf('ecm.lib.bigintpatch.BigAutoField')(primary_key=True)),
            ('member', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Member'])),
            ('title', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['hr.Title'])),
            ('new', self.gf('django.db.models.fields.BooleanField')(default=True, db_index=True)),
            ('date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2011, 10, 22, 12, 40, 4, 217222), db_index=True)),
        ))
        db.send_create_signal('hr', ['TitleMemberDiff'])