def forwards(self, orm):
    """Add photo_url/thumbnail_url columns and backfill them from the S3 keys."""
    # Fall back to the production defaults when the settings are absent.
    bucket_name = getattr(settings, 'AWS_STORAGE_BUCKET_NAME', 'media.demozoo.org')
    bucket_format = getattr(settings, 'AWS_BOTO_CALLING_FORMAT', 'VHostCallingFormat')

    # VHost-style buckets are served from the bucket name itself; otherwise
    # use the generic s3.amazonaws.com host.
    if bucket_format == 'VHostCallingFormat':
        url_prefix = "http://%s/" % bucket_name
    else:
        url_prefix = "http://%s.s3.amazonaws.com/" % bucket_name

    # Adding field 'Platform.photo_url'
    db.add_column(u'platforms_platform', 'photo_url',
                  self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
                  keep_default=False)

    # Adding field 'Platform.thumbnail_url'
    db.add_column(u'platforms_platform', 'thumbnail_url',
                  self.gf('django.db.models.fields.CharField')(default='', max_length=255, blank=True),
                  keep_default=False)

    # Backfill the new columns by prefixing the stored object keys.
    db.execute("UPDATE platforms_platform SET photo_url = %s || photo WHERE photo IS NOT NULL",
               [url_prefix])
    db.execute("UPDATE platforms_platform SET thumbnail_url = %s || thumbnail WHERE thumbnail IS NOT NULL",
               [url_prefix])
def test_alter_column_postgres_multiword(self):
    """
    Tests altering columns with multiple words in Postgres types (issue #125)
    e.g. 'datetime with time zone', look at django/db/backends/postgresql/creation.py
    """
    db.create_table("test_multiword", [
        ('col_datetime', models.DateTimeField(null=True)),
        ('col_integer', models.PositiveIntegerField(null=True)),
        ('col_smallint', models.PositiveSmallIntegerField(null=True)),
        ('col_float', models.FloatField(null=True)),
    ])

    # test if 'double precision' is preserved
    db.alter_column('test_multiword', 'col_float', models.FloatField('float', null=True))

    # test if 'CHECK ("%(column)s" >= 0)' is stripped
    db.alter_column('test_multiword', 'col_integer', models.PositiveIntegerField(null=True))
    db.alter_column('test_multiword', 'col_smallint', models.PositiveSmallIntegerField(null=True))

    # test if 'with timezone' is preserved
    if db.backend_name == "postgres":
        # Insert a tz-aware value, alter the column, then verify the stored
        # value still compares equal -- i.e. the timezone was not dropped.
        db.execute("INSERT INTO test_multiword (col_datetime) VALUES ('2009-04-24 14:20:55+02')")
        db.alter_column('test_multiword', 'col_datetime', models.DateTimeField(auto_now=True))
        assert db.execute("SELECT col_datetime = '2009-04-24 14:20:55+02' FROM test_multiword")[0][0]

    db.delete_table("test_multiword")
def forwards(self, orm): """ Apply this database migration. """ # Add a full-text index to the Posting.heading field: db.execute(""" ALTER TABLE shared_posting ADD COLUMN heading_tsv tsvector; CREATE TRIGGER tsvectorupdateheading BEFORE INSERT OR UPDATE ON shared_posting FOR EACH ROW EXECUTE PROCEDURE tsvector_update_trigger(heading_tsv, 'pg_catalog.english', heading); CREATE INDEX shared_posting_heading_tsv ON shared_posting USING gist(heading_tsv); UPDATE shared_posting SET heading_tsv=to_tsvector(heading);""") # Add a full-text index to the Posting.body field: db.execute(""" ALTER TABLE shared_posting ADD COLUMN body_tsv tsvector; CREATE TRIGGER tsvectorupdatebody BEFORE INSERT OR UPDATE ON shared_posting FOR EACH ROW EXECUTE PROCEDURE tsvector_update_trigger(body_tsv, 'pg_catalog.english', body); CREATE INDEX shared_posting_body_tsv ON shared_posting USING gist(body_tsv); UPDATE shared_posting SET body_tsv=to_tsvector(body);""")
def forwards(self, orm):
    """Load the gis_neighborhoods SQL dump, using temporary scratch columns."""
    db.start_transaction()
    # Argh. The DB dump has a lot of crap we don't care about at all.
    # So, use South's fake ORM dictionary to figure out how to create those columns.
    # (The original code had stray trailing commas after four of these calls,
    # which turned each statement into a throwaway 1-tuple; removed.)
    db.add_column('gis_neighborhoods', 'stacked', orm['bmabr.neighborhood:stacked'])
    db.add_column('gis_neighborhoods', 'annoline1', orm['bmabr.neighborhood:annoline1'])
    db.add_column('gis_neighborhoods', 'annoline2', orm['bmabr.neighborhood:annoline2'])
    db.add_column('gis_neighborhoods', 'annoline3', orm['bmabr.neighborhood:annoline3'])
    db.add_column('gis_neighborhoods', 'annoangle', orm['bmabr.neighborhood:annoangle'])
    db.commit_transaction()

    # Now load the data.
    db.start_transaction()
    HERE = os.path.abspath(os.path.dirname(__file__))
    sql_path = os.path.abspath(
        os.path.join(HERE, '..', '..', 'sql', 'gis_neighborhoods.sql'))
    # Close the file handle instead of leaking it.
    with open(sql_path) as sql_file:
        db.execute_many(sql_file.read())
    db.execute("UPDATE gis_neighborhoods SET state = 'NY'")
    db.commit_transaction()

    # Now clean up the crap we don't want.
    db.start_transaction()
    db.delete_column('gis_neighborhoods', 'stacked')
    db.delete_column('gis_neighborhoods', 'annoangle')
    db.delete_column('gis_neighborhoods', 'annoline1')
    db.delete_column('gis_neighborhoods', 'annoline2')
    db.delete_column('gis_neighborhoods', 'annoline3')
    db.commit_transaction()
def forwards(self, orm):
    """Convert the Configuration table to UTF-8 (MySQL-only DDL)."""
    # SQLite has no per-table character sets; nothing to do there.
    if connection.vendor == 'sqlite':
        return True
    # Convert Configuration model to UTF-8
    db.execute("ALTER TABLE seo_configuration CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci")
def forwards(self, orm):
    """Create the ServiceEvent table and strip monitoring fields off Service."""
    # Adding model 'ServiceEvent'
    db.create_table(u'watch_serviceevent', (
        (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('service', self.gf('django.db.models.fields.related.ForeignKey')(related_name='events', to=orm['watch.Service'])),
        ('maxx_ActualArrivalTime', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),
        ('maxx_ActualDepartureTime', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),
        ('maxx_ArrivalStatus', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=200, null=True, blank=True)),
        ('maxx_ExpectedArrivalTime', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),
        ('maxx_ExpectedDepartureTime', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),
        ('maxx_InCongestion', self.gf('django.db.models.fields.NullBooleanField')(db_index=True, null=True, blank=True)),
        ('maxx_Monitored', self.gf('django.db.models.fields.NullBooleanField')(db_index=True, null=True, blank=True)),
        ('maxx_TimeStamp', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),
        ('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
    ))
    db.send_create_signal(u'watch', ['ServiceEvent'])

    # Deleting field 'Service.monitored_departure_time'
    db.delete_column(u'watch_service', 'monitored_departure_time')

    # Deleting field 'Service.last_monitored_at'
    db.delete_column(u'watch_service', 'last_monitored_at')

    # Deleting field 'Service.last_data_json'
    db.delete_column(u'watch_service', 'last_data_json')

    # Deleting field 'Service.first_monitored_at'
    db.delete_column(u'watch_service', 'first_monitored_at')

    # Wipe existing Service rows -- but only when actually executing, not
    # during South's schema-only dry run.
    if not db.dry_run:
        db.execute('DELETE FROM watch_service;')
def forwards(self, orm):
    """Copy each ZoteroLink's data onto its corresponding main_document row."""
    ZoteroLink = orm['djotero.zoterolink']
    for link_row in ZoteroLink.objects.all():
        db.execute(
            u'UPDATE main_document '
            'SET zotero_data = %s, zotero_link_id = %s '
            'WHERE id = %s;',
            params=[link_row.zotero_data, link_row.id, link_row.doc_id])
def forwards(self, orm):
    """Alter InstitutionDetails/InstRealmMon schema; realm becomes an FK."""
    # Changing field 'InstitutionDetails.number_id'
    db.alter_column('edumanage_institutiondetails', 'number_id', self.gf('django.db.models.fields.PositiveIntegerField')(max_length=6, null=True, blank=True))

    # Deleting field 'InstRealmMon.instid'
    db.delete_column('edumanage_instrealmmon', 'instid_id')

    # Renaming column for 'InstRealmMon.realm' to match new field type.
    db.rename_column('edumanage_instrealmmon', 'realm', 'realm_id')
    # Changing field 'InstRealmMon.realm'
    # WORKAROUND NEEDED
    # This migration breaks with PostgreSQL with:
    #   ERROR:  column "realm_id" cannot be cast automatically to type integer
    #   HINT:  Specify a USING expression to perform the conversion.
    #   STATEMENT:  ALTER TABLE "edumanage_instrealmmon" ALTER COLUMN "realm_id" TYPE integer,
    #               ALTER COLUMN "realm_id" SET NOT NULL, ALTER COLUMN "realm_id" DROP DEFAULT;
    # This is a known problem: http://south.aeracode.org/ticket/484
    # (PostgreSQL will not automatically convert and conversion must be provided with the USING clause)
    # Workaround: For PostgreSQL invoke a direct SQL statement amended with a USING clause to do the converion explicitly.
    # Credits:
    # * http://codeinthehole.com/writing/altering-postgres-table-columns-with-south/
    # * http://stackoverflow.com/questions/13170570/change-type-of-varchar-field-to-integer-cannot-be-cast-automatically-to-type-i
    if ( db._get_connection().vendor == "postgresql" ):
        # Explicit USING clause: trim the old varchar value and cast to integer.
        db.execute('ALTER TABLE "edumanage_instrealmmon" ALTER COLUMN "realm_id" TYPE integer USING (trim(realm_id)::integer), ALTER COLUMN "realm_id" SET NOT NULL, ALTER COLUMN "realm_id" DROP DEFAULT;')
    else:
        db.alter_column('edumanage_instrealmmon', 'realm_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['edumanage.InstRealm']))

    # Adding index on 'InstRealmMon', fields ['realm']
    db.create_index('edumanage_instrealmmon', ['realm_id'])

    # Changing field 'InstRealmMon.mon_type'
    db.alter_column('edumanage_instrealmmon', 'mon_type', self.gf('django.db.models.fields.CharField')(max_length=16))
def forwards(self, orm):
    """Add BrandProposal.status and wipe the proposal tables."""
    # Adding field 'BrandProposal.status'
    db.add_column(u'brand_proposal', 'status',
                  self.gf('django.db.models.fields.NullBooleanField')(null=True, db_column=u'STATUS', blank=True),
                  keep_default=False)
    # NOTE(review): truncating immediately after adding the column discards
    # all existing proposal data -- presumably intentional (stale rows can't
    # carry a meaningful status), but confirm before relying on this.
    db.execute("TRUNCATE TABLE brand_proposal CASCADE;")
    db.execute("TRUNCATE TABLE brand_proposal_review;")
def forwards(self, orm):
    """Create a full-text search index on main_event.transcript.

    PostgreSQL-only: GIN indexes and to_tsvector() are Postgres features.
    """
    # You are running PostgresSQL right
    db.execute("""
        CREATE INDEX main_event_transcript_fts_idx
            ON main_event
            USING gin(to_tsvector('english', transcript));
        """)
def forwards(self, orm): db.execute(""" CREATE FUNCTION set_fault_simplegeom(target_fault_id integer) RETURNS VOID AS $$ DECLARE sections_united geometry; BEGIN SELECT ST_Union(fault_section_view.geom) FROM gem.fault_section_view JOIN gem.observations_faultsection_fault ON (fault_section_view.id = observations_faultsection_fault.faultsection_id) WHERE observations_faultsection_fault.fault_id = target_fault_id INTO sections_united; UPDATE gem.observations_fault SET simple_geom = ST_Multi(ST_LongestLine(sections_united, sections_united)) WHERE id = target_fault_id; END; $$ LANGUAGE plpgsql VOLATILE STRICT; """)
def backwards(self, orm):
    """Drop the full-text index on the Thread table (MySQL only)."""
    if db.backend_name != 'mysql':
        return
    thread_table = orm['askbot.Thread']._meta.db_table
    # Only attempt the drop where the storage engine supports FTS indexes.
    if mysql_table_supports_full_text_search(thread_table):
        db.execute(get_drop_index_sql(INDEX_NAME, thread_table))
def test_change_foreign_key_target(self):
    """Verify alter_column can repoint a FK at a different target table."""
    # Tables for FK to target
    User = db.mock_model(model_name='User', db_table='auth_user', db_tablespace='', pk_field_name='id', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
    db.create_table("test_fk_changed_target", [
        ('eggs', models.IntegerField(primary_key=True)),
    ])
    Egg = db.mock_model(model_name='Egg', db_table='test_fk_changed_target', db_tablespace='', pk_field_name='eggs', pk_field_type=models.AutoField, pk_field_args=[], pk_field_kwargs={})
    # Table with a foreign key to the wrong table
    db.create_table("test_fk_changing", [
        ('egg', models.ForeignKey(User, null=True)),
    ])
    db.execute_deferred_sql()

    # Change foreign key pointing
    db.alter_column("test_fk_changing", "egg_id", models.ForeignKey(Egg, null=True))
    db.execute_deferred_sql()

    # Test that it is pointing at the right table now
    # Use an id guaranteed NOT to exist in auth_user, so the insert can only
    # succeed if the FK now targets test_fk_changed_target instead.
    try:
        non_user_id = db.execute("SELECT MAX(id) FROM auth_user")[0][0] + 1
    except (TypeError, IndexError):
        # Got a "None" or no records, treat as 0
        non_user_id = 17
    db.execute("INSERT INTO test_fk_changed_target (eggs) VALUES (%s)", [non_user_id])
    db.execute("INSERT INTO test_fk_changing (egg_id) VALUES (%s)", [non_user_id])
    db.commit_transaction()
    db.start_transaction()  # The test framework expects tests to end in transaction
def forwards(self, orm):
    """Add User.last_login and convert password_time to a float column."""
    # Adding field 'User.last_login'
    db.add_column('tomato_user', 'last_login',
                  self.gf('django.db.models.fields.FloatField')(default=1352370115.345284),
                  keep_default=False)

    # Changing field 'User.password_time'
    # NOTE(review): "USING 0.0" resets every existing password_time to 0.0
    # rather than converting the old values -- confirm this was intended.
    db.execute("ALTER TABLE tomato_user ALTER password_time TYPE float USING 0.0;")
def backwards(self, orm):
    """
    Custom migration to remove 'has_image' field from Posting table.
    """
    # Drop the index first, then the column it covers.
    db.execute("""
        DROP INDEX shared_posting_has_image;
        ALTER TABLE shared_posting DROP COLUMN has_image;
        """)
def forwards(self, orm):
    """
    Custom migration to add 'has_image' field to Posting table.
    """
    # NOT NULL with DEFAULT FALSE backfills existing rows; the btree index
    # supports filtering postings by image presence.
    db.execute("""
        ALTER TABLE shared_posting ADD COLUMN has_image boolean NOT NULL DEFAULT FALSE;
        CREATE INDEX shared_posting_has_image ON shared_posting USING btree(has_image);
        """)
def forwards(self, orm):
    """Remap archive_id from the legacy id_archiprod to the new id, then
    clean bogus time values and convert tables to UTF-8 (MySQL syntax)."""
    # One cursor is enough for the whole batch; the original opened seven.
    cursor = connection.cursor()

    # All five relation tables need the same id remap via id_archiprod.
    for table in ('archives_archive_collectivities',
                  'archives_archive_tags',
                  'archives_archiveparticipant',
                  'archives_contract',
                  'archives_media'):
        cursor.execute("UPDATE %s as t1 INNER JOIN archives_archive AS t2 ON t1.`archive_id` = t2.`id_archiprod` SET t1.`archive_id` = t2.`id`" % table)

    # Null out placeholder time values.
    cursor.execute("UPDATE archives_archive SET time=NULL WHERE time=''")
    cursor.execute("UPDATE archives_archive SET time=NULL WHERE time=':'")

    #fix ACANTHES MIGRATION ARCHIVES 0018
    db.execute('ALTER TABLE archives_archive CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci')
    db.execute('ALTER TABLE archives_media CONVERT TO CHARACTER SET utf8 COLLATE utf8_unicode_ci')

    transaction.commit_unless_managed()
def forwards(self, orm):
    """Adjust Asset/AssetModel fields and move category from Asset to its model."""
    # Changing field 'Asset.deprecation_rate'
    db.alter_column('ralph_assets_asset', 'deprecation_rate', self.gf('django.db.models.fields.DecimalField')(max_digits=5, decimal_places=2))
    # Adding field 'AssetModel.category'
    db.add_column('ralph_assets_assetmodel', 'category',
                  self.gf('django.db.models.fields.related.ForeignKey')(to=orm['ralph_assets.AssetCategory'], null=True, blank=True),
                  keep_default=False)

    # Adding field 'AssetModel.power_consumption'
    db.add_column('ralph_assets_assetmodel', 'power_consumption',
                  self.gf('django.db.models.fields.IntegerField')(default=0, blank=True),
                  keep_default=False)

    # Adding field 'AssetModel.height_of_device'
    db.add_column('ralph_assets_assetmodel', 'height_of_device',
                  self.gf('django.db.models.fields.IntegerField')(default=0, blank=True),
                  keep_default=False)

    # Migrate categories from assets to assets models
    # NOTE(review): this iterates the *live* Asset model rather than the
    # frozen orm['ralph_assets.Asset'] -- South convention is to use the
    # frozen ORM so the migration stays valid as the model evolves; verify.
    # Each `asset` is a (category_id, model_id) tuple feeding the two %s.
    for asset in Asset.objects.values_list(
            'category_id', 'model_id').filter():
        db.execute(
            "UPDATE ralph_assets_assetmodel SET category_id = %s "
            "WHERE id = %s", asset)
def forwards(self, orm):
    """Relax several lab fields to nullable floats; convert Clay.humidity type."""
    # Changing field 'Sand.dirt'
    db.alter_column('lab_sand', 'dirt', self.gf('django.db.models.fields.TextField')(null=True))

    # Changing field 'Sand.module_size'
    db.alter_column('lab_sand', 'module_size', self.gf('django.db.models.fields.FloatField')(null=True))

    # Changing field 'Sand.particle_size'
    db.alter_column('lab_sand', 'particle_size', self.gf('django.db.models.fields.FloatField')(null=True))

    # Changing field 'Bar.humidity_transporter'
    db.alter_column('lab_bar', 'humidity_transporter', self.gf('django.db.models.fields.FloatField')(null=True))

    # Changing field 'Clay.inclusion'
    db.alter_column('lab_clay', 'inclusion', self.gf('django.db.models.fields.FloatField')(null=True))

    # Changing field 'Clay.humidity'
    # Pre-cast via raw SQL with a USING clause (PostgreSQL will not cast the
    # old type automatically), then let South finish the column change.
    db.execute('ALTER TABLE "lab_clay" ALTER "humidity" SET DATA TYPE numeric(10,2) USING humidity::numeric(10,2);')
    db.alter_column('lab_clay', 'humidity', self.gf('django.db.models.fields.FloatField')())

    # Changing field 'Clay.sand'
    db.alter_column('lab_clay', 'sand', self.gf('django.db.models.fields.FloatField')(null=True))

    # Changing field 'Clay.dust'
    db.alter_column('lab_clay', 'dust', self.gf('django.db.models.fields.FloatField')(null=True))
def forwards(self, orm):
    """Create the Recruit model (plus its reference M2M) and widen the two
    Title*Diff id columns to plain integers."""
    # Adding model 'Recruit'
    db.create_table('hr_recruit', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True)),
        ('recruiter', self.gf('django.db.models.fields.related.ForeignKey')(related_name='recruiter', to=orm['auth.User'])),
    ))
    db.send_create_signal('hr', ['Recruit'])

    # Adding M2M table for field reference on 'Recruit'
    db.create_table('hr_recruit_reference', (
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('recruit', models.ForeignKey(orm['hr.recruit'], null=False)),
        ('user', models.ForeignKey(orm['auth.user'], null=False))
    ))
    db.create_unique('hr_recruit_reference', ['recruit_id', 'user_id'])

    # Both diff tables get the identical id-type change; Postgres needs raw
    # DDL (South's alter_column would try to re-add a serial default).
    on_postgres = 'postgres' in db.backend_name.lower()
    for diff_table in ('hr_titlecompodiff', 'hr_titlememberdiff'):
        if on_postgres:
            db.execute("ALTER TABLE %s ALTER COLUMN id TYPE integer;" % diff_table)
        else:
            db.alter_column(diff_table, 'id', self.gf('django.db.models.fields.AutoField')(primary_key=True))
def forwards(self, orm): db.execute(""" CREATE OR REPLACE FUNCTION can_view_superusers() returns BOOLEAN AS $$ BEGIN RETURN 'admins' = ANY( SELECT current_user UNION SELECT b.rolname FROM pg_roles AS b JOIN pg_auth_members AS m ON m.roleid = b.oid JOIN pg_roles AS a ON m.member = a.oid WHERE a.rolname = user ); END $$ LANGUAGE plpgsql; CREATE VIEW accounts_avaiable_users AS SELECT * FROM accounts_user WHERE NOT is_superuser OR can_view_superusers(); GRANT SELECT ON accounts_avaiable_users TO managers; GRANT SELECT ON accounts_avaiable_users TO admins; REVOKE ALL ON accounts_user FROM managers; """)
def seed(self):
    """Replay the stats fixture SQL, one statement per line.

    Fixes over the original: the file handle is closed via ``with``; blank
    lines are skipped instead of being executed; and instead of chopping the
    last character off every line (which ate the trailing newline -- or a
    real character on a final line without one), each line is whitespace-
    stripped. The fixture is assumed to hold one complete statement per line.
    """
    # get sql file
    seed_path = os.path.join(
        SETTINGS.PROJECT_ROOT, '..', 'citi_digits', 'migrations') + "/stats_fixture.sql"
    with open(seed_path) as seed_file:
        for line in seed_file:
            statement = line.strip()
            if statement:
                db.execute(statement)
def test_datetime_default(self):
    """
    Test that defaults are created correctly for datetime columns
    """
    end_of_world = datetime.datetime(2012, 12, 21, 0, 0, 1)

    try:
        from django.utils import timezone
    except ImportError:
        # Older Django without timezone support: compare naive datetimes.
        pass
    else:
        from django.conf import settings
        if getattr(settings, 'USE_TZ', False):
            end_of_world = end_of_world.replace(tzinfo=timezone.utc)

    db.create_table("test_datetime_def", [
        ('col0', models.IntegerField(null=True)),
        ('col1', models.DateTimeField(default=end_of_world)),
        ('col2', models.DateTimeField(null=True)),
    ])
    # Default applied via alter_column and via add_column must both stick.
    db.alter_column("test_datetime_def", "col2", models.DateTimeField(default=end_of_world))
    db.add_column("test_datetime_def", "col3", models.DateTimeField(default=end_of_world))

    # Insert only col0; col1..col3 must come back as the default.
    db.execute("insert into test_datetime_def (col0) values (null)")
    ends = db.execute("select col1,col2,col3 from test_datetime_def")[0]
    # assertEqual replaces the deprecated failUnlessEqual alias.
    self.assertEqual(len(ends), 3)
    for e in ends:
        self.assertEqual(e, end_of_world)

    # Clean up, matching the other tests in this suite.
    db.delete_table("test_datetime_def")
def forwards(self, orm):
    """Constrain each location coordinate to a finite value."""
    # One CHECK constraint per axis, rejecting NaN and Infinity.
    for axis in ('x', 'y', 'z'):
        db.execute('''
            ALTER TABLE location
                ADD CONSTRAINT location_%(dim)s_finite
                CHECK (location_%(dim)s <> 'NaN' AND location_%(dim)s <> 'Infinity')
            ''' % {'dim': axis})
def forwards(self, orm): db.execute(""" DROP FUNCTION IF EXISTS timetrack_user_report(INTEGER, DATE, DATE); CREATE OR REPLACE FUNCTION timetrack_user_report (userid INTEGER, from_date DATE, to_date DATE) RETURNS TABLE ( description CHAR(200), project CHAR(50), estimate INTEGER, totaltime INTERVAL ) AS $$ SELECT t.description, p.name AS project, t.estimate, a.totaltime FROM ( SELECT wl.task_id, wl.user_id, SUM(wl.finish_at - wl.start_at) AS totaltime FROM timetrack_worklog AS wl JOIN timetrack_task AS t ON t.id = wl.task_id WHERE user_id = $1 AND wl.finish_at <= $3 AND wl.start_at >= $2 GROUP BY wl.task_id, wl.user_id ) AS a JOIN timetrack_task AS t ON t.id = a.task_id JOIN accounts_user AS u ON u.id = a.user_id JOIN timetrack_project AS p ON p.id = t.project_id; $$ LANGUAGE sql; """)
def forwards(self, orm):
    """Rebuild the instructor id sequence and replace Course's raw id columns
    with proper foreign keys (PostgreSQL-specific sequence DDL)."""
    # Recreate the sequence and resume it from the current max id, then
    # re-attach it as the column default and tie its lifetime to the column.
    db.execute("DROP SEQUENCE instructor_id_seq CASCADE")
    db.execute("CREATE SEQUENCE instructor_id_seq")
    db.execute("SELECT setval('instructor_id_seq', (SELECT MAX(id) FROM instructor))")
    db.execute("ALTER TABLE instructor ALTER COLUMN id SET DEFAULT nextval('instructor_id_seq'::regclass)")
    db.execute("ALTER SEQUENCE instructor_id_seq OWNED BY instructor.id")

    # Deleting field 'Course.instructorid'
    db.delete_column(u'course', 'instructorId')

    # Deleting field 'Course.institutionid'
    db.delete_column(u'course', 'institutionId')

    # Adding field 'Course.institution'
    # (re-uses the old db_column names; default=1 backfills existing rows)
    db.add_column(u'course', 'institution',
                  self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['coresite.Institution'], db_column='institutionId'),
                  keep_default=False)

    # Adding field 'Course.instructor'
    db.add_column(u'course', 'instructor',
                  self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['coresite.Instructor'], db_column='instructorId'),
                  keep_default=False)

    # Adding field 'Course.cost'
    db.add_column(u'course', 'cost',
                  self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=7, decimal_places=2, blank=True),
                  keep_default=False)
def forwards(self, orm):
    """Add JailTemplate.jt_os, defaulting to FreeBSD and marking the
    non-stock templates as Linux."""
    # Adding field 'JailTemplate.jt_os'
    db.add_column(
        u'jails_jailtemplate',
        'jt_os',
        self.gf('django.db.models.fields.CharField')(default='FreeBSD', max_length=120),
        keep_default=False,
    )
    # Everything except the three stock FreeBSD templates is Linux-based.
    db.execute("update jails_jailtemplate set jt_os = 'Linux' where jt_name not in ('pluginjail', 'portjail', 'standard')")
def forwards(self, orm):
    """Create archived contacts and associate them to contact records."""
    db.execute(CREATE_CONTACTS)
    db.execute(LINK_CONTACTS)
    # Deleting field 'ContactRecord.contact_name' -- the name now lives on
    # the linked contact row. (The original used a bare string literal here,
    # which is a no-op expression statement, not a comment.)
    db.delete_column(u'mypartners_contactrecord', 'contact_name')
def forwards(self, orm):
    """Drop the contact_update_message trigger function, tolerating its absence."""
    try:
        db.execute("drop function contact_update_message() cascade;")
    except DatabaseError:
        # Function didn't exist; roll back so the connection stays usable.
        transaction.rollback()
def backwards(self, orm):
    """Collapse the photos_format <-> sites M2M back into a site_id column.

    For each format: keep its lowest site_id on the row itself, duplicate the
    format for every remaining site, then drop site-less leftovers.

    Fixes over the original: the DELETE referenced an undefined name
    ``site_id`` (NameError; the chosen site is ``min_site``), and the MIN()
    lookup indexed only ``[0]``, passing a whole row tuple as the SQL
    parameter instead of the scalar value.
    """
    result = db.execute("SELECT id FROM photos_format")
    for row in result:
        format_id = row[0]
        # take the first site for every format...
        min_site = db.execute(
            "SELECT MIN(site_id) FROM photos_format_sites WHERE format_id = %s",
            (format_id,))[0][0]
        # ... put it onto the format ...
        db.execute("UPDATE photos_format SET site_id = %s WHERE id = %s",
                   (min_site, format_id))
        # ... and remove the site from the m2m
        db.execute("DELETE FROM photos_format_sites WHERE format_id = %s AND site_id = %s",
                   (format_id, min_site))

    # copy all the remaining sites from m2m to format as new rows
    db.execute(
        """
        INSERT INTO photos_format (site_id, name, %s)
            SELECT pfs.site_id, pf.name, %s
            FROM
                photos_format as pf INNER JOIN photos_format_sites as pfs ON (pfs.format_id = pf.id)
        """ % (", ".join(FORMAT_FIELDS), ", ".join(map(lambda f: "pf." + f, FORMAT_FIELDS)))
    )

    # DELETE formats with no site
    db.execute("DELETE FROM photos_format WHERE site_id IS NULL")
def forwards(self):
    # Make the selector FK mandatory at the database level.
    db.execute("ALTER TABLE sa_useraccess ALTER selector_id SET NOT NULL")
def forwards(self):
    # Widen username to 75 chars (to fit email-style usernames).
    db.execute("ALTER TABLE auth_user ALTER username TYPE VARCHAR(75)")
def forwards(self):
    # Clear the task tables before the schema change.
    db.execute("DELETE FROM sa_maptask")
    db.execute("DELETE FROM sa_reducetask")
    # NOTE(review): explicit COMMIT via raw SQL -- presumably to persist the
    # deletes before the DDL below runs outside South's transaction handling.
    db.execute("COMMIT")
    # Replace reduce_script with a generic script column.
    db.add_column("sa_reducetask", "script", models.TextField("Script"))
    db.delete_column("sa_reducetask", "reduce_script")
def backwards(self):
    # Restore the original 30-char username limit.
    db.execute("ALTER TABLE auth_user ALTER username TYPE VARCHAR(30)")
def forwards(self, orm):
    # Rewrite matching password hashes: replace the first 2 characters (the
    # LIKE pattern matches a 'bc$' prefix; SUBSTRING FROM 3 keeps the rest)
    # with the '******' literal.
    # NOTE(review): the '******' literal looks like it may have been redacted
    # or obfuscated in this copy of the migration -- verify the intended
    # replacement prefix against the companion backwards() migration.
    db.execute(
        "UPDATE auth_user SET password = '******' || SUBSTRING(password FROM 3) WHERE password LIKE 'bc$%%'"
    )
def forwards(self, orm):
    """Drop the redundant "_like" (varchar_pattern_ops) indexes on StockState
    id columns, if they exist (PostgreSQL syntax)."""
    for column in ('case_id', 'product_id', 'section_id'):
        db.execute("DROP INDEX IF EXISTS commtrack_stockstate_%s_like" % column)
def forwards(self, orm):
    """Rewrite rule rows whose serialized data matches the invalid form with
    the corrected serialization (constants defined at module level)."""
    # Dropped the stray statement-terminating semicolons from the original.
    db.execute('UPDATE sentry_rule SET data = %s WHERE data = %s',
               [NEW_RULE_VALID, NEW_RULE_INVALID])
    db.execute('UPDATE sentry_rule SET data = %s WHERE data = %s',
               [REGRESSION_RULE_VALID, REGRESSION_RULE_INVALID])
def forwards(self, orm): for p in orm.Presentation.objects.values('id', 'presenter'): print "Presentation data: " + str(p) db.execute( "insert into speakers_presentation_presenter (presentation_id, userprofile_id) values(%s, %s)", [int(p['id']), int(p['presenter'])])
def forwards(self, orm):
    "Write your forwards methods here."
    # The current year is interpolated (via %d below) into the age
    # calculation, so the view's ages are fixed at creation time.
    year_now = datetime.datetime.now().year
    sql = \
        """
        create or replace view contacts_export as
        SELECT "rapidsms_contact"."id",
            "rapidsms_contact"."name" as name,
            (SELECT "rapidsms_connection"."identity" FROM "rapidsms_connection" WHERE "rapidsms_connection"."contact_id" = "rapidsms_contact"."id" LIMIT 1) as mobile,
            "rapidsms_contact"."language",
            (SELECT DATE("script_scriptsession"."start_time") FROM "script_scriptsession" INNER JOIN "rapidsms_connection" ON ( "script_scriptsession"."connection_id" = "rapidsms_connection"."id" ) WHERE "rapidsms_connection"."contact_id" = "rapidsms_contact"."id" LIMIT 1) as autoreg_join_date,
            (SELECT "rapidsms_httprouter_message"."date" FROM "rapidsms_httprouter_message" WHERE "rapidsms_httprouter_message"."direction" = 'I' and "rapidsms_httprouter_message"."application" = 'unregister' and "rapidsms_httprouter_message"."connection_id" = ( SELECT "rapidsms_connection"."id" FROM "rapidsms_connection" WHERE "rapidsms_connection"."contact_id" = "rapidsms_contact"."id" LIMIT 1 ) LIMIT 1 ) as quit_date,
            "locations_location"."name" as district,
            ( %d-EXTRACT('year' FROM "rapidsms_contact"."birthdate")) as age,
            "rapidsms_contact"."gender",
            "rapidsms_contact"."health_facility" as facility,
            (SELECT "locations_location"."name" FROM "locations_location" WHERE "locations_location"."id"="rapidsms_contact"."village_id") as village,
            (SELECT "auth_group"."name" FROM "auth_group" INNER JOIN "rapidsms_contact_groups" ON ( "auth_group"."id" = "rapidsms_contact_groups"."group_id" ) WHERE "rapidsms_contact_groups"."contact_id" = "rapidsms_contact"."id" order by "auth_group"."id" desc LIMIT 1) as group,
            (SELECT "rapidsms_httprouter_message"."text" FROM "rapidsms_httprouter_message" JOIN "poll_response" ON "poll_response"."message_id"= "rapidsms_httprouter_message"."id" where poll_id=121 and contact_id="rapidsms_contact"."id" and has_errors='f' limit 1) as source,
            (SELECT COUNT(*) FROM "poll_response" WHERE "poll_response"."contact_id"="rapidsms_contact"."id") as responses,
            (SELECT DISTINCT COUNT(*) FROM "poll_poll_contacts" WHERE "poll_poll_contacts"."contact_id"="rapidsms_contact"."id" GROUP BY "poll_poll_contacts"."contact_id") as questions,
            (SELECT DISTINCT count(*) FROM "rapidsms_httprouter_message" WHERE "rapidsms_httprouter_message"."direction" ='I' and "rapidsms_httprouter_message"."connection_id" = ( SELECT "rapidsms_connection"."id" FROM "rapidsms_connection" WHERE "rapidsms_connection"."contact_id" = "rapidsms_contact"."id" LIMIT 1 ) ) as incoming
        FROM "rapidsms_contact"
        LEFT JOIN "locations_location" ON "rapidsms_contact"."reporting_location_id" = "locations_location"."id";
        """ \
        % year_now
    db.execute(sql)
def backwards(self, orm):
    # Reverse rewrite: strip the first 6 characters (the LIKE pattern matches
    # a 'bcrypt$' prefix; SUBSTRING FROM 7 keeps the remainder) and prepend
    # the '******' literal.
    # NOTE(review): as in forwards(), the '******' literal looks possibly
    # redacted/obfuscated -- verify the intended prefix.
    db.execute(
        "UPDATE auth_user SET password = '******' || SUBSTRING(password FROM 7) WHERE password LIKE 'bcrypt$%%'"
    )
def backwards(self):
    # Remove the free_ip helper created by the forwards migration.
    db.execute("DROP FUNCTION free_ip(INTEGER,CIDR)")
def test_unique(self):
    """
    Tests creating/deleting unique constraints.
    """
    db.create_table("test_unique2", [
        ('id', models.AutoField(primary_key=True)),
    ])
    db.create_table("test_unique", [
        ('spam', models.BooleanField(default=False)),
        ('eggs', models.IntegerField()),
        ('ham', models.ForeignKey(db.mock_model('Unique2', 'test_unique2'))),
    ])
    db.execute_deferred_sql()
    # Add a constraint
    db.create_unique("test_unique", ["spam"])
    db.execute_deferred_sql()
    # Shouldn't do anything during dry-run
    db.dry_run = True
    db.delete_unique("test_unique", ["spam"])
    db.dry_run = False
    db.delete_unique("test_unique", ["spam"])
    db.create_unique("test_unique", ["spam"])
    # Special preparations for Sql Server
    if db.backend_name == "pyodbc":
        db.execute("SET IDENTITY_INSERT test_unique2 ON;")
    db.execute("INSERT INTO test_unique2 (id) VALUES (1)")
    db.execute("INSERT INTO test_unique2 (id) VALUES (2)")
    db.commit_transaction()
    db.start_transaction()

    # Test it works: two rows with distinct spam succeed, a duplicate fails.
    TRUE = (True, )
    FALSE = (False, )
    db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
    db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE)
    try:
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 2, 1)", FALSE)
    except:
        # Constraint violation aborts the transaction; reset it.
        db.rollback_transaction()
    else:
        self.fail("Could insert non-unique item.")

    # Drop that, add one only on eggs
    db.delete_unique("test_unique", ["spam"])
    db.execute("DELETE FROM test_unique")
    db.create_unique("test_unique", ["eggs"])
    db.start_transaction()

    # Test similarly
    db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
    db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 2)", FALSE)
    try:
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", TRUE)
    except:
        db.rollback_transaction()
    else:
        self.fail("Could insert non-unique item.")

    # Drop those, test combined constraints
    db.delete_unique("test_unique", ["eggs"])
    db.execute("DELETE FROM test_unique")
    db.create_unique("test_unique", ["spam", "eggs", "ham_id"])
    db.start_transaction()

    # Test similarly: only the exact (spam, eggs, ham_id) triple must clash.
    db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
    db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 1, 1)", FALSE)
    try:
        db.execute("INSERT INTO test_unique (spam, eggs, ham_id) VALUES (%s, 0, 1)", TRUE)
    except:
        db.rollback_transaction()
    else:
        self.fail("Could insert non-unique pair.")
    db.delete_unique("test_unique", ["spam", "eggs", "ham_id"])
    db.start_transaction()
def forwards(self, orm): "Write your forwards methods here." # Note: Remember to use orm['appname.ModelName'] rather than "from appname.models..." db.execute( '''update pages_page set template = 'cms/' || substr(template, 4);''' )
def forwards(self, orm):
    # Drop any stale analytics_event table so the rename below cannot collide.
    db.execute(u'DROP TABLE IF EXISTS analytics_event')
    db.rename_table(u'main_event', u'analytics_event')
def backwards(self, orm):
    "Write your backwards methods here."
    # Reverse of forwards(): restore the 'p3/' template prefix;
    # substr(template, 5) drops the 'cms/' prefix (first 4 characters).
    db.execute(
        '''update pages_page set template = 'p3/' || substr(template, 5);'''
    )
def forwards(self):
    # Run the SQL script held in the module-level SQL constant.
    db.execute(SQL)