def forwards(self, orm):
    """Create the 'network' and 'network_kv' tables, add their unique
    constraints, and load seed data from network.sql.

    Called by South with a frozen ORM; must mirror backwards() exactly.
    """
    # Adding model 'Network'
    db.create_table('network', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('vlan', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vlan.Vlan'], null=True, on_delete=models.SET_NULL, blank=True)),
        ('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['site.Site'], null=True, on_delete=models.SET_NULL, blank=True)),
        ('ip_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
        ('ip_upper', self.gf('django.db.models.fields.BigIntegerField')(blank=True)),
        ('ip_lower', self.gf('django.db.models.fields.BigIntegerField')(blank=True)),
        ('network_str', self.gf('django.db.models.fields.CharField')(max_length=49)),
        ('prefixlen', self.gf('django.db.models.fields.PositiveIntegerField')()),
        ('dhcpd_raw_include', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
    ))
    db.send_create_signal('network', ['Network'])
    # Adding unique constraint on 'Network', fields ['ip_upper', 'ip_lower', 'prefixlen']
    db.create_unique('network', ['ip_upper', 'ip_lower', 'prefixlen'])
    # Adding model 'NetworkKeyValue'
    db.create_table('network_kv', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('key', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ('value', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ('is_quoted', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('is_option', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('is_statement', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('has_validator', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('network', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['network.Network'])),
    ))
    db.send_create_signal('network', ['NetworkKeyValue'])
    # Adding unique constraint on 'NetworkKeyValue', fields ['key', 'value', 'network']
    db.create_unique('network_kv', ['key', 'value', 'network_id'])
    # FIX: use a context manager so the SQL file handle is closed instead
    # of being leaked (the original relied on GC to close it).
    with open("cyder/cydhcp/network/sql/network.sql") as f:
        db.execute_many(f.read())
def backwards(self, orm):
    """Relax every tracked timestamp column back to a NULLable timestamp."""
    statements = []
    for table_name, column_name in TIMESTAMP_COLUMNS:
        statements.append(
            "ALTER TABLE {table_name} MODIFY COLUMN {column_name} timestamp NULL;".format(
                table_name=table_name, column_name=column_name,
            )
        )
    db.execute_many('\n'.join(statements))
def forwards(self, orm):
    """Create the 'ptr' table, its unique constraint, the PTR<->View M2M
    table, and load seed data from ptr.sql."""
    # Adding model 'PTR'
    db.create_table('ptr', (
        ('ip_str', self.gf('django.db.models.fields.CharField')(max_length=39)),
        ('ip_upper', self.gf('django.db.models.fields.BigIntegerField')(null=True, blank=True)),
        ('ip_lower', self.gf('django.db.models.fields.BigIntegerField')(null=True, blank=True)),
        ('ip_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('reverse_domain', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['domain.Domain'], blank=True)),
        ('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ('ttl', self.gf('django.db.models.fields.PositiveIntegerField')(default=3600, null=True, blank=True)),
        ('description', self.gf('django.db.models.fields.CharField')(max_length=1000, null=True, blank=True)),
    ))
    db.send_create_signal('ptr', ['PTR'])
    # Adding unique constraint on 'PTR', fields ['ip_str', 'ip_type', 'name']
    db.create_unique('ptr', ['ip_str', 'ip_type', 'name'])
    # Adding M2M table for field views on 'PTR'
    db.create_table('ptr_views', (
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('ptr', models.ForeignKey(orm['ptr.ptr'], null=False)),
        ('view', models.ForeignKey(orm['view.view'], null=False))
    ))
    db.create_unique('ptr_views', ['ptr_id', 'view_id'])
    # FIX: close the SQL file instead of leaking the handle.
    with open("cyder/cydns/ptr/sql/ptr.sql") as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Load the gis_neighborhoods dump: temporarily add the dump-only
    columns, import the data, tag every row as NY, then drop the columns.
    """
    db.start_transaction()
    # The DB dump contains columns we don't ultimately want. Use South's
    # fake ORM dictionary to create them so the dump loads cleanly.
    # BUG FIX: the original lines ended with stray trailing commas, turning
    # each statement into a discarded 1-tuple expression — harmless at
    # runtime but an obvious copy-paste error; removed.
    db.add_column('gis_neighborhoods', 'stacked', orm['bmabr.neighborhood:stacked'])
    db.add_column('gis_neighborhoods', 'annoline1', orm['bmabr.neighborhood:annoline1'])
    db.add_column('gis_neighborhoods', 'annoline2', orm['bmabr.neighborhood:annoline2'])
    db.add_column('gis_neighborhoods', 'annoline3', orm['bmabr.neighborhood:annoline3'])
    db.add_column('gis_neighborhoods', 'annoangle', orm['bmabr.neighborhood:annoangle'])
    db.commit_transaction()
    # Now load the data.
    db.start_transaction()
    HERE = os.path.abspath(os.path.dirname(__file__))
    sql_path = os.path.abspath(
        os.path.join(HERE, '..', '..', 'sql', 'gis_neighborhoods.sql'))
    # FIX: close the SQL file instead of leaking the handle.
    with open(sql_path) as f:
        db.execute_many(f.read())
    db.execute("UPDATE gis_neighborhoods SET state = 'NY'")
    db.commit_transaction()
    # Now clean up the dump-only columns we don't want to keep.
    db.start_transaction()
    db.delete_column('gis_neighborhoods', 'stacked')
    db.delete_column('gis_neighborhoods', 'annoangle')
    db.delete_column('gis_neighborhoods', 'annoline1')
    db.delete_column('gis_neighborhoods', 'annoline2')
    db.delete_column('gis_neighborhoods', 'annoline3')
    db.commit_transaction()
def forwards(self, orm):
    """Create the 'address_record' table, its unique constraint, the
    AddressRecord<->View M2M table, and load seed data."""
    # Adding model 'AddressRecord'
    db.create_table('address_record', (
        ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ('ip_str', self.gf('django.db.models.fields.CharField')(max_length=39)),
        ('ip_upper', self.gf('django.db.models.fields.BigIntegerField')(null=True, blank=True)),
        ('ip_lower', self.gf('django.db.models.fields.BigIntegerField')(null=True, blank=True)),
        ('ip_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
        ('domain', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['domain.Domain'])),
        ('label', self.gf('django.db.models.fields.CharField')(max_length=63, null=True, blank=True)),
        ('fqdn', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=255, null=True, blank=True)),
        ('ttl', self.gf('django.db.models.fields.PositiveIntegerField')(default=3600, null=True, blank=True)),
        ('description', self.gf('django.db.models.fields.CharField')(max_length=1000, null=True, blank=True)),
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
    ))
    db.send_create_signal('address_record', ['AddressRecord'])
    # Adding unique constraint on 'AddressRecord', fields ['label', 'domain', 'fqdn', 'ip_upper', 'ip_lower', 'ip_type']
    db.create_unique('address_record', ['label', 'domain_id', 'fqdn', 'ip_upper', 'ip_lower', 'ip_type'])
    # Adding M2M table for field views on 'AddressRecord'
    db.create_table('address_record_views', (
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('addressrecord', models.ForeignKey(orm['address_record.addressrecord'], null=False)),
        ('view', models.ForeignKey(orm['view.view'], null=False))
    ))
    db.create_unique('address_record_views', ['addressrecord_id', 'view_id'])
    # FIX: close the SQL file instead of leaking the handle.
    with open("cyder/cydns/address_record/sql/addressrecord.sql") as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Detach politicalbuy from its parent-link primary key and give it an
    independent auto-incrementing id backed by a new sequence."""
    db.delete_foreign_key('fccpublicfiles_politicalbuy', 'publicdocument_ptr_id')
    db.rename_column('fccpublicfiles_politicalbuy', 'publicdocument_ptr_id', 'id')
    # Seed the sequence past the highest existing document id so new rows
    # never collide with rows migrated from the parent table.
    sequence_sql = '''
    create sequence fccpublicfiles_politicalbuy_id_seq;
    select setval('fccpublicfiles_politicalbuy_id_seq', max(id)) from fccpublicfiles_publicdocument;
    alter table fccpublicfiles_politicalbuy alter column id set default nextval('fccpublicfiles_politicalbuy_id_seq');
    '''
    db.execute_many(sequence_sql)
def forwards(self, orm):
    """Load the gis_cityracks dump shipped in the app's sql/ directory."""
    curdir = os.path.dirname(os.path.abspath(__file__))
    sqldir = os.path.normpath(os.path.join(curdir, '..', '..', 'sql'))
    cityracks_path = os.path.join(sqldir, 'gis_cityracks.sql')
    # FIX: use a context manager so the handle is closed even if read() raises
    # (the original's manual close() was skipped on error).
    with open(cityracks_path) as f:
        sql = f.read()
    db.execute_many(sql)
def forwards(self, orm):
    """Convert the student_* tables (and the database default) to utf8 when
    running on MySQL; other backends need no conversion."""
    if db.backend_name != 'mysql':
        return
    db.execute_many("""
    ALTER DATABASE CHARACTER SET utf8 COLLATE utf8_general_ci;
    ALTER TABLE student_pendingemailchange CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
    ALTER TABLE student_pendingnamechange CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
    ALTER TABLE student_usertestgroup CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
    ALTER TABLE student_usertestgroup_users CONVERT TO CHARACTER SET utf8 COLLATE utf8_general_ci;
    """)
def forwards(self, orm):
    "Make Course.id == Course.number"
    # Constraint checks are deferred so foreign keys referencing
    # courses_course.id remain satisfiable while both the referencing
    # rows and the referenced ids are rewritten inside one transaction
    # (PostgreSQL syntax — assumes a Postgres backend; TODO confirm).
    sql = """
    SET CONSTRAINTS ALL DEFERRED;
    UPDATE "courses_coursemark" SET "course_id"="number" FROM "courses_course" WHERE "courses_course"."id" = "course_id";
    UPDATE "courses_course" SET "id"="number";
    """
    db.execute_many(sql)
def forwards(self, orm): SQLDIR = os.path.join(os.path.dirname(__file__),'..','sql') print "Sql directory: '%s'" % SQLDIR sqlfile = os.path.join(SQLDIR,'dbviews-mysql.sql') print "Sql file: '%s'" % sqlfile sql = open(sqlfile).read() print "Sql to execute: views: %s lines: %s" % (sql.count('create or replace view'), sql.count('\n')) db.execute_many(sql) #db.execute_many(sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)") #remove comments print "Sql executed!"
def forwards(self, orm):
    """Load the gzipped NYC streets dump and signal creation of NYCStreet."""
    # Adding model 'NYCStreet'
    sql_path = os.path.abspath(
        os.path.join(HERE, '..', '..', 'data', 'shps', 'nyc_streets',
                     'gis_nycstreets.sql.gz'))
    import gzip
    # FIX: close the gzip handle deterministically instead of leaking it
    # (try/finally rather than `with` keeps Python 2.6 compatibility).
    z = gzip.GzipFile(sql_path)
    try:
        db.execute_many(z.read())
    finally:
        z.close()
    db.send_create_signal('bmabr', ['NYCStreet'])
def forwards(self, orm):
    """Run the configsection.sql and domainsignature.sql scripts from ../sql."""
    basepath = os.path.dirname(os.path.dirname(__file__))
    configsql = os.path.join(basepath, 'sql', 'configsection.sql')
    signaturesql = os.path.join(basepath, 'sql', 'domainsignature.sql')
    # FIX: close each SQL file instead of leaking the handles.
    # configsection.sql
    with open(configsql) as f:
        db.execute_many(f.read())
    # domainsignature.sql
    with open(signaturesql) as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Mark every existing fccpublicfiles row as visible."""
    # Data-only migration; uses raw SQL rather than the frozen ORM.
    visibility_sql = """
    update fccpublicfiles_genericpublicdocument set is_visible = 't';
    update fccpublicfiles_politicalbuy set is_visible = 't';
    update fccpublicfiles_organization set is_visible = 't';
    update fccpublicfiles_politicalspot set is_visible = 't';
    update fccpublicfiles_role set is_visible = 't';
    update fccpublicfiles_person set is_visible = 't';
    """
    db.execute_many(visibility_sql)
def forwards(self, orm):
    """Create the QuestionBulkAssignments tables with hand-written SQL and
    make UserProfile.user a unique one-to-one.

    Raw SQL is used (as in migration 0007) so the country foreign key can
    target the latin1 varchar column sqp_country.iso; letting South build
    the table produces a collation mismatch and the FK fails.
    """
    # NOTE(review): the constraint name below appears to contain a space
    # ("user_id_refs_id_ 178ee5d9"). Legal inside MySQL backquotes, but it
    # looks like a line-wrap artifact — confirm against the original file.
    db.execute_many("""
    CREATE TABLE `sqp_questionbulkassignments_users` (
        `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
        `questionbulkassignments_id` integer NOT NULL,
        `user_id` integer NOT NULL,
        UNIQUE (`questionbulkassignments_id`, `user_id`)
    ) ENGINE = InnoDB DEFAULT CHARSET = latin1;
    ALTER TABLE `sqp_questionbulkassignments_users` ADD CONSTRAINT `user_id_refs_id_ 178ee5d9` FOREIGN KEY (`user_id`) REFERENCES `auth_user` (`id`);
    """)
    db.execute("""
    CREATE TABLE `sqp_questionbulkassignments` (
        `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
        `study_id` integer NULL,
        `country_id` varchar(2) NULL,
        `language_id` integer NULL,
        `can_edit_text` bool NOT NULL,
        `can_edit_details` bool NOT NULL,
        `run_on_save` bool NOT NULL,
        `has_been_run` bool NOT NULL,
        `last_run_date` date NULL
    ) ENGINE = InnoDB DEFAULT CHARSET = latin1;
    """)
    # FK constraints are added one statement at a time after both tables exist.
    db.execute("""
    ALTER TABLE `sqp_questionbulkassignments` ADD CONSTRAINT `country_id_refs_iso_64c7b67d` FOREIGN KEY (`country_id`) REFERENCES `sqp_country` (`iso`);
    """)
    db.execute("""
    ALTER TABLE `sqp_questionbulkassignments` ADD CONSTRAINT `language_id_refs_id_7797b23c` FOREIGN KEY (`language_id`) REFERENCES `sqp_language` (`id`);
    """)
    db.execute("""
    ALTER TABLE `sqp_questionbulkassignments_users` ADD CONSTRAINT `questionbulkassignments_id_refs_id_32707ecc` FOREIGN KEY (`questionbulkassignments_id`) REFERENCES `sqp_questionbulkassignments` (`id`);
    """)
    db.send_create_signal('sqp', ['QuestionBulkAssignments'])
    # Changing field 'UserProfile.user'
    db.alter_column('sqp_userprofile', 'user_id', self.gf('django.db.models.fields.related.OneToOneField')(unique=True, to=orm['auth.User']))
    # Adding unique constraint on 'UserProfile', fields ['user']
    db.create_unique('sqp_userprofile', ['user_id'])
def backwards(self, orm):
    """Restore the three dropped Domain columns, then reinstate the
    original database view."""
    # Re-add the columns in the same order forwards() removed them, using
    # South's fake-ORM column specs.
    for column in ('forward_canonical', 'alias', 'is_active'):
        db.add_column('domain', column, orm['domain.domain:%s' % column])
    # replace with original view
    db.execute_many(DB_VIEWS_BACKWARDS)
def forwards(self, orm):
    """Swap in the new domain view, then drop the obsolete Domain columns."""
    # The view must be replaced first — it still references the old columns.
    db.execute_many(DB_VIEWS_FORWARDS)
    # Drop 'Domain.forward_canonical', 'Domain.alias' (db column alias_id)
    # and 'Domain.is_active'.
    for column in ('forward_canonical', 'alias_id', 'is_active'):
        db.delete_column('domain', column)
def create_mviews():
    """Setups material views according to:
    http://tech.jonathangardner.net/wiki/PostgreSQL/Materialized_Views

    As a result the following functions are created:
        create_matview(name, name)
        drop_matview(name)
        refresh_matview(name)
        incremental_refresh_matview(name)
    and a table: matviews
    """
    mviews_sql = os.path.join(SQL_PATH, 'mviews.sql')
    # FIX: close the SQL file instead of leaking the handle.
    with open(mviews_sql) as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Create the 'address_record' table, its unique constraint, the
    AddressRecord<->View M2M table, and load seed data."""
    # Adding model 'AddressRecord'
    db.create_table('address_record', (
        ('created', self.gf('django.db.models.fields.DateTimeField')(
            auto_now_add=True, blank=True)),
        ('modified', self.gf('django.db.models.fields.DateTimeField')(
            auto_now=True, blank=True)),
        ('ip_str', self.gf('django.db.models.fields.CharField')(max_length=39)),
        ('ip_upper', self.gf('django.db.models.fields.BigIntegerField')(
            null=True, blank=True)),
        ('ip_lower', self.gf('django.db.models.fields.BigIntegerField')(
            null=True, blank=True)),
        ('ip_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
        ('domain', self.gf('django.db.models.fields.related.ForeignKey')(
            to=orm['domain.Domain'])),
        ('label', self.gf('django.db.models.fields.CharField')(
            max_length=63, null=True, blank=True)),
        ('fqdn', self.gf('django.db.models.fields.CharField')(
            db_index=True, max_length=255, null=True, blank=True)),
        ('ttl', self.gf('django.db.models.fields.PositiveIntegerField')(
            default=3600, null=True, blank=True)),
        ('description', self.gf('django.db.models.fields.CharField')(
            max_length=1000, null=True, blank=True)),
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
    ))
    db.send_create_signal('address_record', ['AddressRecord'])
    # Adding unique constraint on 'AddressRecord', fields
    # ['label', 'domain', 'fqdn', 'ip_upper', 'ip_lower', 'ip_type']
    db.create_unique(
        'address_record',
        ['label', 'domain_id', 'fqdn', 'ip_upper', 'ip_lower', 'ip_type'])
    # Adding M2M table for field views on 'AddressRecord'
    db.create_table(
        'address_record_views',
        (('id', models.AutoField(
            verbose_name='ID', primary_key=True, auto_created=True)),
         ('addressrecord', models.ForeignKey(
             orm['address_record.addressrecord'], null=False)),
         ('view', models.ForeignKey(orm['view.view'], null=False))))
    db.create_unique('address_record_views', ['addressrecord_id', 'view_id'])
    # FIX: close the SQL file instead of leaking the handle.
    with open("cyder/cydns/address_record/sql/addressrecord.sql") as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Create the sqp_codingchange table with hand-written SQL.

    Raw SQL is required so the foreign key to sqp_country.iso works with
    the table's latin1 collation; an auto-generated table produces a
    collation mismatch and the FK creation fails.
    """
    sql = \
    """CREATE TABLE IF NOT EXISTS `sqp_codingchange` (
        `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
        `coding_change_group_id` integer NOT NULL,
        `question_id` integer NOT NULL,
        `characteristic_id` integer NOT NULL,
        `coding_user_id` integer,
        `change_by_id` integer,
        `change_type` integer NOT NULL,
        `new_value` varchar(15),
        `new_value_by_related_lang_id` integer,
        `new_value_by_related_country_id` varchar(2),
        `processed` bool NOT NULL,
        `error_occured` bool NOT NULL,
        `processing_log` longtext
    ) ENGINE = InnoDB DEFAULT CHARSET = latin1;
    ALTER TABLE `sqp_codingchange` ADD CONSTRAINT `coding_change_group_id_refs_id_66c2105b` FOREIGN KEY (`coding_change_group_id`) REFERENCES `sqp_codingchangegroup` (`id`);
    ALTER TABLE `sqp_codingchange` ADD CONSTRAINT `characteristic_id_refs_id_6dbc9f64` FOREIGN KEY (`characteristic_id`) REFERENCES `sqp_characteristic` (`id`);
    ALTER TABLE `sqp_codingchange` ADD CONSTRAINT `new_value_by_related_country_id_refs_iso_41bf5fc6` FOREIGN KEY (`new_value_by_related_country_id`) REFERENCES `sqp_country` (`iso`);
    ALTER TABLE `sqp_codingchange` ADD CONSTRAINT `new_value_by_related_lang_id_refs_id_54b8e81` FOREIGN KEY (`new_value_by_related_lang_id`) REFERENCES `sqp_language` (`id`);
    ALTER TABLE `sqp_codingchange` ADD CONSTRAINT `coding_user_id_refs_id_6f519545` FOREIGN KEY (`coding_user_id`) REFERENCES `auth_user` (`id`);
    ALTER TABLE `sqp_codingchange` ADD CONSTRAINT `change_by_id_refs_id_6f519545` FOREIGN KEY (`change_by_id`) REFERENCES `auth_user` (`id`);"""
    db.execute_many(sql)
    db.send_create_signal('sqp', ['CodingChange'])
def forwards(self, orm):
    """Add Course.number, copy each row's id into it, make it unique, and
    rebind the id column to a fresh sequence."""
    # Adding field 'Course.number'
    db.add_column('courses_course', 'number',
                  self.gf('django.db.models.fields.IntegerField')(default=0),
                  keep_default=False)
    # Update values to match Course.id (per-row save() through the frozen ORM).
    for course in orm.Course.objects.all():
        course.number = course.id
        course.save()
    # Make Course.number unique
    db.alter_column('courses_course', 'number',
                    self.gf('django.db.models.fields.IntegerField')(unique=True))
    # Changing field 'Course.id': recreate the sequence and point the id
    # default at it, seeded past the current maximum (PostgreSQL syntax).
    db.execute_many("""
    DROP SEQUENCE IF EXISTS courses_course_id_seq;
    CREATE SEQUENCE courses_course_id_seq OWNED BY courses_course.id;
    SELECT setval('courses_course_id_seq', (SELECT MAX(id) FROM courses_course));
    ALTER TABLE courses_course ALTER COLUMN id SET DEFAULT nextval('courses_course_id_seq');
    """)
def forwards(self, orm):
    """Create the QuestionBulkCreation tables with hand-written SQL and add
    several unique keys.

    Raw SQL avoids MySQL errno 1005 (InnoDB foreign key constraint errors):
    the country FK must be a latin1 varchar matching sqp_country.iso.
    Also adds unique keys on sqp_questionbulkassignments and
    sqp_userquestion as a side effect — note the last ALTER hard-codes the
    `sqp` database name.
    """
    db.execute_many("""
    CREATE TABLE `sqp_questionbulkcreation_created_questions` (
        `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
        `questionbulkcreation_id` integer NOT NULL,
        `question_id` integer NOT NULL,
        UNIQUE (`questionbulkcreation_id`, `question_id`)
    ) ;
    ALTER TABLE `sqp_questionbulkcreation_created_questions` ADD CONSTRAINT `question_id_refs_id_30c1e611` FOREIGN KEY (`question_id`) REFERENCES `sqp_question` (`id`);
    CREATE TABLE `sqp_questionbulkcreation` (
        `id` integer AUTO_INCREMENT NOT NULL PRIMARY KEY,
        `item_group_id` integer NOT NULL,
        `country_id` varchar(2) NOT NULL,
        `language_id` integer NOT NULL,
        `run_on_save` bool NOT NULL,
        `has_been_run` bool NOT NULL,
        `last_run_date` date
    )ENGINE = InnoDB DEFAULT CHARSET = latin1;
    ALTER TABLE `sqp_questionbulkcreation` ADD CONSTRAINT `item_group_id_refs_id_7cdc9bab` FOREIGN KEY (`item_group_id`) REFERENCES `sqp_itemgroup` (`id`);
    ALTER TABLE `sqp_questionbulkcreation` ADD CONSTRAINT `country_id_refs_iso_27d77fd9` FOREIGN KEY (`country_id`) REFERENCES `sqp_country` (`iso`);
    ALTER TABLE `sqp_questionbulkcreation` ADD CONSTRAINT `language_id_refs_id_6afb7f20` FOREIGN KEY (`language_id`) REFERENCES `sqp_language` (`id`);
    ALTER TABLE `sqp_questionbulkcreation_created_questions` ADD CONSTRAINT `questionbulkcreation_id_refs_id_41e9b9e` FOREIGN KEY (`questionbulkcreation_id`) REFERENCES `sqp_questionbulkcreation` (`id`);
    ALTER TABLE `sqp_questionbulkcreation` ADD UNIQUE `unique_creation` ( `item_group_id` , `country_id` , `language_id` );
    ALTER TABLE `sqp_questionbulkassignments` ADD UNIQUE `unique_assignment` ( `item_group_id` , `country_id` , `language_id` );
    ALTER TABLE `sqp`.`sqp_userquestion` ADD UNIQUE `unique_user_assignment` ( `user_id` , `question_id` );
    """)
def forwards(self, orm):
    """Recreate the hits_v view and the incremental_refresh_matview()
    PL/pgSQL helper (PostgreSQL only)."""
    # hits_v joins per-crawl hitgroup status rows to their content,
    # excluding crawls already present in the hits_mv materialized view.
    db.execute("""
    CREATE OR REPLACE VIEW hits_v AS
    SELECT p.id AS status_id, q.id AS content_id, p.group_id, p.crawl_id,
        ( SELECT main_crawl.start_time FROM main_crawl WHERE main_crawl.id = p.crawl_id) AS start_time,
        q.requester_id, p.hits_available, p.page_number, p.inpage_position, p.hit_expiration_date,
        q.requester_name, q.reward, q.html, q.description, q.title, q.keywords, q.qualifications, q.time_alloted
    FROM main_hitgroupstatus p
    LEFT JOIN main_hitgroupcontent q ON p.group_id::text = q.group_id::text
    WHERE NOT (p.crawl_id IN ( SELECT DISTINCT hits_mv.crawl_id FROM hits_mv ORDER BY hits_mv.crawl_id));
    """)
    # Helper that appends new rows from a matview's backing view and stamps
    # its last_refresh time; raises if the matview isn't registered.
    db.execute_many("""
    CREATE OR REPLACE FUNCTION incremental_refresh_matview(name) RETURNS void AS '
    DECLARE
        matview ALIAS FOR $1;
        entry matviews%ROWTYPE;
    BEGIN
        SELECT * INTO entry FROM matviews WHERE mv_name = matview;
        IF NOT FOUND THEN
            RAISE EXCEPTION ''Materialized view % does not exist.'', matview;
        END IF;
        EXECUTE ''INSERT INTO '' || matview || '' SELECT * FROM '' || entry.v_name;
        UPDATE matviews SET last_refresh=CURRENT_TIMESTAMP WHERE mv_name=matview;
        RETURN;
    END
    ' LANGUAGE 'plpgsql' VOLATILE SECURITY DEFINER COST 100;
    """)
def forwards(self, orm):
    """Create the 'range' and 'range_kv' tables, their unique constraints,
    and load seed data from range.sql."""
    # Adding model 'Range'
    db.create_table('range', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('ip_type', self.gf('django.db.models.fields.CharField')(max_length=1)),
        ('start_upper', self.gf('django.db.models.fields.BigIntegerField')(null=True)),
        ('start_lower', self.gf('django.db.models.fields.BigIntegerField')(null=True)),
        ('start_str', self.gf('django.db.models.fields.CharField')(max_length=39)),
        ('end_lower', self.gf('django.db.models.fields.BigIntegerField')(null=True)),
        ('end_upper', self.gf('django.db.models.fields.BigIntegerField')(null=True)),
        ('end_str', self.gf('django.db.models.fields.CharField')(max_length=39)),
        ('network', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['network.Network'], null=True, blank=True)),
        ('is_reserved', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('allow', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
        ('deny', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
        ('dhcpd_raw_include', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ('range_type', self.gf('django.db.models.fields.CharField')(default='st', max_length=2)),
    ))
    db.send_create_signal('range', ['Range'])
    # Adding unique constraint on 'Range', fields ['start_upper', 'start_lower', 'end_upper', 'end_lower']
    db.create_unique('range', ['start_upper', 'start_lower', 'end_upper', 'end_lower'])
    # Adding model 'RangeKeyValue'
    db.create_table('range_kv', (
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('key', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ('value', self.gf('django.db.models.fields.CharField')(max_length=255)),
        ('is_quoted', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('is_option', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('is_statement', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('has_validator', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ('range', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['range.Range'])),
    ))
    db.send_create_signal('range', ['RangeKeyValue'])
    # Adding unique constraint on 'RangeKeyValue', fields ['key', 'value', 'range']
    db.create_unique('range_kv', ['key', 'value', 'range_id'])
    # FIX: close the SQL file instead of leaking the handle.
    with open("cyder/cydhcp/range/sql/range.sql") as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Create and populate the validity/reliability (val/rel) tables.

    The *_create.sql DDL scripts must run before the data scripts, so the
    original execution order is preserved.
    """
    # FIX: the original opened four files without ever closing them.
    for name in ('val_create', 'rel_create', 'val', 'rel'):
        with open(settings.PROJECT_DIR + '/data/' + name + '.sql') as f:
            db.execute_many(f.read())
def forwards(self, orm):
    """Reset codings and collapse the source countries into 'GB'.

    Steps (in SQL order): wipe codings, completions and coding
    suggestions; delete the duplicate GB/SS questions (no text/codings);
    move SF questions to GB; drop the SF/SS source countries; rename GB to
    'United Kingdom'. User-owned codings (user id >= 129) are left alone.
    Finally the MTMM val/rel imports are re-run for correct values.
    """
    source_sql = """
    DELETE FROM sqp_coding;
    DELETE FROM sqp_completion;
    TRUNCATE sqp_codingsuggestion;
    DELETE FROM sqp_question WHERE country_id IN ('GB', 'SS');
    UPDATE sqp_question SET country_id = 'GB' WHERE country_id = 'SF';
    DELETE FROM sqp_country WHERE iso IN ('SS', 'SF');
    UPDATE sqp_country SET name = 'United Kingdom' WHERE iso = 'GB'
    """
    db.execute_many(source_sql)
    # Rerun the mtmm imports to get the correct db mtmm values.
    # FIX: close the SQL files instead of leaking the handles.
    with open(settings.PROJECT_DIR + '/data/val.sql') as f:
        db.execute_many(f.read())
    with open(settings.PROJECT_DIR + '/data/rel.sql') as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Import missing questions from the text dumps in
    data/questions_jorge_missing/ into Item/Question rows.

    Each file is named <round>_<country>_<language>.txt and contains item
    keys (possibly in Cyrillic look-alike letters) followed by {INTRO} /
    {QUESTION} / {ANSWERS} sections. Existing question text is never
    overwritten. A detailed run log is written to /tmp/jorge_import.log.
    """
    # Drop the unique_name index first; ignore failure if already gone.
    # NOTE(review): the bare except also hides genuine DB errors.
    try:
        sql = 'ALTER TABLE sqp_item DROP INDEX unique_name;'
        db.execute_many(sql)
        print "unique_name index dropped"
    except:
        print "unique_name index not dropped (most likely already deleted)"
    log_text = ''
    Q_BASE_DIR = settings.PROJECT_DIR + '/data/questions_jorge_missing/'
    files = []
    r,d,files = os.walk(Q_BASE_DIR).next()
    # Item keys like "A12", "P.B3ab"; the character classes also accept the
    # Cyrillic look-alikes for A, B and C (looking for russian A and B chars).
    item_regex = re.compile(ur'^(P\.)?[\u0041-\u005A\u0410\u0412\u0421]{1}[0-9]{1,3}([A-Za-z\u0410\u0412\u0421\u0430\u0432\u0441]{1,3})?(\.)?$')
    # Section markers such as {INTRO}, {QUESTION}, {ANSWERS}.
    text_area_regex = re.compile(ur'\{[A-Z]+\}')
    # Questionnaire numbering noise such as "Q13" that must be stripped.
    q_regex = re.compile(ur'Q{1}[0-9]{1,4}')
    for file_name in files:
        # Per-file counters for the run log.
        file_log_text = []
        CREATED_ITEMS = 0
        CREATED_QUESTIONS = 0
        EDITED_QUESTIONS = 0
        NOT_EDITED = 0
        SKIPPED_AREAS = 0
        IMPORTED_LINES = 0
        SKIPPED_LINES = []
        # utf-8-sig gets rid of the UTF-8 BOM (\ufeff); see
        # http://stackoverflow.com/questions/9228202/tokenizing-unicode-using-nltk
        file = codecs.open(Q_BASE_DIR + file_name, "r", "utf-8-sig")
        if not '.txt' in file_name:
            continue
        print "NOW CHECKING file %s" % file.name
        # File name encodes round / country / language, e.g. ESS4_DE_GER.txt.
        round_name, country_iso, language_iso = file_name.replace('.txt', '').split('_')
        language = sqp_models.Language.objects.get(iso=language_iso)
        country = sqp_models.Country.objects.get(iso=country_iso)
        round_name = round_name.replace('ESS', 'ESS Round ')
        study = sqp_models.Study.objects.get(name=round_name)
        key = None
        questions = {}
        text_areas = ['INTRO', 'QUESTION', 'ANSWERS', 'TRASH']
        line_number = 0
        for line in file:
            line_number += 1
            # Get rid of any Q13 / Q12 numbering noise.
            if q_regex.match(line):
                line = re.sub(q_regex, '', line).strip()
                key = None
            if item_regex.match(line.strip()):
                key = item_regex.match(line.strip()).group(0)
                # Normalize Cyrillic look-alike characters to Latin.
                key = key.replace(u'\u0410', 'A')
                key = key.replace(u'\u0412', 'B')
                key = key.replace(u'\u0421', 'C')
                key = key.replace(u'\u0430', 'a')
                key = key.replace(u'\u0432', 'b')
                key = key.replace(u'\u0441', 'c')
                # Strip "P." prefix, spaces, and trailing dot.
                key = key.replace('P.', '')
                key = key.replace(' ', '')
                key = key.replace('.', '')
                questions[key] = {'INTRO' : '', 'QUESTION' : '', 'ANSWERS' : '', 'found_text_areas' : [] }
                current_text_area = 'QUESTION'
                continue
            elif key and text_area_regex.match(line):
                match = text_area_regex.match(line).group(0)
                current_text_area = match.replace('{', '').replace('}', '')
                if current_text_area == 'ANSWERS 1':
                    current_text_area ='ANSWERS'
                elif current_text_area == 'ANSWERS 2':
                    SKIPPED_AREAS += 1
                    continue
                # A repeated section for the same key is routed to TRASH.
                if current_text_area in questions[key]['found_text_areas']:
                    current_text_area = 'TRASH'
                else:
                    questions[key]['found_text_areas'].append(current_text_area)
                if current_text_area not in text_areas:
                    raise Exception('Unrecognized text area "%s"' % current_text_area)
                continue
            # Only take the first occurence of QUESTION / INTRO / ANSWERS.
            if key and current_text_area != 'TRASH':
                questions[key][current_text_area] += line
                IMPORTED_LINES += 1
            elif line.strip() != '':
                SKIPPED_LINES.append({'line_number' : line_number, 'content': line})
        n = 0
        for key in questions:
            n +=1
            #if n > 10:break
            #print "NOW SAVING question %s" % key
            try:
                item, i_was_created = sqp_models.Item.objects.get_or_create(admin=key, study=study)
                if i_was_created: CREATED_ITEMS += 1
            except:
                # NOTE(review): if this fires, 'item' keeps the previous
                # loop iteration's value and the question below attaches to
                # the wrong item — confirm this is intended best-effort.
                print '!!!!!!!!!!BAD KEY!!!!!!!!!!!!!!!%s' % key
                file_log_text.append('!!!!!!!!!!BAD KEY!!!!!!!!!!!!!!!%s' % key)
                #raise Exception()
            question, q_was_created = sqp_models.Question.objects.get_or_create(item=item, country=country, language=language)
            if q_was_created: CREATED_QUESTIONS += 1
            # Never overwrite questions that already have text.
            if question.rfa_text or question.introduction_text or question.answer_text:
                NOT_EDITED += 1
            else:
                question.introduction_text = questions[key]['INTRO'].strip()
                question.rfa_text = questions[key]['QUESTION'].strip()
                question.answer_text = questions[key]['ANSWERS'].strip()
                if q_was_created:
                    question.imported_from = 'jorge-created'
                else:
                    question.imported_from = 'jorge-existing'
                question.save(create_suggestions = False)
                EDITED_QUESTIONS += 1
        file_log_text.append('%s %s %s new items:%s, total qs:%s, created qs:%s, edited qs:%s, not edited qs:%s, skipped keys:%s' %\
            (country_iso, language_iso, round_name, CREATED_ITEMS, len(questions), CREATED_QUESTIONS, EDITED_QUESTIONS, NOT_EDITED, SKIPPED_AREAS))
        file_log_text.append('LINES SKIPPED %s / IMPORTED %s' % (len(SKIPPED_LINES), IMPORTED_LINES))
        if SKIPPED_LINES:
            file_log_text.append('SKIPPED_LINES')
            for l in SKIPPED_LINES:
                file_log_text.append('    %s: %s' % (l['line_number'], l['content'].replace('\n', '')))
        file_log_text.append('IMPORTED ITEMS: %s' % ','.join(questions.keys()))
        file_log_text.append('------------------------------------------------------------------------')
        print '\n'.join(file_log_text)
        print
        log_text += '\n'.join(file_log_text) + '\n\n\n'
    log_file = codecs.open('/tmp/jorge_import.log', 'w', "utf-8-sig")
    log_file.write(log_text)
    log_file.close()
    print "LOG STORED AT '/tmp/jorge_import.log'"
def reindex_fts():
    """Touch every forum_noderevision row (no-op UPDATE) so Postgres
    full-text-search triggers re-index the table; skipped elsewhere."""
    from south.db import db
    if db.backend_name != "postgres":
        return
    db.start_transaction()
    db.execute_many("UPDATE forum_noderevision set id = id WHERE TRUE;")
    db.commit_transaction()
def forwards(self, orm):
    """Backfill FtpUser.profile from each user's domain, then refresh the
    database views."""
    for user in orm.FtpUser.objects.all():
        user.profile = user.domain.profile
        user.save()
    db.execute_many(DB_VIEWS_FORWARDS)
def enable_triggers():
    """Re-enable database triggers on PostgreSQL; no-op on other backends."""
    if db.backend_name != "postgres":
        return
    db.start_transaction()
    db.execute_many(commands.PG_ENABLE_TRIGGERS)
    db.commit_transaction()
def reset_sequences():
    """Reset table sequences on PostgreSQL; no-op on other backends."""
    if db.backend_name != "postgres":
        return
    db.start_transaction()
    db.execute_many(commands.PG_SEQUENCE_RESETS)
    db.commit_transaction()
def forwards(self, orm):
    """Enable the pg_trgm extension and add GiST trigram indexes for fuzzy
    search on precios_producto (PostgreSQL only)."""
    trigram_sql = """
    CREATE EXTENSION pg_trgm;
    CREATE INDEX descripcion_trgm_idx ON precios_producto USING gist (descripcion gist_trgm_ops);
    CREATE INDEX busqueda_trgm_idx ON precios_producto USING gist (busqueda gist_trgm_ops);
    """
    db.execute_many(trigram_sql)
def backwards(self, orm):
    """Empty the carrier and bin tables."""
    # BUG FIX: 'delete * from <table>' is invalid SQL — DELETE takes no
    # column list. Use the standard 'DELETE FROM <table>' form.
    db.execute_many('delete from carrier;')
    db.execute_many('delete from bin;')
def reset_sequences():
    """Reset table sequences on PostgreSQL; no-op on other backends."""
    from south.db import db
    if db.backend_name != "postgres":
        return
    db.start_transaction()
    db.execute_many(PG_SEQUENCE_RESETS)
    db.commit_transaction()
def forwards(self, orm):
    """Mark every existing locations address and address label as visible."""
    visibility_sql = "update locations_address set is_visible = 't'; update locations_addresslabel set is_visible = 't';"
    db.execute_many(visibility_sql)
def enable_triggers():
    """Re-enable database triggers on PostgreSQL; no-op on other backends."""
    from south.db import db
    if db.backend_name != "postgres":
        return
    db.start_transaction()
    db.execute_many(PG_ENABLE_TRIGGERS)
    db.commit_transaction()
def backwards(self, orm):
    """Empty the shipping zone, country and state tables (reverse of the
    seed-data load in forwards)."""
    # BUG FIX: 'delete * from <table>' is invalid SQL — DELETE takes no
    # column list. Use the standard 'DELETE FROM <table>' form.
    db.execute_many('delete from shipping_zone;')
    db.execute_many('delete from shipping_country;')
    db.execute_many('delete from state;')
def forwards(self, orm):
    """Load zone, country and state seed data from the UTF-8 SQL files
    stored next to this migration."""
    base = os.path.dirname(__file__)
    # Preserve the original execution order: zone, country, state.
    # FIX: context managers close the handles (the original leaked three),
    # and the lambda-assigned path helper is replaced with a plain join.
    for name in ('zone.sql', 'country.sql', 'state.sql'):
        with open(os.path.join(base, name)) as f:
            db.execute_many(f.read().decode('utf-8'))
def backwards(self, orm):
    """Drop the ODS views using the drop script located in current_dir."""
    # FIX: close the SQL file instead of leaking the handle.
    with open("{0}/ods-views-drop.sql".format(current_dir)) as f:
        db.execute_many(f.read())
def backwards(self, orm):
    """Drop the materialized-view machinery via mviews_drop.sql."""
    mviews_drop_sql = os.path.join(
        settings.ROOT_PATH, 'src/mturk/main/migrations/mviews_drop.sql')
    # FIX: close the SQL file instead of leaking the handle.
    with open(mviews_drop_sql) as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Run the list.sql script shipped in the app's sql/ directory."""
    basepath = os.path.dirname(os.path.dirname(__file__))
    listsql = os.path.join(basepath, 'sql', 'list.sql')
    # list.sql — FIX: close the file instead of leaking the handle.
    with open(listsql) as f:
        db.execute_many(f.read())
def forwards(self, orm):
    """Manually add the copy_text_from_study_id column.

    This is manual because south or django fails to look up the type of the
    column when making a foreign key index.
    """
    alter_sql = (
        "ALTER TABLE `sqp_questionbulkcreation` ADD "
        "`copy_text_from_study_id` INT( 10 ) UNSIGNED NOT NULL ;"
    )
    db.execute_many(alter_sql)
def handle(self, *args, **options):
    """Run ``sqldiff`` for every installed app, collect the SQL needed to
    reconcile the database schema with the models, print it, and (after
    confirmation) optionally execute it.
    """
    failed = False
    failed_number = 0
    failed_msg = defaultdict(list)  # error message -> apps that produced it
    sql = ''
    nothing_to_do = False
    no_diff_msg = 'no SQL output\n'
    for appname in settings.INSTALLED_APPS:
        # sqldiff expects the bare app label, not the dotted module path.
        if '.' in appname:
            appname = appname.split('.')[-1]
        orig_stdout = sys.stdout
        try:
            # Capture sqldiff's stdout/stderr instead of letting it print.
            content = StringIOisatty()
            error = StringIOisatty()
            sys.stdout = content
            call_command('sqldiff', appname, stderr=error)
        except:
            # sqldiff raises for apps it cannot diff; record the (app-name
            # anonymized) message and keep going with the other apps.
            failed_number += 1
            msg = error.getvalue().replace('\n', '').replace(
                appname, '<app_name>')
            failed_msg[msg].append(appname)
            failed = True
        finally:
            # Always restore stdout, even when sqldiff blew up.
            sys.stdout = orig_stdout
        out = content.getvalue()
        if not failed:
            self.stdout.write("Looked up for " + appname + '\n')
            if '-- No differences' not in out:
                # Strip the ANSI-colored BEGIN;/COMMIT; wrapper lines before
                # accumulating the statements.
                sql += out.replace('\x1b[33mBEGIN;\x1b[0m\n', '').replace(
                    '\x1b[33mCOMMIT;\x1b[0m\n', '')
            else:
                no_diff_msg = out
        # Reset per iteration so one failing app does not mask the rest.
        failed = False
    if sql == '':
        sql = no_diff_msg
        nothing_to_do = True
    self.stdout.write("Done.\n")
    if failed_number > 0:
        self.stdout.write(
            "{} apps weren't diffed:\n".format(failed_number))
        self.stdout.write('\n'.join([
            ' {1}:\n {0}'.format(k, ' '.join(iter(v)))
            for k, v in failed_msg.iteritems()
        ]) + '\n')
    self.stdout.write("\nSQL diff result :\n" + sql)
    if 'Table missing' in sql:
        # Missing tables mean syncdb/migrate was skipped; applying the diff
        # would likely be destructive, so bail out.
        msg = (
            "\nThere are missing tables, make sure you did a syncdb"
            " and/or migrate because\n it's probably a bad idea to apply"
            " the above SQL code.\n")
        self.stdout.write(msg)
        return
    if not nothing_to_do and options.get('interactive', True):
        msg = (
            "\nWe detected some discrepancies between your models "
            "and the current database schema.\nWould you like to apply them"
            " now? Be careful this may f**k up everything. (yes I want/no): "
        )
        confirm = raw_input(msg)
        while 1:
            if confirm not in ('yes I want', 'no'):
                confirm = raw_input(
                    'Please enter either "yes I want" or "no": ')
                continue
            if confirm == 'yes I want':
                # Remove any remaining ANSI escape sequences before running.
                sql = re.sub('\x1b.*?m', '', sql)
                db.start_transaction()
                db.execute_many(sql)
                db.commit_transaction()
                print 'ok'
            break
    return
def forwards(self, orm):
    """Introduce the shared ``core_publishable`` base table and fold the
    per-model tables listed in MODELS into it as multi-table-inheritance
    children, rewiring authors, placements, tags and comments along the way.

    NOTE(review): this runs MySQL-dialect raw SQL (backtick quoting,
    hard-coded FK constraint names) and depends strictly on statement order.
    """
    # TODO:
    #  migrate as much as possible to south db api
    # Model 'Publishable'
    db.create_table('core_publishable', (
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('content_type', models.ForeignKey(ContentType)),
        ('category', models.ForeignKey(Category, verbose_name=_(Category))),
        ('title', models.CharField(_('Title'), max_length=255)),
        ('slug', models.SlugField(_('Slug'), max_length=255)),
        ('source', models.ForeignKey(Source, blank=True, null=True, verbose_name=_(Source))),
        ('photo', models.ForeignKey(Photo, blank=True, null=True, verbose_name=_(Photo))),
        ('description', models.TextField(_('Description'))),
    ))
    # TODO: create ContentType for Publishable
    # Mock Models
    Publishable = db.mock_model(model_name='Publishable', db_table='core_publishable',
                                db_tablespace='', pk_field_name='id',
                                pk_field_type=models.AutoField, pk_field_args=[],
                                pk_field_kwargs={})
    Author = db.mock_model(model_name='Author', db_table='core_author',
                           db_tablespace='', pk_field_name='id',
                           pk_field_type=models.AutoField, pk_field_args=[],
                           pk_field_kwargs={})
    # M2M field 'Publishable.authors'
    db.create_table('core_publishable_authors', (
        ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
        ('publishable', models.ForeignKey(Publishable, null=False)),
        ('author', models.ForeignKey(Author, null=False))
    ))
    # add a temporary column to remember the old ID
    db.add_column('core_publishable', 'old_id', models.IntegerField(null=True))
    for app, mod in MODELS:
        table = app + '_' + mod
        # move the data: copy every child row into core_publishable,
        # remembering the child's original id in old_id
        db.execute('''
            INSERT INTO `core_publishable` (old_id, title, slug, category_id, source_id, photo_id, description, content_type_id)
                SELECT a.id, title, slug, category_id, source_id, photo_id, perex, ct.id
                FROM `%(table)s` a, `django_content_type` ct
                WHERE ct.`app_label` = '%(app)s' AND ct.`model` = '%(mod)s';
            ''' % {'app': app, 'mod': mod, 'table': table}
        )
        # add link to parent
        db.add_column(table, 'publishable_ptr_id', models.IntegerField(null=True))
        # update the link: point each child row at its new parent row
        db.execute('''
            UPDATE `core_publishable` pub INNER JOIN `%(table)s` art ON (art.`id` = pub.`old_id`)
            SET art.`publishable_ptr_id` = pub.`id`
            WHERE pub.`content_type_id` = (SELECT ct.`id` FROM `django_content_type` ct WHERE ct.`app_label` = '%(app)s' AND ct.`model` = '%(mod)s');
            ''' % {'app': app, 'mod': mod, 'table': table}
        )
    # TODO: we could use introspection to get the FK name in order to drop
    # it, then we could also move this into the loop
    db.execute_many('''
        # drop PRIMARY KEY
        ALTER TABLE `articles_article_authors` DROP FOREIGN KEY `article_id_refs_id_1bb2108a`;
        ALTER TABLE `articles_article` CHANGE `id` `id` integer NULL;
        ALTER TABLE `articles_article` DROP PRIMARY KEY;
        ALTER TABLE `events_event_authors` DROP FOREIGN KEY `event_id_refs_id_2b6d1de2`;
        ALTER TABLE `events_event` CHANGE `id` `id` integer NULL;
        ALTER TABLE `events_event` DROP PRIMARY KEY;
        ''')
    for app, mod in MODELS:
        table = app + '_' + mod
        # replace it with a link to parent (publishable_ptr becomes the PK)
        db.alter_column(table, 'publishable_ptr_id',
                        models.ForeignKey(Publishable, primary_key=True))
        # update authors: move the per-model M2M rows into the shared table
        db.execute('''
            INSERT INTO `core_publishable_authors` (`publishable_id`, `author_id`)
                SELECT art.`publishable_ptr_id`, art_aut.`author_id`
                FROM `%(table)s` art INNER JOIN `%(table)s_authors` art_aut ON (art.`id` = art_aut.`%(mod)s_id`);
            ''' % {'app': app, 'mod': mod, 'table': table}
        )
        db.delete_table(table + '_authors')
        # UPDATE generic relations: retarget tags and comments from the old
        # child ids to the new core_publishable ids
        db.execute_many('''
            UPDATE `tagging_taggeditem` gen INNER JOIN `core_publishable` pub ON (gen.`content_type_id` = pub.`content_type_id` AND gen.`object_id` = pub.`old_id`)
            SET gen.`object_id` = pub.`id`
            WHERE pub.`content_type_id` = (SELECT ct.`id` FROM `django_content_type` ct WHERE ct.`app_label` = '%(app)s' AND ct.`model` = '%(mod)s');
            UPDATE `comments_comment` gen INNER JOIN `core_publishable` pub ON (gen.`target_ct_id` = pub.`content_type_id` AND gen.`target_id` = pub.`old_id`)
            SET gen.`target_id` = pub.`id`
            WHERE pub.`content_type_id` = (SELECT ct.`id` FROM `django_content_type` ct WHERE ct.`app_label` = '%(app)s' AND ct.`model` = '%(mod)s');
            ''' % {'app': app, 'mod': mod, 'table': table}
        )
    db.execute('''
        ALTER TABLE `articles_article` DROP FOREIGN KEY `photo_id_refs_id_573d4575`;
        ''')
    db.add_column('core_placement', 'publishable_id', models.IntegerField(null=True))
    for app, mod in MODELS:
        table = app + '_' + mod
        # drop duplicate columns (now held by core_publishable)
        for column in ['title', 'category_id', 'photo_id', 'source_id', 'slug', 'id', 'perex']:
            db.delete_column(table, column)
        # MIGRATE PLACEMENTS
        db.execute('''
            UPDATE `core_placement` plac INNER JOIN `core_publishable` pub ON (plac.`target_ct_id` = pub.`content_type_id` AND plac.`target_id` = pub.`old_id`)
            SET plac.`publishable_id` = pub.`id`
            WHERE pub.`content_type_id` = (SELECT ct.`id` FROM `django_content_type` ct WHERE ct.`app_label` = '%(app)s' AND ct.`model` = '%(mod)s');
            ''' % {'app': app, 'mod': mod, 'table': table}
        )
    db.alter_column('core_placement', 'publishable_id', models.ForeignKey(Publishable))
    db.execute('''
        ALTER TABLE `core_placement` DROP FOREIGN KEY `core_placement_ibfk_2`;
        ''')
    db.create_index('core_placement', ['publishable_id'])
    db.delete_column('core_placement', 'target_ct_id')
    db.delete_column('core_placement', 'target_id')
    # delete temporary column to remember the old ID
    db.delete_column('core_publishable', 'old_id')
ALTER TABLE "emailconfirmation_emailaddress" RENAME TO "account_emailaddress"; ALTER TABLE "emailconfirmation_emailconfirmation" RENAME TO "account_emailconfirmation"; DROP TABLE "account_passwordreset"; ALTER TABLE "account_signupcode" ALTER COLUMN "code" TYPE varchar(64); ALTER TABLE "account_signupcode" ADD CONSTRAINT "account_signupcode_code_key" UNIQUE ("code"); ALTER TABLE "account_emailconfirmation" RENAME COLUMN "confirmation_key" TO "key"; ALTER TABLE "account_emailconfirmation" ALTER COLUMN "key" TYPE varchar(64); ALTER TABLE "account_emailconfirmation" ADD UNIQUE ("key"); ALTER TABLE account_emailconfirmation ADD COLUMN created timestamp with time zone; UPDATE account_emailconfirmation SET created = sent; """ sql2 = """ ALTER TABLE account_emailconfirmation ALTER COLUMN created SET NOT NULL; ALTER TABLE account_emailconfirmation ALTER COLUMN sent DROP NOT NULL; ALTER TABLE "account_emailaddress" ADD CONSTRAINT "account_emailaddress_email_key" UNIQUE ("email"); ALTER TABLE "account_emailaddress" DROP CONSTRAINT "emailconfirmation_emailaddress_user_id_email_key"; """ print sql db.start_transaction() db.execute_many(sql) db.commit_transaction() print sql2 db.start_transaction() db.execute_many(sql2) db.commit_transaction() print "Done."