def execute(self, sql, params=[], print_all_errors=True):
    """
    Executes the given SQL statement, with optional parameters.
    If the instance's debug attribute is True, prints out what it executes.
    """
    self._possibly_initialise()
    cursor = self._get_connection().cursor()
    if self.debug:
        print(" = %s" % sql, params)
    if self.dry_run:
        return []
    get_logger().debug(text_type('execute "%s" with params "%s"' % (sql, params)))
    try:
        cursor.execute(sql, params)
    except DatabaseError as e:
        if print_all_errors:
            self._print_sql_error(e, sql, params)
        raise
    try:
        return cursor.fetchall()
    except BaseException:
        return []

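# Usage sketch (not from the original source): `db` stands for the backend
# instance this method is bound to, and the table name is made up. It shows
# the intended call shape: parameterised SQL plus a list of parameters.
rows = db.execute(
    "SELECT id, name FROM example_table WHERE id = %s",  # hypothetical table
    params=[42],
)
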
def _column_cp(self, table_name, column_old, column_new, *args, **opts):
    # Copy foreign key constraint
    try:
        constraint = self._find_foreign_constraints(table_name, column_old)[0]
        refs = self._lookup_constraint_references(table_name, constraint)
        if refs is not None:
            (ftable, fcolumn) = refs
            if ftable and fcolumn:
                fk_sql = self.foreign_key_sql(
                    table_name, column_new, ftable, fcolumn)
                get_logger().debug("Foreign key SQL: " + fk_sql)
                self.add_deferred_sql(fk_sql)
    except IndexError:
        pass  # No constraint exists so ignore
    except DryRunError:
        pass

    # Copy constraints referring to this column
    try:
        reverse = self._lookup_reverse_constraint(table_name, column_old)
        for cname, rtable, rcolumn in reverse:
            fk_sql = self.foreign_key_sql(
                rtable, rcolumn, table_name, column_new)
            self.add_deferred_sql(fk_sql)
    except DryRunError:
        pass

    return func(self, table_name, column_old, column_new, *args, **opts)

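# _column_cp closes over a name `func` that never appears in the snippet, so
# it only makes sense as the inner wrapper of a decorator. A minimal sketch of
# the presumed outer function follows; the name `copy_column_constraints` is
# an assumption, modelled on South's constraint-copying decorators.
def copy_column_constraints(func):
    def _column_cp(self, table_name, column_old, column_new, *args, **opts):
        # ... re-create foreign keys involving column_old on column_new,
        # as in the body above, then delegate to the wrapped operation:
        return func(self, table_name, column_old, column_new, *args, **opts)
    return _column_cp
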
def backwards(self, orm):
    table, search_vector_column, index_name = self._reflect(orm)

    # Removing index on 'Event', fields ['search_vector']
    sql = db.drop_index_string % {'index_name': db.quote_name(index_name)}
    get_logger().info('>>> DROPPING FTS INDEX')
    db.execute(sql)

def forwards(self, orm):
    table, search_vector_column, index_name = self._reflect(orm)

    # Adding index on 'Event', fields ['search_vector']
    index_sql = 'CREATE INDEX %s ON %s USING gin(%s);' % (
        db.quote_name(index_name),
        db.quote_name(table),
        db.quote_name(search_vector_column),
    )
    get_logger().info('>>> ADDING FTS INDEX')
    db.execute(index_sql)

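# For concreteness (not from the original source): interpolating illustrative,
# pre-quoted identifiers shows the statement this migration sends to
# PostgreSQL. All three names below are assumptions.
example_sql = 'CREATE INDEX %s ON %s USING gin(%s);' % (
    '"event_fts"', '"event_event"', '"search_vector"')
# example_sql is now:
#   CREATE INDEX "event_fts" ON "event_event" USING gin("search_vector");
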
def run(self):
    """
    Set up environment and run tests.
    Returns number of errors.
    """
    # Set up south logger
    get_logger().setLevel(self.loglevel)
    # Set up system logger
    logging.basicConfig(level=logging.DEBUG, format="%(asctime)s %(message)s")
    # Prepare environment and run tests
    with self.test_environment():
        # Get test suite
        modules, tests = self.get_modules(self.test_labels)
        # Check modules are found
        if len(modules) == 0 and len(tests) == 0:
            if self.beef:
                self.reuse_db = True
            else:
                self.info("No modules to test. Exiting")
                return 0
        # Run test suite in database and coverage context
        with self.coverage():
            with self.databases(reuse=self.reuse_db):
                # Initialize database: Wrap as tests
                if not self.reuse_db:
                    management.call_command("sync-perm")
                    # management.call_command("sync-pyrules")
                    management.call_command("collection", "--sync")
                    management.call_command("beef", "--pull")
                # Add as tests
                suite = self.get_suite(modules, tests)
                self.info("Running test suite")
                runner = unittest.TextTestRunner(verbosity=self.verbosity,
                                                 resultclass=NOCTestResult)
                self.result = runner.run(suite)
                self.info("Test suite completed")
    # Return summary
    if self.result:
        if self.junit_xml_out:
            self.result.write_xml(self.junit_xml_out)
        else:
            self.result.dump_result()
        return len(self.result.failures) + len(self.result.errors)
    else:
        return 1

def backwards(self, orm):
    meta = self._reflect(orm)

    sql = 'DROP TRIGGER %(trigger_name)s ON %(table_name)s;' % {
        'trigger_name': db.quote_name(meta['trigger_name']),
        'table_name': db.quote_name(meta['table_name']),
    }
    get_logger().info('>>> DROPPING FTS TRIGGER')
    db.execute(sql)

    sql = 'DROP FUNCTION %(tsv_func)s;' % {
        'tsv_func': self.tsv_func,
    }
    get_logger().info('>>> DROPPING TSV FUNCTION')
    db.execute(sql)

def execute(self, sql, params=[]):
    """
    Executes the given SQL statement, with optional parameters.
    If the instance's debug attribute is True, prints out what it executes.
    """
    cursor = self._get_connection().cursor()
    if self.debug:
        print " = %s" % sql, params
    get_logger().debug('south execute "%s" with params "%s"' % (sql, params))
    if self.dry_run:
        return []
    cursor.execute(sql, params)
    try:
        return cursor.fetchall()
    except:
        return []

def migrate_contenttype(self, from_app, to_app, models=None):
    models = models or self._models
    # ideally, we should have frozen content types too, but we're lazy.
    q = ContentType.objects.filter(
        app_label=from_app,
        model__in=models,
    )
    # sanity check; just warn, don't have to error out
    if len(q) != len(models):
        get_logger().warning(
            "Not all content types for models (%s) in app %s were found"
            % (models, from_app))
    for ct in q:
        ct.app_label = to_app
        ct.save()
        get_logger().info("Updated content type for model %s (ID: %s)"
                          % (ct.model, ct.id))

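# A minimal sketch (not from the original source) of how this helper might be
# invoked from a South data migration. The app labels and model names are
# illustrative, and it assumes migrate_contenttype is available on the class.
class Migration(DataMigration):
    _models = ['product', 'category']  # hypothetical models being moved

    def forwards(self, orm):
        # Re-point the content-type rows from the old app label to the new one.
        self.migrate_contenttype('shop', 'catalogue')

    def backwards(self, orm):
        self.migrate_contenttype('catalogue', 'shop')
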
def execute(self, sql, params=[]):
    """
    Executes the given SQL statement, with optional parameters.
    If the instance's debug attribute is True, prints out what it executes.
    """
    self._possibly_initialise()
    cursor = self._get_connection().cursor()
    if self.debug:
        print " = %s" % sql, params
    if self.dry_run:
        return []
    get_logger().debug('execute "%s" with params "%s"' % (sql, params))
    try:
        cursor.execute(sql, params)
    except DatabaseError, e:
        print >> sys.stderr, 'FATAL ERROR - The following SQL query failed: %s' % sql
        print >> sys.stderr, 'The error was: %s' % e
        sys.exit(1)

def execute(self, sql, params=[]):
    """
    Executes the given SQL statement, with optional parameters.
    If the instance's debug attribute is True, prints out what it executes.
    """
    self._possibly_initialise()
    cursor = self._get_connection().cursor()
    if self.debug:
        print " = %s" % sql, params
    get_logger().debug('south execute "%s" with params "%s"' % (sql, params))
    if self.dry_run:
        return []
    try:
        cursor.execute(sql, params)
    except DatabaseError, e:
        print >> sys.stderr, 'FATAL ERROR - The following SQL query failed: %s' % sql
        print >> sys.stderr, 'The error was: %s' % e
        sys.exit(1)

def forwards(self, orm):
    meta = self._reflect(orm)

    sql = ' '.join((
        'CREATE FUNCTION %(tsv_func)s RETURNS trigger AS $$',
        'begin',
        'new.%(index_column)s :=',
        "setweight(to_tsvector('pg_catalog.english',",
        "coalesce(new.%(title_column)s, '')), 'A') ||",
        "setweight(to_tsvector('pg_catalog.english',",
        "coalesce(new.%(description_column)s, '')), 'B');",
        'return new;',
        'end',
        '$$ LANGUAGE plpgsql;',
    )) % {
        'tsv_func': self.tsv_func,
        'index_column': db.quote_name(meta['index_column']),
        'title_column': meta['title_column'],
        'description_column': meta['description_column'],
    }
    get_logger().info('>>> ADDING FTS FUNCTION')
    db.execute(sql)

    sql = ' '.join((
        'CREATE TRIGGER %(trigger_name)s BEFORE INSERT OR UPDATE',
        'ON %(table_name)s FOR EACH ROW EXECUTE PROCEDURE %(tsv_func)s;',
    )) % {
        'trigger_name': db.quote_name(meta['trigger_name']),
        'table_name': db.quote_name(meta['table_name']),
        'tsv_func': self.tsv_func,
    }
    get_logger().info('>>> ADDING FTS TRIGGER')
    db.execute(sql)

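# Joined and interpolated with illustrative values (the function, table, and
# column names below are all assumptions, not taken from the migration), the
# two statements sent to the database read roughly:
#
#   CREATE FUNCTION event_tsv_update() RETURNS trigger AS $$ begin
#   new."search_vector" := setweight(to_tsvector('pg_catalog.english',
#   coalesce(new.title, '')), 'A') || setweight(to_tsvector('pg_catalog.english',
#   coalesce(new.description, '')), 'B'); return new; end $$ LANGUAGE plpgsql;
#
#   CREATE TRIGGER "event_tsv" BEFORE INSERT OR UPDATE
#   ON "event_event" FOR EACH ROW EXECUTE PROCEDURE event_tsv_update();
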
def column_sql(self, table_name, field_name, field, tablespace='',
               with_name=True, field_prepared=False):
    """
    Creates the SQL snippet for a column. Used by add_column and add_table.
    """
    # If the field hasn't already been told its attribute name, do so.
    if not field_prepared:
        field.set_attributes_from_name(field_name)

    # Hook for the field to do any resolution prior to its attributes being queried.
    if hasattr(field, 'south_init'):
        field.south_init()

    # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
    field = self._field_sanity(field)

    try:
        sql = field.db_type(connection=self._get_connection())
    except TypeError:
        sql = field.db_type()

    if sql:
        # Some callers, like the sqlite stuff, just want the extended type.
        if with_name:
            field_output = [self.quote_name(field.column), sql]
        else:
            field_output = [sql]

        field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
        if field.primary_key:
            field_output.append('PRIMARY KEY')
        elif field.unique:
            # Just use UNIQUE (no indexes any more, we have delete_unique)
            field_output.append('UNIQUE')

        tablespace = field.db_tablespace or tablespace
        if tablespace and getattr(self._get_connection().features,
                                  "supports_tablespaces", False) and field.unique:
            # We must specify the index tablespace inline, because we
            # won't be generating a CREATE INDEX statement for this field.
            field_output.append(
                self._get_connection().ops.tablespace_sql(tablespace, inline=True))

        sql = ' '.join(field_output)
        sqlparams = ()

        # If the field is "NOT NULL" and a default value is provided, create
        # the column with it; this allows the addition of a NOT NULL field to
        # a table with existing rows.
        if not getattr(field, '_suppress_default', False):
            if field.has_default():
                default = field.get_default()
                # If the default is a callable, then scrap it, as we don't
                # want Python-based defaults written into the database.
                if callable(default):
                    get_logger().warn(text_type(
                        'discarded column default "%r" on "%s"'
                        % (default, table_name)))
                    default = None
                # If the default is actually None, don't add a default term.
                if default is not None:
                    default = field.get_db_prep_save(
                        default, connection=self._get_connection())
                    default = self._default_value_workaround(default)
                    # Now do some very cheap quoting. TODO: Redesign return
                    # values to avoid this.
                    if isinstance(default, string_types):
                        default = "'%s'" % default.replace("'", "''")
                    # Escape any % signs in the output (bug #317)
                    if isinstance(default, string_types):
                        default = default.replace("%", "%%")
                    # Add it in
                    sql += " DEFAULT %s"
                    sqlparams = (default)
            elif (not field.null and field.blank) or (field.get_default() == ''):
                if field.empty_strings_allowed and \
                        self._get_connection().features.interprets_empty_strings_as_nulls:
                    sql += " DEFAULT ''"
                # Error here would be nice, but doesn't seem to play fair.
                # else:
                #     raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")

        if field.rel and self.supports_foreign_keys:
            self.add_deferred_sql(
                self.foreign_key_sql(
                    table_name,
                    field.column,
                    field.rel.to._meta.db_table,
                    field.rel.to._meta.get_field(field.rel.field_name).column,
                )
            )

    # Things like the contrib.gis module fields have this in 1.1 and below.
    if hasattr(field, 'post_create_sql'):
        for stmt in field.post_create_sql(no_style(), table_name):
            self.add_deferred_sql(stmt)

    # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
    # This also creates normal indexes in 1.1.
    if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
        # Make a fake model to pass in, with only db_table
        model = self.mock_model("FakeModelForGISCreation", table_name)
        for stmt in self._get_connection().creation.sql_indexes_for_field(
                model, field, no_style()):
            self.add_deferred_sql(stmt)

    if sql:
        return sql % sqlparams
    else:
        return None

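# For orientation (not from the original source): a NOT NULL CharField with a
# plain default typically comes back from column_sql looking like this. The
# field name is illustrative and the exact type spelling varies per backend.
#
#   "status" varchar(100) NOT NULL DEFAULT 'draft'
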
def backwards(self, orm):
    get_logger().warning(
        "Unable to effect a migration to 'zero' on the product modules; "
        "please do so manually."
    )

def forwards(self, orm):
    # Adding model 'AttributeOption'
    db.create_table('product_attributeoption', (
        ('name', self.gf('django.db.models.fields.SlugField')(
            max_length=100, db_index=True)),
        ('error_message', self.gf('django.db.models.fields.CharField')(
            default=u'Inavlid Entry', max_length=100)),
        ('sort_order', self.gf('django.db.models.fields.IntegerField')(default=1)),
        ('validation', self.gf('django.db.models.fields.CharField')(max_length=100)),
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('description', self.gf('django.db.models.fields.CharField')(max_length=100)),
    ))
    db.send_create_signal('product', ['AttributeOption'])

    # TODO add default validation for AttributeOption
    from product.models import VALIDATIONS as validations
    default_validation = validations[0][0]
    if not db.dry_run:
        for attr in orm['product.productattribute'].objects.all():
            orm['product.attributeoption'].objects.create(
                description='',
                name=attr.name,
                validation=default_validation,
            )

    if db.backend_name == 'sqlite3':
        get_logger().debug("dropping and re-creating table for ProductAttribute")
        if db.dry_run:
            return

        # We re-create ProductAttribute, since sqlite does not support adding
        # foreign key constraints on existing tables (i.e. adding ForeignKey
        # fields).
        #
        # We have to do 0003's work here, because we can't iterate over
        # ProductAttribute instances there - the 'option' column has not
        # been created and django barfs if we do so.

        # Collect old data
        old_attrs = {}
        for attr in orm['product.ProductAttribute'].objects.all():
            obj = {}
            # We have already collected 'name' earlier, so we can leave it out.
            # TODO make this more generic
            for k in ('product', 'languagecode', 'value'):
                obj[k] = getattr(attr, k)
            old_attrs[attr.id] = obj

        # Deleting old 'ProductAttribute' table
        db.delete_table('product_productattribute')

        # Re-use the create_table expression for the old 'ProductAttribute',
        # this time adding the 'option' column
        db.create_table('product_productattribute', (
            ('languagecode', self.gf('django.db.models.fields.CharField')(
                max_length=10, null=True, blank=True)),
            ('product', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['product.Product'])),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('value', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('name', self.gf('django.db.models.fields.SlugField')(
                max_length=100, db_index=True)),
            ('option', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['product.AttributeOption'])),
        ))
        db.send_create_signal('product', ['ProductAttribute'])

        # Add back data
        for id, attr_dict in old_attrs.items():
            kwargs = {}
            for field in ('product', 'languagecode', 'value'):
                kwargs[field] = attr_dict[field]
            orm['product.ProductAttribute'].objects.create(id=id, **kwargs)

def forwards(self, orm):
    # Adding model 'AttributeOption'
    db.create_table('product_attributeoption', (
        ('name', self.gf('django.db.models.fields.SlugField')(
            max_length=100, db_index=True)),
        ('error_message', self.gf('django.db.models.fields.CharField')(
            default=u'Inavlid Entry', max_length=100)),
        ('sort_order', self.gf('django.db.models.fields.IntegerField')(default=1)),
        ('validation', self.gf('django.db.models.fields.CharField')(max_length=100)),
        ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
        ('description', self.gf('django.db.models.fields.CharField')(max_length=100)),
    ))
    db.send_create_signal('product', ['AttributeOption'])

    # TODO add default validation for AttributeOption
    from product.models import VALIDATIONS as validations
    default_validation = validations[0][0]
    if not db.dry_run:
        for attr in orm['product.productattribute'].objects.all():
            if orm['product.attributeoption'].objects.filter(
                    name__exact=attr.name).count() < 1:
                orm['product.attributeoption'].objects.create(
                    description=attr.name,
                    name=attr.name,
                    validation=default_validation,
                )

    if db.backend_name == 'sqlite3':
        get_logger().debug("dropping and re-creating table for ProductAttribute")
        if db.dry_run:
            return

        # We re-create ProductAttribute, since sqlite does not support adding
        # foreign key constraints on existing tables (i.e. adding ForeignKey
        # fields).
        #
        # We have to do 0003's work here, because we can't iterate over
        # ProductAttribute instances there - the 'option' column has not
        # been created and django barfs if we do so.

        # Collect old data
        old_attrs = {}
        for attr in orm['product.ProductAttribute'].objects.all():
            obj = {}
            # We have already collected 'name' earlier, so we can leave it out.
            # TODO make this more generic
            for k in ('product', 'languagecode', 'value'):
                obj[k] = getattr(attr, k)
            old_attrs[attr.id] = obj

        # Deleting old 'ProductAttribute' table
        db.delete_table('product_productattribute')

        # Re-use the create_table expression for the old 'ProductAttribute',
        # this time adding the 'option' column
        db.create_table('product_productattribute', (
            ('languagecode', self.gf('django.db.models.fields.CharField')(
                max_length=10, null=True, blank=True)),
            ('product', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['product.Product'])),
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('value', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('name', self.gf('django.db.models.fields.SlugField')(
                max_length=100, db_index=True)),
            ('option', self.gf('django.db.models.fields.related.ForeignKey')(
                to=orm['product.AttributeOption'])),
        ))
        db.send_create_signal('product', ['ProductAttribute'])

        # Add back data
        for id, attr_dict in old_attrs.items():
            kwargs = {}
            for field in ('product', 'languagecode', 'value'):
                kwargs[field] = attr_dict[field]
            orm['product.ProductAttribute'].objects.create(id=id, **kwargs)

# -*- coding: utf-8 -*-
import datetime
import hashlib
import sys
from contextlib import closing
from urllib2 import urlopen

from south.db import db
from south.logger import get_logger
from south.v2 import DataMigration

from django.db import models
from django.conf import settings

logger = get_logger()


class Migration(DataMigration):

    @staticmethod
    def get_actual_url(df):
        from urlparse import urlparse

        # Can't handle custom protocols
        try:
            if df.protocol in [t[0] for t in settings.DOWNLOAD_PROVIDERS]:
                return None
        except AttributeError:
            pass

        def get_absolute_filepath(df):
            if df.protocol == 'staging':
                return df.url
            url = urlparse(df.url)

def backwards(self, orm):
    get_logger().warning(
        "Unable to effect a migration to '0001' on the product modules; "
        "please do so manually."
    )
