def evolve(self, *app_labels, **options): verbosity = int(options['verbosity']) interactive = options['interactive'] execute = options['execute'] compile_sql = options['compile_sql'] hint = options['hint'] purge = options['purge'] database = options['database'] if not database and is_multi_db(): from django.db.utils import DEFAULT_DB_ALIAS database = DEFAULT_DB_ALIAS using_args = {} if is_multi_db(): using_args['using'] = database # Use the list of all apps, unless app labels are specified. if app_labels: if execute: raise CommandError('Cannot specify an application name when ' 'executing evolutions.') try: app_list = [get_app(app_label) for app_label in app_labels] except (ImproperlyConfigured, ImportError), e: raise CommandError("%s. Are you sure your INSTALLED_APPS " "setting is correct?" % e)
def evolve(self, *app_labels, **options): verbosity = int(options["verbosity"]) interactive = options["interactive"] execute = options["execute"] compile_sql = options["compile_sql"] hint = options["hint"] purge = options["purge"] force = options["force"] database = options["database"] if not database and is_multi_db(): from django.db.utils import DEFAULT_DB_ALIAS database = DEFAULT_DB_ALIAS using_args = {} if is_multi_db(): using_args["using"] = database # Use the list of all apps, unless app labels are specified. if app_labels: if execute: raise CommandError("Cannot specify an application name when " "executing evolutions.") try: app_list = [get_app(app_label) for app_label in app_labels] except (ImproperlyConfigured, ImportError), e: raise CommandError("%s. Are you sure your INSTALLED_APPS " "setting is correct?" % e)
def execute_transaction(sql, output=False, database='default'):
    """A transaction wrapper for executing a list of SQL statements.

    Enters managed transaction mode on the target database, executes the
    given SQL, and commits. On any failure the transaction is rolled back
    and the exception re-raised.
    """
    my_connection = connection
    using_args = {}

    if is_multi_db():
        if not database:
            database = DEFAULT_DB_ALIAS

        my_connection = connections[database]
        using_args['using'] = database

    try:
        # Begin Transaction
        transaction.enter_transaction_management(**using_args)
        transaction.managed(True, **using_args)

        cursor = my_connection.cursor()

        # Perform the SQL
        if output:
            write_sql(sql, database)

        execute_sql(cursor, sql)

        transaction.commit(**using_args)
        transaction.leave_transaction_management(**using_args)
    except Exception:
        transaction.rollback(**using_args)

        # Bug fix: the original rolled back but never left transaction
        # management, leaking managed-transaction state to later callers.
        transaction.leave_transaction_management(**using_args)
        raise
def evolver(self, model, database=None):
    """Return the evolver for the database handling writes on *model*.

    When no database is named and multi-database support is enabled, the
    write database is resolved through the router; otherwise the named
    database (or 'default') is used.
    """
    if database is not None or not is_multi_db():
        alias = database or 'default'
    else:
        alias = router.db_for_write(model)

    return EvolutionOperationsMulti(alias).get_evolver()
def evolver(self, model):
    """Return the evolver for the database this model writes to.

    Without multi-database support the default (None) alias is used.
    """
    alias = router.db_for_write(model) if is_multi_db() else None

    return EvolutionOperationsMulti(alias).get_evolver()
def evolver(self, model, database=None):
    """Look up the evolution operations evolver for a model's database."""
    if is_multi_db() and database is None:
        # Let the router decide which database handles writes.
        alias = router.db_for_write(model)
    else:
        alias = database if database else "default"

    return EvolutionOperationsMulti(alias).get_evolver()
def evolver(self, model):
    """Return the database evolver (adapter) for the given model.

    When multi-database support is enabled, the database name is resolved
    through the router's write routing; otherwise None (the default
    database) is used.
    """
    db_name = None

    if is_multi_db():
        # Resolve the database name for writes on this model.
        db_name = router.db_for_write(model)

    # NOTE(review): the previous version commented out this adapter lookup
    # and returned None, which breaks any caller expecting an evolver
    # object. Restored the original return.
    return EvolutionOperationsMulti(db_name).get_evolver()
def is_mutable(self, app_label, proj_sig, database):
    """Whether this mutation applies to the given database.

    In single-database setups every mutation is applicable. Otherwise a
    mock model is built from the signature and the router decides which
    database it writes to.
    """
    if not is_multi_db():
        return True

    model_sig = proj_sig[app_label][self.model_name]
    mock = MockModel(proj_sig, app_label, self.model_name, model_sig)
    write_db = router.db_for_write(mock)

    return write_db and write_db == database
def test_sql_mapping(test_field_name, db_name='default'):
    """Load the engine-specific SQL fixture attribute for a test field.

    Resolves the database engine name from settings (multi- or
    single-database style) and pulls *test_field_name* from the matching
    module under django_evolution.tests.db.
    """
    if is_multi_db():
        backend_path = settings.DATABASES[db_name]['ENGINE']
        engine = backend_path.split('.')[-1]
    else:
        engine = settings.DATABASE_ENGINE

    module_path = 'django_evolution.tests.db.%s' % engine
    sql_module = __import__(module_path, {}, {}, [''])

    return getattr(sql_module, test_field_name)
def get_unapplied_evolutions(app, database):
    """Obtain the list of unapplied evolutions for an application."""
    app_label = app.__name__.split('.')[-2]

    queryset = Evolution.objects.filter(app_label=app_label)

    if is_multi_db():
        queryset = queryset.using(database)

    applied = set(evolution.label for evolution in queryset)

    # Preserve the evolution sequence's ordering.
    return [label
            for label in get_evolution_sequence(app)
            if label not in applied]
def create_app_sig(app, database):
    """
    Creates a dictionary representation of the models in a given app.
    Only those attributes that are interesting from a schema-evolution
    perspective are included.
    """
    app_sig = SortedDict()

    for model in get_models(app):
        # Skip models that the router excludes from syncdb on this
        # database (multi-database setups only).
        syncable = not is_multi_db() or router.allow_syncdb(database, model)

        if syncable:
            app_sig[model._meta.object_name] = create_model_sig(model)

    return app_sig
def run_tests(verbosity=1, interactive=False):
    """Create test databases, run the nose test suite, then tear down.

    Handles both multi-database (every configured connection) and
    single-database Django configurations. Extra command-line arguments
    after the first two on sys.argv are forwarded to nose.
    """
    from django.conf import settings
    from django.core import management
    from django.test.utils import setup_test_environment, \
                                  teardown_test_environment
    from django_evolution import is_multi_db

    setup_test_environment()
    settings.DEBUG = False

    # (connection, real database name) pairs, remembered for teardown.
    old_db_names = []

    if is_multi_db():
        from django.db import connections

        # Create a test database on every configured connection.
        for alias in connections:
            connection = connections[alias]
            old_db_names.append((connection,
                                 connection.settings_dict['NAME']))
            connection.creation.create_test_db(verbosity,
                                               autoclobber=not interactive)
    else:
        from django.db import connection

        old_db_names.append((connection, settings.DATABASE_NAME))
        connection.creation.create_test_db(verbosity,
                                           autoclobber=not interactive)

    management.call_command('syncdb', verbosity=verbosity,
                            interactive=interactive)

    nose_argv = ['runtests.py', '-v',
                 '--with-coverage', '--with-doctest',
                 '--doctest-extension=.txt',
                 '--cover-package=django_evolution',
                 '--match=tests[\/]*.py']

    # Forward any extra user-supplied arguments to nose.
    if len(sys.argv) > 2:
        nose_argv += sys.argv[2:]

    nose.run(argv=nose_argv)

    # Destroy the test databases, restoring the original names.
    for connection, name in old_db_names:
        connection.creation.destroy_test_db(name, verbosity=0)

    teardown_test_environment()
from django.db.models.fields import * from django.db.models.fields.related import * from django.db import models from django.utils.datastructures import SortedDict from django.utils.functional import curry from django_evolution.signature import ATTRIBUTE_DEFAULTS from django_evolution import CannotSimulate, SimulationFailure, EvolutionNotImplementedError, is_multi_db from django_evolution.db import EvolutionOperationsMulti FK_INTEGER_TYPES = [ 'AutoField', 'PositiveIntegerField', 'PositiveSmallIntegerField' ] if is_multi_db(): from django.db import router def create_field(proj_sig, field_name, field_type, field_attrs, parent_model): """ Create an instance of a field from a field signature. This is useful for accessing all the database property mechanisms built into fields. """ # related_model isn't a valid field attribute, so it must be removed # prior to instantiating the field, but it must be restored # to keep the signature consistent. related_model = field_attrs.pop('related_model', None) if related_model: related_app_name, related_model_name = related_model.split('.')
def _register_models(app_label='tests', db_name='default', *models):
    """Register test models in the app cache under *app_label*.

    Renames each model's db_table, app_label, object_name and module_name
    to live under the test app label (truncated to the backend's maximum
    identifier length), and applies the matching rename to auto-generated
    many-to-many 'through' models. Returns a SortedDict mapping module
    names to the registered models.

    NOTE(review): nesting below the 'through' rename was reconstructed
    from a whitespace-mangled source — confirm against upstream.
    """
    app_cache = SortedDict()
    my_connection = connection

    if is_multi_db():
        my_connection = connections[db_name or DEFAULT_DB_ALIAS]

    max_name_length = my_connection.ops.max_name_length()

    for name, model in reversed(models):
        if model._meta.module_name in cache.app_models['django_evolution']:
            del cache.app_models['django_evolution'][model._meta.module_name]

        # Remember the original identity so related 'through' tables can
        # be matched and renamed below.
        orig_db_table = model._meta.db_table
        orig_object_name = model._meta.object_name
        orig_module_name = model._meta.module_name

        generated_db_table = truncate_name(
            '%s_%s' % (model._meta.app_label, model._meta.module_name),
            max_name_length)

        if orig_db_table.startswith(generated_db_table):
            # The table name was auto-generated; rebuild it from the new
            # app label and model name.
            model._meta.db_table = '%s_%s' % (app_label, name.lower())

        model._meta.db_table = truncate_name(model._meta.db_table,
                                             max_name_length)
        model._meta.app_label = app_label
        model._meta.object_name = name
        model._meta.module_name = name.lower()

        add_app_test_model(model, app_label=app_label)

        for field in model._meta.local_many_to_many:
            if not field.rel.through:
                continue

            through = field.rel.through

            generated_db_table = truncate_name(
                '%s_%s' % (orig_db_table, field.name),
                max_name_length)

            if through._meta.db_table == generated_db_table:
                through._meta.app_label = app_label

                # Transform the 'through' table information only
                # if we've transformed the parent db_table.
                if model._meta.db_table != orig_db_table:
                    through._meta.db_table = \
                        '%s_%s' % (model._meta.db_table, field.name)

                    through._meta.object_name = \
                        through._meta.object_name.replace(
                            orig_object_name,
                            model._meta.object_name)

                    through._meta.module_name = \
                        through._meta.module_name.replace(
                            orig_module_name,
                            model._meta.module_name)

            through._meta.db_table = \
                truncate_name(through._meta.db_table, max_name_length)

            # Rewrite relation columns that embed the old module name.
            for field in through._meta.local_fields:
                if field.rel and field.rel.to:
                    column = field.column

                    if (column.startswith(orig_module_name) or
                        column.startswith('to_%s' % orig_module_name) or
                        column.startswith('from_%s' % orig_module_name)):
                        field.column = column.replace(
                            orig_module_name,
                            model._meta.module_name)

            if (through._meta.module_name in
                cache.app_models['django_evolution']):
                del cache.app_models['django_evolution'][
                    through._meta.module_name]

            app_cache[through._meta.module_name] = through
            add_app_test_model(through, app_label=app_label)

        app_cache[model._meta.module_name] = model

    return app_cache
def wrap_sql_func(func, evo_test, style, db_name=None):
    """Invoke a SQL-generating function with or without a connection.

    Multi-database Django requires the connection as a third argument;
    older single-database setups do not accept it.
    """
    if not is_multi_db():
        return func(evo_test, style)

    return func(evo_test, style, connections[db_name or DEFAULT_DB_ALIAS])
def create_test_data(app_models, database):
    """Create one instance of each model, populated with test defaults.

    Models whose non-nullable relations cannot be satisfied yet are
    deferred and created in a recursive second pass; nullable relations
    that could not be satisfied are recorded in deferred_fields and
    populated after all models exist.
    """
    deferred_models = []
    deferred_fields = {}
    using_args = {}

    if is_multi_db():
        using_args['using'] = database

    for model in app_models:
        params = {}
        deferred = False

        for field in model._meta.fields:
            if not deferred:
                if type(field) in (models.ForeignKey,
                                   models.ManyToManyField):
                    related_model = field.rel.to
                    related_q = related_model.objects.all()

                    if is_multi_db():
                        related_q = related_q.using(database)

                    if related_q.count():
                        related_instance = related_q[0]
                    else:
                        if not field.null:
                            # Field cannot be null yet the related object
                            # hasn't been created yet. Defer the creation
                            # of this model.
                            deferred = True
                            deferred_models.append(model)
                        else:
                            # Field cannot be set yet but null is
                            # acceptable for the moment.
                            #
                            # Bug fix: the original assigned the result of
                            # list.append() (always None) back into the
                            # dict, and keyed by type(model) (the
                            # metaclass), losing the deferred field list.
                            deferred_fields.setdefault(model, []) \
                                .append(field)

                        related_instance = None

                    if not deferred:
                        if type(field) == models.ForeignKey:
                            params[field.name] = related_instance
                        else:
                            params[field.name] = [related_instance]
                else:
                    # Plain field: use the canned test value for its type.
                    params[field.name] = \
                        DEFAULT_TEST_ATTRIBUTE_VALUES[type(field)]

        if not deferred:
            model(**params).save(**using_args)

    # Create all deferred models.
    if deferred_models:
        create_test_data(deferred_models, database)

    # All models should be created (Not all deferred fields have been
    # populated yet) Populate deferred fields that we know about.
    # Here lies untested code!
    if deferred_fields:
        for model, field_list in deferred_fields.items():
            for field in field_list:
                related_model = field.rel.to
                related_instance = related_model.objects.using(database)[0]

                if type(field) == models.ForeignKey:
                    setattr(model, field.name, related_instance)
                else:
                    getattr(model, field.name).add(related_instance,
                                                   **using_args)

                model.save(**using_args)
def _register_models(app_label='tests', db_name='default', *models):
    """Register test models in the app cache under *app_label*.

    Renames each model's db_table, app_label, object_name and module_name
    to live under the test app label (truncated to the backend's maximum
    identifier length), and applies the matching rename to auto-generated
    many-to-many 'through' models. Returns a SortedDict mapping module
    names to the registered models.

    NOTE(review): nesting below the 'through' rename was reconstructed
    from a whitespace-mangled source — confirm against upstream.
    """
    app_cache = SortedDict()
    my_connection = connection

    if is_multi_db():
        my_connection = connections[db_name or DEFAULT_DB_ALIAS]

    max_name_length = my_connection.ops.max_name_length()

    for name, model in reversed(models):
        if model._meta.module_name in cache.app_models['django_evolution']:
            del cache.app_models['django_evolution'][model._meta.module_name]

        # Remember the original identity so related 'through' tables can
        # be matched and renamed below.
        orig_db_table = model._meta.db_table
        orig_object_name = model._meta.object_name
        orig_module_name = model._meta.module_name

        generated_db_table = truncate_name(
            '%s_%s' % (model._meta.app_label, model._meta.module_name),
            max_name_length)

        if orig_db_table.startswith(generated_db_table):
            # The table name was auto-generated; rebuild it from the new
            # app label and model name.
            model._meta.db_table = '%s_%s' % (app_label, name.lower())

        model._meta.db_table = truncate_name(model._meta.db_table,
                                             max_name_length)
        model._meta.app_label = app_label
        model._meta.object_name = name
        model._meta.module_name = name.lower()

        add_app_test_model(model, app_label=app_label)

        for field in model._meta.local_many_to_many:
            if not field.rel.through:
                continue

            through = field.rel.through

            generated_db_table = truncate_name(
                '%s_%s' % (orig_db_table, field.name),
                max_name_length)

            if through._meta.db_table == generated_db_table:
                through._meta.app_label = app_label

                # Transform the 'through' table information only
                # if we've transformed the parent db_table.
                if model._meta.db_table != orig_db_table:
                    through._meta.db_table = \
                        '%s_%s' % (model._meta.db_table, field.name)

                    through._meta.object_name = \
                        through._meta.object_name.replace(
                            orig_object_name,
                            model._meta.object_name)

                    through._meta.module_name = \
                        through._meta.module_name.replace(
                            orig_module_name,
                            model._meta.module_name)

            through._meta.db_table = \
                truncate_name(through._meta.db_table, max_name_length)

            # Rewrite relation columns that embed the old module name.
            for field in through._meta.local_fields:
                if field.rel and field.rel.to:
                    column = field.column

                    if (column.startswith(orig_module_name) or
                        column.startswith('to_%s' % orig_module_name) or
                        column.startswith(
                            'from_%s' % orig_module_name)):
                        field.column = column.replace(
                            orig_module_name,
                            model._meta.module_name)

            if (through._meta.module_name in
                cache.app_models['django_evolution']):
                del cache.app_models['django_evolution'][
                    through._meta.module_name]

            app_cache[through._meta.module_name] = through
            add_app_test_model(through, app_label=app_label)

        app_cache[model._meta.module_name] = model

    return app_cache
def evolution(app, created_models, verbosity=1, **kwargs):
    """
    A hook into syncdb's post_syncdb signal, that is used to notify the user
    if a model evolution is necessary.
    """
    default_db = None

    if is_multi_db():
        from django.db.utils import DEFAULT_DB_ALIAS
        default_db = DEFAULT_DB_ALIAS

    # The database this syncdb run targeted, as passed by the signal.
    db = kwargs.get('db', default_db)
    proj_sig = create_project_sig(db)
    signature = pickle.dumps(proj_sig)

    using_args = {}

    if is_multi_db():
        using_args['using'] = db

    try:
        if is_multi_db():
            latest_version = \
                django_evolution.Version.objects.using(db).latest('when')
        else:
            latest_version = django_evolution.Version.objects.latest('when')
    except django_evolution.Version.DoesNotExist:
        # We need to create a baseline version.
        if verbosity > 0:
            print "Installing baseline version"

        latest_version = django_evolution.Version(signature=signature)
        latest_version.save(**using_args)

        for a in get_apps():
            install_baseline(a, latest_version, using_args, verbosity)

    unapplied = get_unapplied_evolutions(app, db)

    if unapplied:
        print style.NOTICE('There are unapplied evolutions for %s.'
                           % app.__name__.split('.')[-2])

    # Evolutions are checked over the entire project, so we only need to
    # check once. We do this check when Django Evolutions itself is
    # synchronized.
    if app == django_evolution:
        old_proj_sig = pickle.loads(str(latest_version.signature))

        # If any models or apps have been added, a baseline must be set
        # for those new models
        changed = False
        new_apps = []

        for app_name, new_app_sig in proj_sig.items():
            if app_name == '__version__':
                # Ignore the __version__ tag
                continue

            old_app_sig = old_proj_sig.get(app_name, None)

            if old_app_sig is None:
                # App has been added
                old_proj_sig[app_name] = proj_sig[app_name]
                new_apps.append(app_name)
                changed = True
            else:
                for model_name, new_model_sig in new_app_sig.items():
                    old_model_sig = old_app_sig.get(model_name, None)

                    if old_model_sig is None:
                        # Model has been added
                        old_proj_sig[app_name][model_name] = \
                            proj_sig[app_name][model_name]
                        changed = True

        if changed:
            if verbosity > 0:
                print "Adding baseline version for new models"

            # Store the merged signature as the new baseline and install
            # baselines for every newly added app.
            latest_version = \
                django_evolution.Version(signature=pickle.dumps(old_proj_sig))
            latest_version.save(**using_args)

            for app_name in new_apps:
                install_baseline(get_app(app_name), latest_version,
                                 using_args, verbosity)

        # TODO: Model introspection step goes here.
        # # If the current database state doesn't match the last
        # # saved signature (as reported by latest_version),
        # # then we need to update the Evolution table.
        # actual_sig = introspect_project_sig()
        # acutal = pickle.dumps(actual_sig)
        # if actual != latest_version.signature:
        #     nudge = Version(signature=actual)
        #     nudge.save()
        #     latest_version = nudge

        diff = Diff(old_proj_sig, proj_sig)

        if not diff.is_empty():
            print style.NOTICE(
                'Project signature has changed - an evolution is required')

            if verbosity > 1:
                old_proj_sig = pickle.loads(str(latest_version.signature))
                print diff
raise CommandError("%s. Are you sure your INSTALLED_APPS " "setting is correct?" % e) else: app_list = get_apps() # Iterate over all applications running the mutations evolution_required = False simulated = True sql = [] new_evolutions = [] current_proj_sig = create_project_sig(database) current_signature = pickle.dumps(current_proj_sig) try: if is_multi_db(): latest_version = Version.objects.using(database).latest('when') else: latest_version = Version.objects.latest('when') database_sig = pickle.loads(str(latest_version.signature)) diff = Diff(database_sig, current_proj_sig) except Evolution.DoesNotExist: raise CommandError("Can't evolve yet. Need to set an " "evolution baseline.") try: for app in app_list: app_label = app.__name__.split('.')[-2] if hint: evolutions = []
def get_mutations(app, evolution_labels, database):
    """
    Obtain the list of mutations described by the named evolutions.

    For each label, a SQL file (optionally prefixed with the database
    name) is looked up in the app's evolutions directory, falling back to
    a Python evolution module. The resulting mutations are then filtered
    down to models whose signatures actually differ from the stored
    project signature.

    Raises EvolutionException when a label matches neither a SQL file nor
    a Python module.
    """
    # For each item in the evolution sequence. Check each item to see if it
    # is a python file or an sql file.
    try:
        app_name = '.'.join(app.__name__.split('.')[:-1])

        if app_name in BUILTIN_SEQUENCES:
            module_name = 'django_evolution.builtin_evolutions'
        else:
            module_name = '%s.evolutions' % app_name

        evolution_module = __import__(module_name, {}, {}, [''])
    except ImportError:
        return []

    mutations = []

    for label in evolution_labels:
        directory_name = os.path.dirname(evolution_module.__file__)

        # The first element is used for compatibility purposes.
        filenames = [
            os.path.join(directory_name, label + '.sql'),
            os.path.join(directory_name, "%s_%s.sql" % (database, label)),
        ]

        found = False

        for filename in filenames:
            if os.path.exists(filename):
                # Bug fix: the original opened an undefined name
                # 'sql_file_name' (NameError) and never closed the file.
                sql = []
                sql_file = open(filename)

                try:
                    for line in sql_file:
                        sql.append(line)
                finally:
                    sql_file.close()

                mutations.append(SQLMutation(label, sql))

                found = True
                break

        if not found:
            try:
                module_name = [evolution_module.__name__, label]
                module = __import__('.'.join(module_name),
                                    {}, {}, [module_name])
                mutations.extend(module.MUTATIONS)
            except ImportError:
                raise EvolutionException(
                    'Error: Failed to find an SQL or Python evolution named %s'
                    % label)

    if is_multi_db():
        latest_version = Version.objects.using(database).latest('when')
    else:
        latest_version = Version.objects.latest('when')

    app_label = app.__name__.split('.')[-2]
    old_proj_sig = pickle.loads(str(latest_version.signature))
    proj_sig = create_project_sig(database)

    if app_label in old_proj_sig and app_label in proj_sig:
        # We want to go through now and make sure we're only applying
        # evolutions for models where the signature is different between
        # what's stored and what's current.
        #
        # The reason for this is that we may have just installed a
        # baseline, which would have the up-to-date signature, and we
        # might be trying to apply evolutions on top of that (which would
        # already be applied). These would generate errors. So, try hard
        # to prevent that.
        old_app_sig = old_proj_sig[app_label]
        app_sig = proj_sig[app_label]

        changed_models = set()

        # Find the list of models in the latest signature of this app
        # that aren't in the old signature.
        for model_name, model_sig in app_sig.iteritems():
            if (model_name not in old_app_sig or
                old_app_sig[model_name] != model_sig):
                changed_models.add(model_name)

        # Now do the same for models in the old signature, in case the
        # model has been deleted.
        for model_name, model_sig in old_app_sig.iteritems():
            if model_name not in app_sig:
                changed_models.add(model_name)

        # We should now have a full list of which models changed. Filter
        # the list of mutations appropriately.
        mutations = [
            mutation
            for mutation in mutations
            if (not hasattr(mutation, 'model_name') or
                mutation.model_name in changed_models)
        ]

    return mutations
def get_mutations(app, evolution_labels, database):
    """
    Obtain the list of mutations described by the named evolutions.

    Each label is resolved first to a SQL file (a plain '<label>.sql' or a
    database-specific '<database>_<label>.sql') in the app's evolutions
    directory, then to a Python evolution module. Mutations are filtered
    to models whose stored and current signatures differ.

    Raises EvolutionException when a label cannot be resolved.
    """
    # For each item in the evolution sequence. Check each item to see if it
    # is a python file or an sql file.
    try:
        app_name = '.'.join(app.__name__.split('.')[:-1])

        if app_name in BUILTIN_SEQUENCES:
            module_name = 'django_evolution.builtin_evolutions'
        else:
            module_name = '%s.evolutions' % app_name

        evolution_module = __import__(module_name, {}, {}, [''])
    except ImportError:
        return []

    mutations = []

    for label in evolution_labels:
        directory_name = os.path.dirname(evolution_module.__file__)

        # The first element is used for compatibility purposes.
        filenames = [
            os.path.join(directory_name, label + '.sql'),
            os.path.join(directory_name, "%s_%s.sql" % (database, label)),
        ]

        found = False

        for filename in filenames:
            if os.path.exists(filename):
                # Bug fix: 'sql_file_name' was never defined, so this
                # always raised NameError; also ensure the file is closed.
                sql_file = open(filename)

                try:
                    sql = [line for line in sql_file]
                finally:
                    sql_file.close()

                mutations.append(SQLMutation(label, sql))

                found = True
                break

        if not found:
            try:
                module_name = [evolution_module.__name__, label]
                module = __import__('.'.join(module_name),
                                    {}, {}, [module_name])
                mutations.extend(module.MUTATIONS)
            except ImportError:
                raise EvolutionException(
                    'Error: Failed to find an SQL or Python evolution named %s'
                    % label)

    if is_multi_db():
        latest_version = Version.objects.using(database).latest('when')
    else:
        latest_version = Version.objects.latest('when')

    app_label = app.__name__.split('.')[-2]
    old_proj_sig = pickle.loads(str(latest_version.signature))
    proj_sig = create_project_sig(database)

    if app_label in old_proj_sig and app_label in proj_sig:
        # We want to go through now and make sure we're only applying
        # evolutions for models where the signature is different between
        # what's stored and what's current.
        #
        # The reason for this is that we may have just installed a
        # baseline, which would have the up-to-date signature, and we
        # might be trying to apply evolutions on top of that (which would
        # already be applied). These would generate errors. So, try hard
        # to prevent that.
        old_app_sig = old_proj_sig[app_label]
        app_sig = proj_sig[app_label]

        changed_models = set()

        # Find the list of models in the latest signature of this app
        # that aren't in the old signature.
        for model_name, model_sig in app_sig.iteritems():
            if (model_name not in old_app_sig or
                old_app_sig[model_name] != model_sig):
                changed_models.add(model_name)

        # Now do the same for models in the old signature, in case the
        # model has been deleted.
        for model_name, model_sig in old_app_sig.iteritems():
            if model_name not in app_sig:
                changed_models.add(model_name)

        # We should now have a full list of which models changed. Filter
        # the list of mutations appropriately.
        mutations = [
            mutation
            for mutation in mutations
            if (not hasattr(mutation, 'model_name') or
                mutation.model_name in changed_models)
        ]

    return mutations