def diff_many(models1, models2, migrator=None, reverse=False):
    """Calculate changes for migrations from models2 to models1."""
    ordered_new = pw.sort_models_topologically(models1)
    ordered_old = pw.sort_models_topologically(models2)
    if reverse:
        ordered_new = reversed(ordered_new)
        ordered_old = reversed(ordered_old)

    # Index both sides by model name, preserving topological order.
    new_by_name = OrderedDict((m._meta.name, m) for m in ordered_new)
    old_by_name = OrderedDict((m._meta.name, m) for m in ordered_old)

    changes = []

    # Models present on both sides: diff them field by field.
    for name, new_model in new_by_name.items():
        if name in old_by_name:
            changes.extend(diff_one(new_model, old_by_name[name],
                                    migrator=migrator))

    # Models only in the new set: create them.
    for name in new_by_name:
        if name not in old_by_name:
            changes.append(create_model(new_by_name[name], migrator=migrator))

    # Models only in the old set: remove them.
    for name in old_by_name:
        if name not in new_by_name:
            changes.append(remove_model(old_by_name[name]))

    return changes
def test_declared_dependencies(self):
    """Meta.depends_on must be honored alongside implicit FK dependencies."""
    class A(Model):
        pass

    class B(Model):
        a = ForeignKeyField(A)
        b = ForeignKeyField('self')

    class NA(Model):
        class Meta:
            depends_on = (A, B)

    class C(Model):
        b = ForeignKeyField(B)
        c = ForeignKeyField('self')

        class Meta:
            depends_on = (NA,)

    class D1(Model):
        na = ForeignKeyField(NA)

        class Meta:
            depends_on = (A, C)

    class D2(Model):
        class Meta:
            depends_on = (NA, D1, C, B)

    declared = [A, B, C, D1, D2]
    expected = list(declared)
    # Every permutation of the input must sort back to the declared order.
    for shuffled in permutations(declared):
        self.assertEqual(sort_models_topologically(shuffled), expected)
def test_topological_sort_fundamentals(self):
    """Sort is deterministic, complete, parent-first, and name-ordered."""
    FK = ForeignKeyField

    # The model hierarchy under test.
    class A(Model):
        pass

    class B(Model):
        a = FK(A)  # must follow A

    class C(Model):
        a, b = FK(A), FK(B)  # must follow A and B

    class D(Model):
        c = FK(C)  # must follow A, B and C

    class E(Model):
        e = FK('self')

    # A child of E that is deliberately left out of the input.
    class Excluded(Model):
        e = FK(E)

    # Property 1: the output must not depend on the input ordering.
    baseline = None
    for candidate in permutations([A, B, C, D, E]):
        ordering = sort_models_topologically(candidate)
        baseline = baseline or ordering
        self.assertEqual(baseline, ordering)

    # Property 2: output has exactly the input models, nothing extra.
    self.assertEqual(len(ordering), 5)
    self.assertFalse(Excluded in ordering)

    # Property 3: parents must precede their children.
    def assert_precedes(parent, child):
        lhs, rhs = map(ordering.index, [parent, child])
        self.assertTrue(lhs < rhs)

    assert_precedes(A, B)
    assert_precedes(B, C)  # with A->B this places C after A transitively
    assert_precedes(C, D)  # and D after A and B transitively

    # Property 4: independent model hierarchies come out in name order.
    assert_precedes(A, E)
def test_declared_dependencies(self):
    """Mixed explicit (Meta.depends_on) and implicit (FK) dependencies sort."""
    class A(Model):
        pass

    class B(Model):
        a = ForeignKeyField(A)
        b = ForeignKeyField('self')

    class NA(Model):
        class Meta:
            depends_on = (A, B)

    class C(Model):
        b = ForeignKeyField(B)
        c = ForeignKeyField('self')

        class Meta:
            depends_on = (NA,)

    class D1(Model):
        na = ForeignKeyField(NA)

        class Meta:
            depends_on = (A, C)

    class D2(Model):
        class Meta:
            depends_on = (NA, D1, C, B)

    input_models = [A, B, C, D1, D2]
    expected_order = list(input_models)
    # The sort must recover the declared order from any permutation.
    for perm in permutations(input_models):
        result = sort_models_topologically(perm)
        self.assertEqual(result, expected_order)
def sort_by_fk_deps(table_names):
    """Return the given table names reordered so FK parents come first."""
    # Map db_table -> model class, restricted to the requested tables.
    by_table = {
        model._meta.db_table: model
        for model in all_models
        if model._meta.db_table in table_names
    }
    ordered = pw.sort_models_topologically(by_table.values())
    return [model._meta.db_table for model in ordered]
def _initialze_db():
    """Create a table for every registered model that does not have one yet.

    Probes each table with a single-row select: an OperationalError
    (table missing) triggers creation, while DoesNotExist (table exists
    but is empty) is fine and ignored.

    NOTE(review): the name has a typo ("initialze") but is kept unchanged
    for backward compatibility with existing callers.
    """
    for model in sort_models_topologically(all_models):
        try:
            model.select().get()
        except OperationalError:
            # Table missing -- create it.  (Dropped the unused ``as exc``
            # binding present in the original.)
            model.create_table()
        except DoesNotExist:
            # Table exists but is empty; nothing to do.
            pass
def fake_fixture(models, field_type_map=None, skip_id=True, on_failure=None):
    """Create one fake instance per model, in FK-dependency order.

    :param models: mapping of model class -> {field_name: value-or-factory}
                   overrides for specific fields.
    :param field_type_map: optional mapping of peewee field type ->
                           value-or-factory; defaults to datetime/faker/random
                           providers.
    :param skip_id: when True, leave ``id`` fields untouched.
    :param on_failure: optional callback ``(message, instance, added_objects)``
                       invoked when a save fails.
    :return: dict of model name -> saved instance.

    Fixes over the original:
    - ``random.randrange(1, 10)`` was *called* while building the default map,
      so every IntegerField shared one constant; now wrapped in a lambda.
    - ``ex.message`` does not exist on Python 3 exceptions; use ``str(ex)``.
    - ``logger.warn`` is deprecated; use ``logger.warning``.
    - ``field_value`` could be unbound (NameError) or stale from the previous
      field when no value source matched; such fields are now skipped.
    """
    default_field_type_map = {
        peewee.DateTimeField: datetime.datetime.now,
        peewee.CharField: faker.word,
        # Factory, not a pre-computed constant: each use draws a new value.
        peewee.IntegerField: lambda: random.randrange(1, 10),
    }

    def get_value(c, *args, **kwargs):
        # Call factories; pass plain values through unchanged.
        return c(*args, **kwargs) if callable(c) else c

    sorted_models = sort_models_topologically(models.keys())
    added_objects = {}
    if field_type_map is None:
        field_type_map = default_field_type_map

    for model in sorted_models:
        nm = model()
        logger.info('Creating new:%s model' % model._meta.name)
        for name, field in model._meta.fields.items():
            if skip_id and field.name in ('id',):
                continue
            # Value sources, in priority order: faker provider matching the
            # field name, explicit per-model override, per-type factory,
            # then previously-created FK target.
            if hasattr(faker, field.name):
                field_value = getattr(faker, field.name)()
            elif field.name in models[model]:
                field_value = get_value(models[model][field.name])
            elif type(field) in field_type_map:
                field_value = get_value(field_type_map[type(field)])
            elif (type(field) is peewee.ForeignKeyField
                    and field.rel_model._meta.name in added_objects):
                field_value = field.rel_model.get(
                    id=added_objects[field.rel_model._meta.name].id)
            else:
                # No value source matched: keep the model's own default
                # rather than setting an unbound/stale value.
                continue
            logger.info('Setting: %s.%s==%s'
                        % (model._meta.name, field.name, field_value))
            setattr(nm, field.name, field_value)
        try:
            nm.save()
        except Exception as ex:
            logger.warning(str(ex))
            if on_failure:
                on_failure(str(ex), nm, added_objects)
        else:
            logger.info('Added model: %s->id = %d' % (nm._meta.name, nm.id))
            added_objects[nm._meta.name] = nm
    return added_objects
def test_declared_dependencies_2(self):
    """A model may declare a dependency via Meta in addition to its FKs."""
    class C(Model):
        pass

    class B(Model):
        c = ForeignKeyField(C)

    class A(Model):
        class Meta:
            depends_on = B,

        c = ForeignKeyField(C)

    declared = [C, B, A]
    expected = list(declared)
    # Any input permutation must sort back to the declared order.
    for shuffled in permutations(declared):
        self.assertEqual(sort_models_topologically(shuffled), expected)
def test_declared_dependencies_simple(self):
    """Chain of explicit Meta.depends_on mixed with one implicit FK link."""
    class A(Model):
        pass

    class B(Model):
        class Meta:
            depends_on = (A,)

    class C(Model):
        b = ForeignKeyField(B)  # Implicit dependency.

    class D(Model):
        class Meta:
            depends_on = (C,)

    declared = [A, B, C, D]
    expected = list(declared)
    for shuffled in permutations(declared):
        self.assertEqual(sort_models_topologically(shuffled), expected)
def create(self, modelstr):
    """
    Create a new migration file for an existing model.
    Model could actually also be a module, in which case all Peewee models
    are extracted from the model and created.

    :param modelstr: Python class, module, or string pointing to a class
        or module.
    :return: True if migration file was created, otherwise False.
    :type: bool
    """
    model = modelstr
    if isinstance(modelstr, str):
        # Resolve a dotted path into the object it names.
        model = pydoc.locate(modelstr)
        if not model:
            LOGGER.info('could not import: {}'.format(modelstr))
            return False

    if inspect.ismodule(model):
        # A module: recurse over every Peewee model it defines itself
        # (models merely imported into the module are skipped).
        local_models = [
            attr for attr in model.__dict__.values()
            if inspect.isclass(attr)
            and issubclass(attr, peewee.Model)
            and model.__name__ == attr.__module__
        ]
        for member in peewee.sort_models_topologically(local_models):
            self.create(member)
        return True

    try:
        name = 'create table {}'.format(model._meta.db_table.lower())
        migration = self.next_migration(name)
        up_ops = build_upgrade_from_model(model)
        down_ops = build_downgrade_from_model(model)
        self.write_migration(migration, name=name,
                             upgrade=up_ops, downgrade=down_ops)
    except Exception as exc:
        LOGGER.error(exc)
        return False

    LOGGER.info('created: {}'.format(migration))
    return True
def test_declared_dependencies_2(self):
    """Explicit Meta dependency on B plus an implicit FK to C must sort."""
    class C(Model):
        pass

    class B(Model):
        c = ForeignKeyField(C)

    class A(Model):
        class Meta:
            depends_on = B,

        c = ForeignKeyField(C)

    input_models = [C, B, A]
    expected_order = list(input_models)
    for perm in permutations(input_models):
        result = sort_models_topologically(perm)
        self.assertEqual(result, expected_order)
def test_declared_dependencies_simple(self):
    """Explicit depends_on chain with one FK in the middle sorts stably."""
    class A(Model):
        pass

    class B(Model):
        class Meta:
            depends_on = (A, )

    class C(Model):
        b = ForeignKeyField(B)  # Implicit dependency.

    class D(Model):
        class Meta:
            depends_on = (C, )

    input_models = [A, B, C, D]
    expected_order = list(input_models)
    for perm in permutations(input_models):
        result = sort_models_topologically(perm)
        self.assertEqual(result, expected_order)
class Meta: database = database class Person(BaseModel): name = CharField(unique=True) class Message(BaseModel): person = ForeignKeyField(Person, related_name='messages') body = TextField() MODELS = [ Person, Message, ] CREATE = sort_models_topologically(MODELS) DROP = reversed(CREATE) class TestBerkeleyDatabase(unittest.TestCase): def setUp(self): with database.transaction(): for model_class in DROP: model_class.drop_table(True) for model_class in CREATE: model_class.create_table(True) def tearDown(self): database.close() os.unlink(DATABASE_FILE) shutil.rmtree('%s-journal' % DATABASE_FILE)
def scan(self):
    """Diff the locally-declared models against the live database schema.

    Records one migration "state" per difference (via ``self.state``):
    tables to create/drop, columns to add/drop/mutate, and indexes to
    add/drop.  Populates ``self.statements``, ``self.local``/``self.online``
    topologies and related bookkeeping as side effects.

    NOTE(review): reconstructed from whitespace-collapsed source; the
    ``with`` block is assumed to span the whole method body -- confirm
    against the original file.
    """
    self.statements = list()
    # Topologically sorted so create_table states come out parent-first.
    models = p.sort_models_topologically(self.manager.models)
    self.order_of_models = [m._meta.db_table for m in models]
    self.local_models = {m._meta.db_table: m for m in models}
    with self.manager.using(self.database):
        # Build both views of the schema: declared models vs. reflected DB.
        self.local = Topology(self.connection, self.local_models)
        introspector = Introspector.from_database(self.connection)
        self.online_models = introspector.generate_models()
        self.online = Topology(self.connection, self.online_models)
        # first missing tables to be created
        for db_table in self.order_of_models:
            if db_table not in self.online.models:
                local_model = self.local.models[db_table]
                self.state('create_table', local_model['instance'])
        # second missing tables to be dropped
        for db_table, online_model in iteritems(self.online.models):
            if db_table not in self.local.models:
                self.state('drop_table', online_model['instance'])
        # third scan fields to be created, dropped or mutate
        for db_table, online_model in iteritems(self.online.models):
            if db_table not in self.local.models:
                continue
            local_model = self.local.models[db_table]
            online_instance = online_model['instance']
            local_instance = local_model['instance']
            online_fields = online_model['fields']
            local_fields = local_model['fields']
            online_indexes = online_model['indexes']
            local_indexes = local_model['indexes']
            # scan indexes to be dropped (present online, absent locally)
            for online_index in online_indexes:
                found = any(l == online_index for l in local_indexes)
                if not found:
                    self.state('drop_index', online_instance, online_index)
            # fields to be dropped (present online, absent locally)
            for field_name, online_field in iteritems(online_fields):
                if field_name not in local_fields:
                    self.state('drop_column', local_instance, online_field)
            # fields to be added (declared locally, absent online)
            for field_name, local_field in iteritems(local_fields):
                if field_name not in online_fields:
                    self.state('add_column', local_instance, local_field)
            # fields to be mutated (present on both sides but differing)
            for field_name, local_field in iteritems(local_fields):
                if field_name not in online_fields:
                    continue
                online_field = online_fields[field_name]
                if local_field == online_field:
                    continue
                if local_field.test_modifiers_changed(online_field):
                    # peewee currently does not support reflection based on
                    # the modifier, when changed it always triggers this
                    # "changed" element.
                    pass
                elif local_field.test_null_changed(online_field):
                    # Nullability flipped: add or drop the NOT NULL
                    # constraint to match the local declaration.
                    if online_field.field.null:
                        self.state('add_not_null', local_instance, local_field)
                    else:
                        self.state('drop_not_null', local_instance, local_field)
                else:
                    # presumably ArrayField columns cannot be altered in
                    # place, so they are dropped and re-added -- TODO confirm.
                    skip = False
                    if local_field.sql != online_field.sql:
                        try:
                            from playhouse.postgres_ext import ArrayField
                            if isinstance(local_field, ArrayField):
                                skip = True
                        except ImportError:
                            pass
                    if skip:
                        self.state('drop_column', online_instance, online_field)
                        self.state('add_column', local_instance, local_field)
            # scan indexes to be created (declared locally, absent online)
            for local_index in local_indexes:
                found = any(l == local_index for l in online_indexes)
                if not found:
                    self.state('add_index', local_instance, local_index)
def create_tables(args):
    """Generate a migration file that creates tables for the given models.

    Imports each dotted model path from ``args.models``, then writes a
    numbered ``..._auto_create_tables_...py`` migration whose ``up()``
    creates tables and indexes in topological (parent-first) order and
    whose ``down()`` drops them in reverse order.  Exits with status 1 if
    any model fails to import.

    NOTE(review): reconstructed from whitespace-collapsed source; the
    indentation inside the generated-code string literals could not be
    recovered exactly -- confirm against the original file.
    """
    def my_import(name):
        # Resolve a dotted path ("pkg.module.Model") to the final attribute.
        components = name.split('.')
        mod = __import__('.'.join(components[:-1]))
        for comp in components[1:]:
            mod = getattr(mod, comp)
        return mod

    model_classes = []
    errors = False
    for model in args.models:
        print("Importing model {0}... ".format(colored(model, "magenta")),
              end='')
        try:
            model_class = my_import(model)
            model_classes.append(model_class)
            print(colored('OK', 'green'))
        except ImportError:
            # Keep going so all import failures are reported before exiting.
            print(colored('import error', 'red'))
            errors = True
    if errors:
        sys.exit(1)

    terminator = Terminator(args)
    # Next sequential migration number, zero-padded to four digits.
    next_migration_num = '0001'
    if terminator._retreive_filenames():
        next_migration_num = \
            terminator._retreive_filenames()[-1].split('_')[0]
        next_migration_num = "%04d" % (int(next_migration_num) + 1)
    # Use model names in the filename unless there are too many (>= 4),
    # in which case just use the count.
    migration_file_name = '{0}/{1}/{2}_auto_create_tables_{3}.py'.format(
        terminator.folder, 'migrations', next_migration_num,
        "_".join([m.__name__.lower() for m in model_classes])
        if len(model_classes) < 4 else len(model_classes))
    qc = terminator.database.compiler()
    print("Writing down migration file", colored(migration_file_name, 'blue'))
    with open(migration_file_name, 'w') as migration_file:
        # Module preamble of the generated migration.
        print("from {0} import {1} as model_class".format(
            model_classes[0].__module__, model_classes[0].__name__),
            file=migration_file)
        print("database = model_class._meta.database", file=migration_file)
        # up(): create tables parent-first, then their indexes.
        print("\n\ndef up():", file=migration_file)
        for m in sort_models_topologically(model_classes):
            print("\n # Create model", m.__module__ + '.' + m.__name__,
                  file=migration_file)
            print(" database.execute_sql('%s')\n" % qc.create_table(m)[0],
                  file=migration_file)
            for field in m._fields_to_index():
                print(" database.execute_sql('%s')" %
                      qc.create_index(m, [field], field.unique)[0],
                      file=migration_file)
            if m._meta.indexes:
                for fields, unique in m._meta.indexes:
                    fobjs = [m._meta.fields[f] for f in fields]
                    print(" database.execute_sql('%s')" %
                          qc.create_index(m, fobjs, unique)[0],
                          file=migration_file)
        # down(): drop tables child-first (reverse topological order).
        print("\n\ndef down():", file=migration_file)
        for m in reversed(sort_models_topologically(model_classes)):
            print("\n # Drop model", m.__module__ + '.' + m.__name__,
                  file=migration_file)
            print(" database.execute_sql('%s')\n" %
                  qc.drop_table(m, cascade=True)[0], file=migration_file)
async def drop_model_tables(models, **drop_table_kwargs):
    """Drop tables for all given models (in the right order)."""
    # Children first: the reverse of the parent-first creation order.
    ordered = sort_models_topologically(models)
    for model in reversed(ordered):
        await model.drop_table(**drop_table_kwargs)
async def create_model_tables(models, **create_table_kwargs):
    """Create tables for all given models (in the right order)."""
    # Parents first so foreign-key targets exist before their children.
    ordered = sort_models_topologically(models)
    for model in ordered:
        await model.create_table(**create_table_kwargs)
def create_tables(self, models, safe=False):
    """Yield one create-table operation per model, parents before children."""
    ordered = sort_models_topologically(models)
    for model in ordered:
        yield model.create_table(fail_silently=safe)
def drop_tables(self, models, safe=False, cascade=False):
    """Yield one drop-table operation per model, children before parents."""
    ordered = sort_models_topologically(models)
    for model in reversed(ordered):
        yield model.drop_table(fail_silently=safe, cascade=cascade)
def truncate_tables(self, models, restart_identity=False, cascade=False):
    """Yield one truncate operation per model, children before parents."""
    ordered = sort_models_topologically(models)
    for model in reversed(ordered):
        yield model.truncate_table(restart_identity, cascade)
def setup_tables():
    """Create tables for every BaseModel subclass, parents first.

    Bug fix: the original iterated ``reversed(sort_models_topologically(...))``,
    which presents child tables before their foreign-key targets.  Table
    *creation* must follow topological order (parents first); only drops go
    in reverse.  ``safe=True`` keeps existing tables untouched.
    """
    import models
    model_list = p.sort_models_topologically(models.BaseModel.__subclasses__())
    db.create_tables(model_list, safe=True)
class Person(BaseModel):
    name = CharField(unique=True)


class Message(BaseModel):
    person = ForeignKeyField(Person, related_name='messages')
    body = TextField()


MODELS = [
    Person,
    Message,
]

# Parent-first creation order; drop order is the reverse.
CREATE = sort_models_topologically(MODELS)
# Materialize with list(): reversed() returns a one-shot iterator, and
# setUp() runs once per test -- the original left DROP empty after the
# first test, so later tests never dropped their tables.
DROP = list(reversed(CREATE))


class TestBerkeleyDatabase(unittest.TestCase):
    def setUp(self):
        # Recreate a clean schema for every test.
        with database.transaction():
            for model_class in DROP:
                model_class.drop_table(True)
            for model_class in CREATE:
                model_class.create_table(True)

    def tearDown(self):
        database.close()
        os.unlink(DATABASE_FILE)
        shutil.rmtree('%s-journal' % DATABASE_FILE)