def test_sealed(self):
    """seal() must lock out new fields/models while opening the seal-stage accessors."""
    class_prepared.connect(self.resolver.add_model)
    rt_field, rt_model = generate_computedmodel(
        self.resolver, 'RuntimeGeneratedB', lambda self: self.name.upper())
    self.resolver.seal()

    # sealed, but not yet initialized nor maps loaded
    state = (self.resolver._sealed, self.resolver._initialized, self.resolver._map_loaded)
    self.assertEqual(state, (True, False, False))

    # should raise on new fields or models
    with self.assertRaises(ResolverException):
        self.resolver.add_field(rt_field)
    with self.assertRaises(ResolverException):
        self.resolver.add_model(rt_model)
    with self.assertRaises(ResolverException):
        generate_computedmodel(
            self.resolver, 'RuntimeGeneratedC', lambda self: self.name.upper())
    class_prepared.disconnect(self.resolver.add_model)

    # should allow access to models_with_computedfields, computedfields_with_models
    self.assertEqual(
        list(self.resolver.models_with_computedfields), [(rt_model, {rt_field})])
    self.assertEqual(
        list(self.resolver.computedfields_with_models), [(rt_field, {rt_model})])

    # should raise on computed_models
    with self.assertRaises(ResolverException):
        self.resolver.computed_models
def _class_prepared_handler(sender, **kwargs):
    """
    Signal handler for class_prepared.

    This will be run for every model, looking for the moment when all
    dependent models are prepared for the first time. It will then run
    the given function, only once.
    """
    sender_app = sender._meta.app_label.lower() + '.' + sender._meta.object_name
    already_prepared = {sender_app}
    for app, models in app_cache.app_models.items():
        for model_name, model in models.items():
            already_prepared.add(app.lower() + '.' + model_name)
    if all(dep in already_prepared for dep in dependencies):
        db.start_transaction()
        try:
            # We need to disconnect, otherwise each new dynamo model generation
            # will trigger it and cause a "maximum recursion error"
            class_prepared.disconnect(_class_prepared_handler, weak=False)
            fn()
        # FIX: 'except DatabaseError, message:' and '<>' are Python-2-only
        # syntax (SyntaxError on Python 3) — replaced with 'as' / '!='.
        except DatabaseError as message:
            # If tables are missing altogether, not much we can do
            # until syncdb/migrate is run. "The code must go on" in this
            # case, without running our function completely. At least
            # database operations will be rolled back.
            db.rollback_transaction()
            # FIX: the original compared the exception *object* to a str,
            # which is never equal — compare its text representation instead.
            if str(message) != 'no such table: dynamo_metamodel':
                # Better connect again
                class_prepared.connect(_class_prepared_handler, weak=False)
            else:
                raise
        else:
            db.commit_transaction()
def test_initialized_full_wrong_modelbase(self):
    """initialize() must reject a runtime model built on the wrong model base."""
    class_prepared.connect(self.resolver.add_model)
    rt_field, rt_model = generate_computedmodel(
        self.resolver, 'RuntimeGeneratedF',
        lambda self: self.name.upper(), True)
    class_prepared.disconnect(self.resolver.add_model)

    with self.assertRaises(ResolverException):
        self.resolver.initialize()
def ready(self):
    """App startup: stop runtime model discovery, initialize the resolver, wire signals."""
    # disconnect model discovery to avoid resolver issues with models created later at runtime
    class_prepared.disconnect(BOOT_RESOLVER.add_model)

    # do not run graph reduction in migrations and own commands,
    # that deal with it in their own specific way
    skip_commands = ('makemigrations', 'migrate', 'help', 'rendergraph', 'createmap')
    if any(token in sys.argv for token in skip_commands):  # pragma: no cover
        BOOT_RESOLVER.initialize(True)
        return

    # normal startup
    BOOT_RESOLVER.initialize()

    # connect signals
    from computedfields.handlers import (
        postsave_handler, predelete_handler, postdelete_handler,
        m2m_handler, get_old_handler)
    from django.db.models.signals import (
        post_save, m2m_changed, pre_delete, post_delete, pre_save)
    wiring = (
        (pre_save, get_old_handler, 'COMP_FIELD_PRESAVE'),
        (post_save, postsave_handler, 'COMP_FIELD'),
        (pre_delete, predelete_handler, 'COMP_FIELD_PREDELETE'),
        (post_delete, postdelete_handler, 'COMP_FIELD_POSTDELETE'),
        (m2m_changed, m2m_handler, 'COMP_FIELD_M2M'),
    )
    for signal, handler, uid in wiring:
        signal.connect(handler, sender=None, weak=False, dispatch_uid=uid)
def test_initialized_models_only(self):
    """initialize(models_only=True) seals and initializes without loading the maps."""
    class_prepared.connect(self.resolver.add_model)
    rt_field, rt_model = generate_computedmodel(
        self.resolver, 'RuntimeGeneratedD', lambda self: self.name.upper())
    class_prepared.disconnect(self.resolver.add_model)
    self.resolver.initialize(models_only=True)

    # sealed + initialized, maps intentionally untouched
    state = (self.resolver._sealed, self.resolver._initialized, self.resolver._map_loaded)
    self.assertEqual(state, (True, True, False))

    # should allow access to computed_models
    self.assertEqual(self.resolver.computed_models, {rt_model: {'comp': rt_field}})
def test_initialized_full(self):
    """A full initialize() seals the resolver and loads every dependency map."""
    class_prepared.connect(self.resolver.add_model)
    rt_field, rt_model = generate_computedmodel(
        self.resolver, 'RuntimeGeneratedE', lambda self: self.name.upper())
    class_prepared.disconnect(self.resolver.add_model)
    self.resolver.initialize()

    state = (self.resolver._sealed, self.resolver._initialized, self.resolver._map_loaded)
    self.assertEqual(state, (True, True, True))

    # should have all maps loaded
    self.assertEqual(self.resolver._map, {})
    self.assertEqual(self.resolver._fk_map, {})
    self.assertEqual(
        self.resolver._local_mro,
        {rt_model: {'base': ['comp'], 'fields': {'comp': 1, 'name': 1}}})
def test_runtime_coverage(self):
    """Exercise resolver helper APIs against a runtime model and a static one."""
    class_prepared.connect(self.resolver.add_model)
    rt_field, rt_model = generate_computedmodel(
        self.resolver, 'RuntimeGeneratedH', lambda self: self.name.upper())
    class_prepared.disconnect(self.resolver.add_model)
    self.resolver.initialize()

    # MRO expansion
    self.assertEqual(self.resolver.get_local_mro(rt_model), ['comp'])
    self.assertEqual(self.resolver.get_local_mro(models.Concrete), [])

    # update_computedfields with update_fields expansion
    expanded = self.resolver.update_computedfields(rt_model(), {'name'})
    self.assertEqual(expanded, {'name', 'comp'})
    untouched = self.resolver.update_computedfields(models.Concrete(), {'name'})
    self.assertEqual(untouched, {'name'})

    # is_computedfield test (model, fieldname, expected)
    checks = (
        (rt_model, 'name', False),
        (rt_model, 'comp', True),
        (models.Concrete, 'name', False),
    )
    for model, fieldname, expected in checks:
        self.assertEqual(self.resolver.is_computedfield(model, fieldname), expected)
def test_initialstate(self):
    """A fresh resolver is unsealed, accepts fields/models, and blocks map accessors."""
    # all states should be false
    state = (self.resolver._sealed, self.resolver._initialized, self.resolver._map_loaded)
    self.assertEqual(state, (False, False, False))

    # should allow to add fields and models
    class_prepared.connect(self.resolver.add_model)
    rt_field, rt_model = generate_computedmodel(
        self.resolver, 'RuntimeGeneratedA', lambda self: self.name.upper())
    class_prepared.disconnect(self.resolver.add_model)
    self.assertEqual(self.resolver.computedfields, {rt_field})
    self.assertEqual(self.resolver.models, {rt_model})

    # should raise on computed_models, models_with_computedfields, computedfields_with_models
    with self.assertRaises(ResolverException):
        self.resolver.computed_models
    with self.assertRaises(ResolverException):
        list(self.resolver.models_with_computedfields)
    with self.assertRaises(ResolverException):
        list(self.resolver.computedfields_with_models)
def test_pickled_load(self):
    """Round-trip the resolver maps through a pickled map file and compare."""
    # write pickled map file
    class_prepared.connect(self.resolver.add_model)
    rt_field, rt_model = generate_computedmodel(
        self.resolver, 'RuntimeGeneratedG', lambda self: self.name.upper())
    class_prepared.disconnect(self.resolver.add_model)
    self.resolver.initialize()

    # patch test_full.models (otherwise pickle doesnt work)
    models.RuntimeGeneratedG = rt_model
    settings.COMPUTEDFIELDS_MAP = 'mapfile.test_generated'
    self.resolver._write_pickled_data()

    # load back pickled file
    pickled = self.resolver._load_pickled_data()
    settings.COMPUTEDFIELDS_MAP = None
    os.remove('mapfile.test_generated')

    # compare pickle data
    self.assertEqual(pickled['hash'], self.resolver._calc_modelhash())
    self.assertEqual(pickled['lookup_map'], self.resolver._map)
    self.assertEqual(pickled['fk_map'], self.resolver._fk_map)
    self.assertEqual(pickled['local_mro'], self.resolver._local_mro)
def resolve(**kwargs):
    """One-shot class_prepared hook: bind the related model once the target class appears."""
    prepared = kwargs['sender']
    # noinspection PyProtectedMember
    meta = prepared._meta
    if meta.app_label == app_label and meta.object_name == model_name:
        field.related_model = prepared
        # target found — stop listening
        class_prepared.disconnect(resolve, weak=False)
def receiver(sender, **kwargs):
    """One-shot class_prepared hook: run callback when the awaited model is prepared."""
    meta = sender._meta
    if meta.app_label == app_label and meta.object_name == object_name:
        # unhook first so the callback fires exactly once
        class_prepared.disconnect(receiver)
        callback(sender)