def test_ticket_11936(self):
    """Regression for #11936 -- loading.get_models must not return
    deferred models unless include_deferred=True is passed."""
    # Run a couple of defer queries so that the app-cache holds some
    # deferred classes.  It may hold a lot more depending on the order
    # the tests are run in.
    list(Item.objects.defer("name"))
    list(Child.objects.defer("value"))

    app = cache.get_app("defer_regress")

    plain_names = {model.__name__ for model in cache.get_models(app)}
    self.assertIn("Child", plain_names)
    self.assertIn("Item", plain_names)
    self.assertNotIn("Child_Deferred_value", plain_names)
    self.assertNotIn("Item_Deferred_name", plain_names)
    self.assertFalse(any(model._deferred for model in cache.get_models(app)))

    deferred_names = {
        model.__name__
        for model in cache.get_models(app, include_deferred=True)
    }
    self.assertIn("Child", deferred_names)
    self.assertIn("Item", deferred_names)
    self.assertIn("Child_Deferred_value", deferred_names)
    self.assertIn("Item_Deferred_name", deferred_names)
    self.assertTrue(any(
        model._deferred
        for model in cache.get_models(app, include_deferred=True)))
def test_ticket_11936(self):
    """Regression for #11936 (get_models must hide deferred models by
    default) and #16409 (defer()/only() must work with annotate())."""
    # Run a couple of defer queries so that the app-cache holds some
    # deferred classes.  It may hold a lot more depending on the order
    # the tests are run in.
    list(Item.objects.defer("name"))
    list(Child.objects.defer("value"))

    app = cache.get_app("defer_regress")

    plain_names = {model.__name__ for model in cache.get_models(app)}
    self.assertIn("Child", plain_names)
    self.assertIn("Item", plain_names)
    self.assertNotIn("Child_Deferred_value", plain_names)
    self.assertNotIn("Item_Deferred_name", plain_names)
    self.assertFalse(any(model._deferred for model in cache.get_models(app)))

    deferred_names = {
        model.__name__
        for model in cache.get_models(app, include_deferred=True)
    }
    self.assertIn("Child", deferred_names)
    self.assertIn("Item", deferred_names)
    self.assertIn("Child_Deferred_value", deferred_names)
    self.assertIn("Item_Deferred_name", deferred_names)
    self.assertTrue(any(
        model._deferred
        for model in cache.get_models(app, include_deferred=True)))

    # Regression for #16409 - make sure defer() and only() work with annotate()
    annotated = SimpleItem.objects.annotate(Count("feature"))
    self.assertIsInstance(list(annotated.defer("name")), list)
    self.assertIsInstance(list(annotated.only("name")), list)
def handle_noargs(self, **options): if settings.MEDIA_ROOT == '': print "MEDIA_ROOT is not set, nothing to do" return # Get a list of all files under MEDIA_ROOT media = [] for root, dirs, files in os.walk(settings.MEDIA_ROOT): for f in files: media.append(os.path.abspath(os.path.join(root, f))) # Get list of all fields (value) for each model (key) # that is a FileField or subclass of a FileField model_dict = defaultdict(list) for app in cache.get_apps(): model_list = cache.get_models(app) for model in model_list: for field in model._meta.fields: if issubclass(field.__class__, models.FileField): model_dict[model].append(field) # Get a list of all files referenced in the database referenced = [] for model in model_dict.iterkeys(): all = model.objects.all().iterator() for object in all: for field in model_dict[model]: referenced.append( os.path.abspath(getattr(object, field.name).path)) # Print each file in MEDIA_ROOT that is not referenced in the database for m in media: if m not in referenced: print m
def get_models_from_cache(app):
    """Return the models registered for *app*.

    Prefers the modern app registry (``django.apps``, Django >= 1.7) and
    falls back to the legacy app-cache on older Django versions.
    """
    try:
        # NOTE(review): recent Django versions dropped the positional
        # app-module argument from apps.get_models() -- confirm the target
        # Django version still accepts it.
        from django.apps import apps
        return apps.get_models(app)
    except ImportError:
        # Django < 1.7: the old loading cache exposes the same call.
        from django.db.models.loading import cache
        return cache.get_models(app)
def handle_noargs(self, **options): if settings.MEDIA_ROOT == '': print "MEDIA_ROOT is not set, nothing to do" return # Get a list of all files under MEDIA_ROOT media = [] for root, dirs, files in os.walk(settings.MEDIA_ROOT): for f in files: media.append(os.path.abspath(os.path.join(root, f))) # Get list of all fields (value) for each model (key) # that is a FileField or subclass of a FileField model_dict = defaultdict(list) for app in cache.get_apps(): model_list = cache.get_models(app) for model in model_list: for field in model._meta.fields: if issubclass(field.__class__, models.FileField): model_dict[model].append(field) # Get a list of all files referenced in the database referenced = [] for model in model_dict.iterkeys(): all = model.objects.all().iterator() for object in all: for field in model_dict[model]: referenced.append(os.path.abspath(getattr(object, field.name).path)) # Print each file in MEDIA_ROOT that is not referenced in the database for m in media: if m not in referenced: print m
def find_models_with_filefield():
    """Connect file-cleanup signal handlers to every model that declares
    at least one FileField."""
    for app in cache.get_apps():
        for model in cache.get_models(app):
            # any() short-circuits on the first FileField, matching the
            # original break-on-first-match behavior.
            has_file_field = any(
                isinstance(field, models.FileField)
                for field in model._meta.fields
            )
            if has_file_field:
                pre_save.connect(remove_old_files, sender=model)
                post_delete.connect(remove_files, sender=model)
def get_models_with_local_permissions():
    """
    This is a simple helper function that retrieves the list of installed
    models for which local permission management is active.
    """
    # Local permission management is signalled by a `local_grants`
    # attribute on an installed model class.
    matching = []
    for model in cache.get_models():
        if model._meta.installed and hasattr(model, 'local_grants'):
            matching.append(model)
    return matching
def setup_databases(self):
    """Create or reuse the test database for every configured connection.

    For each connection: point the connection at the test DB name, create
    the database only when needed, otherwise reset auto-increment
    sequences, and finally monkeypatch create_test_db/Command.handle so
    the superclass call is a near no-op.
    """
    for alias in connections:
        connection = connections[alias]
        creation = connection.creation
        test_db_name = creation._get_test_db_name()

        # Mess with the DB name so other things operate on a test DB
        # rather than the real one. This is done in create_test_db when
        # we don't monkeypatch it away with _skip_create_test_db.
        orig_db_name = connection.settings_dict['NAME']
        connection.settings_dict['NAME'] = test_db_name

        if not _reusing_db() and _can_support_reuse_db(connection):
            print ('To reuse old database "%s" for speed, set env var '
                   'REUSE_DB=1.' % test_db_name)

        if _should_create_database(connection):
            # We're not using _skip_create_test_db, so put the DB name back:
            connection.settings_dict['NAME'] = orig_db_name

            # Since we replaced the connection with the test DB, closing
            # the connection will avoid pooling issues with SQLAlchemy. The
            # issue is trying to CREATE/DROP the test database using a
            # connection to a DB that was established with that test DB.
            # MySQLdb doesn't allow it, and SQLAlchemy attempts to reuse
            # the existing connection from its pool.
            connection.close()
        else:
            # Reset auto-increment sequences. Apparently, SUMO's tests are
            # horrid and coupled to certain numbers.
            cursor = connection.cursor()
            style = no_style()
            if uses_mysql(connection):
                reset_statements = _mysql_reset_sequences(style, connection)
            else:
                reset_statements = connection.ops.sequence_reset_sql(
                    style, cache.get_models())
            for reset_statement in reset_statements:
                cursor.execute(reset_statement)

            # Django v1.3 (https://code.djangoproject.com/ticket/9964)
            # starts using commit_unless_managed() for individual
            # connections. Backwards compatibility for Django 1.2 is to use
            # the generic transaction function.
            transaction.commit_unless_managed(using=connection.alias)

        # Skip the (slow) real create_test_db on the next call.
        creation.create_test_db = new.instancemethod(
            _skip_create_test_db, creation, creation.__class__)

    Command.handle = _foreign_key_ignoring_handle

    # With our class patch, does nothing but return some connection
    # objects:
    return super(NoseTestSuiteRunner, self).setup_databases()
def setup_databases(self):
    """Create or reuse the test database for every configured connection.

    For each connection: point the connection at the test DB name, create
    the database only when needed, otherwise reset auto-increment
    sequences, and finally monkeypatch create_test_db/Command.handle so
    the superclass call is a near no-op.
    """
    for alias in connections:
        connection = connections[alias]
        creation = connection.creation
        test_db_name = creation._get_test_db_name()

        # Mess with the DB name so other things operate on a test DB
        # rather than the real one. This is done in create_test_db when
        # we don't monkeypatch it away with _skip_create_test_db.
        orig_db_name = connection.settings_dict['NAME']
        connection.settings_dict['NAME'] = test_db_name

        if _should_create_database(connection):
            # We're not using _skip_create_test_db, so put the DB name
            # back:
            connection.settings_dict['NAME'] = orig_db_name

            # Since we replaced the connection with the test DB, closing
            # the connection will avoid pooling issues with SQLAlchemy. The
            # issue is trying to CREATE/DROP the test database using a
            # connection to a DB that was established with that test DB.
            # MySQLdb doesn't allow it, and SQLAlchemy attempts to reuse
            # the existing connection from its pool.
            connection.close()
        else:
            # Reset auto-increment sequences. Apparently, SUMO's tests are
            # horrid and coupled to certain numbers.
            cursor = connection.cursor()
            style = no_style()
            if uses_mysql(connection):
                reset_statements = _mysql_reset_sequences(
                    style, connection)
            else:
                reset_statements = connection.ops.sequence_reset_sql(
                    style, cache.get_models())
            for reset_statement in reset_statements:
                cursor.execute(reset_statement)

            # Django v1.3 (https://code.djangoproject.com/ticket/9964)
            # starts using commit_unless_managed() for individual
            # connections. Backwards compatibility for Django 1.2 is to use
            # the generic transaction function.
            transaction.commit_unless_managed(using=connection.alias)

        # Each connection has its own creation object, so this affects
        # only a single connection:
        creation.create_test_db = new.instancemethod(
            _skip_create_test_db, creation, creation.__class__)

    Command.handle = _foreign_key_ignoring_handle

    # With our class patch, does nothing but return some connection
    # objects:
    return super(NoseTestSuiteRunner, self).setup_databases()
def find_models_with_filefield():
    """Return every installed model declaring at least one FileField."""
    matches = []
    for app in cache.get_apps():
        for model in cache.get_models(app):
            # any() short-circuits on the first FileField, matching the
            # original break-after-append behavior.
            if any(isinstance(f, models.FileField) for f in model._meta.fields):
                matches.append(model)
    return matches
def flush_cache(apps, options): """ Clears the image cache """ spec_class_list = options['spec_class'] apps = [a.strip(',') for a in apps] for app_label in apps: app = cache.get_app(app_label) models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)] if apps: for app_label in apps: app = cache.get_app(app_label) models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)] for model in models: print 'Flushing cache for "%s.%s"' % (app_label, model.__name__) for obj in model.objects.order_by('-pk'): if spec_class_list: for spec_name in spec_class_list: try: spec = model._ik.specs[spec_name] except KeyError: print('Model %s has no spec named %s' % (model.__name__, spec_name)) continue prop = getattr(obj, spec.name(), None) if prop is not None: prop._delete() if spec.pre_cache: print('Creating %s: %d' % (spec_name,obj.id)) prop._create() else: for spec in model._ik.specs.values(): print('Flushing item %d' % obj.pk) prop = getattr(obj, spec.name(), None) if prop is not None: prop._delete() if spec.pre_cache: prop._create() else: print 'Please specify one or more app names'
def get_models_with_local_permissions():
    """
    This is a simple helper function that retrieves the list of installed
    models for which local permission management is active.
    """
    def _is_locally_managed(model):
        # Only installed model classes defining `local_grants` qualify.
        return model._meta.installed and hasattr(model, 'local_grants')

    return [m for m in cache.get_models() if _is_locally_managed(m)]
def test_dynamic_load(self):
    """
    Makes a new model at runtime and ensures it goes into the right place.
    """
    old_models = cache.get_models(cache.get_app("app_cache"))
    # Construct a new model in a new app cache
    body = {}
    new_app_cache = BaseAppCache()
    meta_contents = {
        'app_label': "app_cache",
        # Pointing Meta at the fresh cache keeps the dynamic model out of
        # the global app-cache.
        'app_cache': new_app_cache,
    }
    meta = type("Meta", tuple(), meta_contents)
    body['Meta'] = meta
    body['__module__'] = TotallyNormal.__module__
    # type() with models.Model as base runs ModelBase.__new__, which
    # registers the class with the app cache named in Meta.
    temp_model = type("SouthPonies", (models.Model,), body)
    # Make sure it appeared in the right place!
    # The global cache must be unchanged...
    self.assertEqual(
        old_models,
        cache.get_models(cache.get_app("app_cache")),
    )
    # ...and the new cache must hold the dynamic model.
    self.assertEqual(new_app_cache.get_model("app_cache", "SouthPonies"), temp_model)
def _get_needed_files(self):
    """Collect the base names of every file referenced by any FileField
    across all installed apps."""
    needed = []
    for app in cache.get_apps():
        for model in cache.get_models(app):
            file_fields = [
                field.name
                for field in model._meta.fields
                if field.get_internal_type() == 'FileField'
            ]
            if not file_fields:
                continue
            rows = model.objects.all().values_list(*file_fields)
            for value in itertools.chain.from_iterable(rows):
                if value:
                    needed.append(split_name(value)[0])
    return needed
def flush_cache(apps, options): apps = [a.strip(',') for a in apps] if apps: for app_label in apps: app = cache.get_app(app_label) for model in [m for m in cache.get_models(app)]: print 'Flushing cache for "%s.%s"' % (app_label, model.__name__) for obj in model.objects.order_by('-pk'): for spec_file in get_spec_files(obj): spec_file.delete(save=False) if spec_file.field.pre_cache: spec_file.generate(False) else: print 'Please specify one or more app names'
def stashed_object_counts(request):
    """A context processor which adds counts of stashed objects of models
    which inherit from SessionStashable to RequestContext.

    To make a count appear for a particular model, set the class attribute
    context_count_name to an appropriate string to name its context
    variable, and enable this context processor in settings.py.
    """
    counts = {}
    for app in cache.get_apps():
        for model in cache.get_models(app):
            if not issubclass(model, SessionStashable):
                continue
            if model.context_count_name:
                counts[model.context_count_name] = \
                    model.num_stashed_in_session(request.session)
    return counts
def _get_needed_files(self):
    """Collect the base names of every file referenced by any FileField
    across all installed apps."""
    result = []
    for app in cache.get_apps():
        for model in cache.get_models(app):
            names = [f.name for f in model._meta.fields
                     if f.get_internal_type() == 'FileField']
            if len(names) > 0:
                values = model.objects.all().values_list(*names)
                result.extend(
                    split_name(v)[0]
                    for v in itertools.chain.from_iterable(values) if v)
    return result
def main(): DATE=datetime.today().date() # Datestamp e.g 2002-09-21 DOW=datetime.today().strftime("%A") # Day of the week e.g. Monday # DOM=datetime.today().day # Date of the Month e.g. 27 # M=datetime.today().strftime("%B") # Month e.g January # W=datetime.today().strftime("%W") # Week Number e.g 37 # Borramos el fichero de la/s semana/s pasada/s os.system("rm -fv %s/*.rtg2.%s.sql.gz" % (settings.MYSQL_BACKUP_DIR, DOW)) rtg_models=cache.get_models(cache.get_app('rtg')) all_models = [model._meta.db_table for model in rtg_models] all_models_text = " ".join(all_models) fichero_backup ="%s/%s.rtg2.%s.sql" % (settings.MYSQL_BACKUP_DIR, DATE, DOW) dtb = settings.DATABASES['default'] os.system("mysqldump -c --user=%s --password=%s --host=%s -f %s %s > %s" % ( dtb['USER'], dtb['PASSWORD'], dtb['HOST'], dtb['NAME'], all_models_text, fichero_backup)) os.system("gzip -f %s" % (fichero_backup,))
def handle_noargs(self, **options): if settings.MEDIA_ROOT == '': print("MEDIA_ROOT is not set, nothing to do") return # Get a list of all files under MEDIA_ROOT media = [] for root, dirs, files in os.walk(settings.MEDIA_ROOT): for f in files: if ('geoserver_icons' not in root) and ('resized' not in root): media.append(os.path.abspath(os.path.join(root, f))) # Get list of all fields (value) for each model (key) # that is a FileField or subclass of a FileField model_dict = defaultdict(list) for app in cache.get_apps(): model_list = cache.get_models(app) for model in model_list: for field in model._meta.fields: if issubclass(field.__class__, models.FileField): model_dict[model].append(field) # Get a list of all files referenced in the database referenced = [] for model in model_dict: all = model.objects.all().iterator() for object in all: for field in model_dict[model]: target_file = getattr(object, field.name) if target_file: referenced.append(os.path.abspath(target_file.path)) # Print each file in MEDIA_ROOT that is not referenced in the database c = 0 for m in media: if m not in referenced: print 'Removing image %s' % m os.remove(m) c = c + 1 print 'Removed %s images, from a total of %s (referenced %s)' % ( c, len(media), len(referenced))
def flush_cache(apps, options): """ Clears the image cache """ apps = [a.strip(',') for a in apps] if apps: for app_label in apps: app = cache.get_app(app_label) models = [m for m in cache.get_models(app) if issubclass(m, ImageModel)] for model in models: print 'Flushing cache for "%s.%s"' % (app_label, model.__name__) for obj in model.objects.all(): for spec in model._ik.specs: prop = getattr(obj, spec.name(), None) if prop is not None: prop._delete() if spec.pre_cache: prop._create() else: print 'Please specify on or more app names'
def find_orphaned_files(path=""):
    """Prints a list of all files in the path that are not referenced in
    the database by all apps.
    """
    if not getattr(settings, "MEDIA_ROOT", None):
        sys.stdout.write("MEDIA_ROOT is not set, nothing to do")
        return
    # Get a list of all files under MEDIA_ROOT.
    media = set()
    for root, dirs, files in os.walk(os.path.join(settings.MEDIA_ROOT, path)):
        for f in files:
            media.add(os.path.abspath(os.path.join(root, f)))
    # Get list of all fields (value) for each model (key)
    # that is a FileField or subclass of a FileField.
    model_dict = defaultdict(list)
    for app in cache.get_apps():
        for model in cache.get_models(app):
            for field in model._meta.fields:
                if issubclass(field.__class__, models.FileField):
                    model_dict[model].append(field)
    # Get a list of all files referenced in the database.  Iterating the
    # dict directly replaces the Python-3-incompatible iterkeys(), and the
    # renamed loop variables no longer shadow the built-ins all/object.
    referenced = set()
    for model in model_dict:
        for instance in model.objects.all().iterator():
            for field in model_dict[model]:
                f = getattr(instance, field.name)
                if f:
                    referenced.add(os.path.abspath(f.path))
    # Print each file that is not referenced in the database.
    for f in sorted(media - referenced):
        sys.stdout.write(f)
        sys.stdout.write("\n")
def find_orphaned_files(path=''):
    """Prints a list of all files in the path that are not referenced in
    the database by all apps.
    """
    if not getattr(settings, 'MEDIA_ROOT', None):
        sys.stdout.write('MEDIA_ROOT is not set, nothing to do')
        return
    # Get a list of all files under MEDIA_ROOT.
    media = set()
    for root, dirs, files in os.walk(os.path.join(settings.MEDIA_ROOT, path)):
        for f in files:
            media.add(os.path.abspath(os.path.join(root, f)))
    # Get list of all fields (value) for each model (key)
    # that is a FileField or subclass of a FileField.
    model_dict = defaultdict(list)
    for app in cache.get_apps():
        for model in cache.get_models(app):
            for field in model._meta.fields:
                if issubclass(field.__class__, models.FileField):
                    model_dict[model].append(field)
    # Get a list of all files referenced in the database.  Iterating the
    # dict directly replaces the Python-3-incompatible iterkeys(), and the
    # renamed loop variables no longer shadow the built-ins all/object.
    referenced = set()
    for model in model_dict:
        for instance in model.objects.all().iterator():
            for field in model_dict[model]:
                f = getattr(instance, field.name)
                if f:
                    referenced.add(os.path.abspath(f.path))
    # Print each file that is not referenced in the database.
    for f in sorted(media - referenced):
        sys.stdout.write(f)
        sys.stdout.write('\n')
def flush_cache(apps, options): """ Clears the image cache """ apps = [a.strip(',') for a in apps] if apps: for app_label in apps: app = cache.get_app(app_label) models = [ m for m in cache.get_models(app) if issubclass(m, ImageModel) ] for model in models: print 'Flushing cache for "%s.%s"' % (app_label, model.__name__) for obj in model.objects.iterator(): for spec in model._ik.specs: prop = getattr(obj, spec.name(), None) if prop is not None: prop._delete() if spec.pre_cache: prop._create() else: print 'Please specify on or more app names'
def flush_cache(apps, options): """ Clears the image cache """ apps = [a.strip(',') for a in apps] if apps: print 'Flushing cache for %s...' % ', '.join(apps) else: print 'Flushing caches...' for app_label in apps: app = cache.get_app(app_label) models = [ m for m in cache.get_models(app) if issubclass(m, ImageModel) ] for model in models: for obj in model.objects.all(): for spec in model._ik.specs: prop = getattr(obj, spec.name(), None) if prop is not None: prop._delete() if spec.pre_cache: prop._create()
# some data for multi callgroup development __author__ = '' from django.core.management import setup_environ import MHLogin.settings as settings setup_environ(settings) from django.db.models.loading import cache as model_cache from MHLogin.MHLUsers.models import * from MHLogin.MHLPractices.models import * from MHLogin.MHLCallGroups.models import * from MHLogin.utils.fields import * if not model_cache.loaded: model_cache.get_models() if __name__ == '__main__': practice1 = PracticeLocation() practice1.practice_name = 'San Jose Practice' practice1.practice_address1 = 'McKee Rd' practice1.practice_city = 'San Jose' practice1.practice_state = 'CA' practice1.practice_phone = '8002464123' practice1.practice_lat = '37.358765' practice1.practice_longit = '-121.860021' practice1.time_zone = "US/Pacific" practice1.save() practice2 = PracticeLocation() practice2.practice_name = 'San Diego Practice'
# File that determines what each URL points to. This uses _Python_ regular # expressions, not Perl's. # # See: # http://diveintopython.org/regular_expressions/street_addresses.html#re.matching.2.3 # from django.conf.urls.defaults import * from django.conf import settings from django.contrib import admin # fix to resolve lazy-loading bug # https://code.djangoproject.com/ticket/10405#comment:11 from django.db.models.loading import cache as model_cache if not model_cache.loaded: model_cache.get_models() # loop over all settings.INSTALLED_APPS and execute code in # files named admin.py in each such app (this will add those # models to the admin site) admin.autodiscover() # Setup the root url tree from / urlpatterns = patterns('', # User Authentication url(r'^accounts/login', 'django.contrib.auth.views.login'), url(r'^accounts/logout', 'django.contrib.auth.views.logout'), # Front page url(r'^', include('src.web.website.urls')),
def _get_models(apps):
    """Return all models from every listed app label (empty for None)."""
    collected = []
    for app_label in apps or []:
        collected.extend(cache.get_models(cache.get_app(app_label)))
    return collected
def _get_models(apps):
    """Return all models from every listed app label (empty for None)."""
    labels = apps or []
    return [
        model
        for label in labels
        for model in cache.get_models(cache.get_app(label))
    ]
from django import forms from django.contrib.contenttypes.models import ContentType from genericglue.forms import GenericForeignKeyField from genericglue.utils import table_exists try: from django.db.models.loading import cache as apps except ImportError: from django.apps import apps if table_exists(ContentType._meta.db_table): MODELS_WITH_PERMALINKS = [ContentType.objects.get_for_model(model) for model in apps.get_models() if getattr(model, 'get_absolute_url', None)] MODEL_IDS_WITH_PERMALINKS = [ct.id for ct in MODELS_WITH_PERMALINKS] else: MODELS_WITH_PERMALINKS = [] MODEL_IDS_WITH_PERMALINKS = [] class WithGenericObjectForm(forms.ModelForm): """ A class for setting up inlines with a generic FK. It assumes the generic object is defined by the fields `object_id` and `object_type` on your inline model. It needs to be subclassed and the subclass needs to have a meta class defined that sets the model that you're inlining:: class Meta: model = YourInlineModel By default this class restricts the GFK model drop down just to content types that have get_absolute_url methods. To change this restriction
def test_basic(self):
    """Exercise defer()/only() basics: lazy loading, per-instance data
    isolation, select_related interaction, content types, session
    pickling, and app-cache contents."""
    # Deferred fields should really be deferred and not accidentally use
    # the field's default value just because they aren't passed to __init__
    Item.objects.create(name="first", value=42)
    obj = Item.objects.only("name", "other_value").get(name="first")
    # Accessing "name" doesn't trigger a new database query. Accessing
    # "value" or "text" should.
    def test():
        self.assertEqual(obj.name, "first")
        self.assertEqual(obj.other_value, 0)
    self.assertNumQueries(0, test)

    def test():
        self.assertEqual(obj.value, 42)
    self.assertNumQueries(1, test)

    def test():
        self.assertEqual(obj.text, "xyzzy")
    self.assertNumQueries(1, test)

    # Second access to "text" is served from the instance -- no query.
    def test():
        self.assertEqual(obj.text, "xyzzy")
    self.assertNumQueries(0, test)

    # Regression test for #10695. Make sure different instances don't
    # inadvertently share data in the deferred descriptor objects.
    i = Item.objects.create(name="no I'm first", value=37)
    items = Item.objects.only("value").order_by("-value")
    self.assertEqual(items[0].name, "first")
    self.assertEqual(items[1].name, "no I'm first")

    RelatedItem.objects.create(item=i)
    r = RelatedItem.objects.defer("item").get()
    self.assertEqual(r.item_id, i.id)
    self.assertEqual(r.item, i)

    # Some further checks for select_related() and inherited model
    # behaviour (regression for #10710).
    c1 = Child.objects.create(name="c1", value=42)
    c2 = Child.objects.create(name="c2", value=37)
    Leaf.objects.create(name="l1", child=c1, second_child=c2)

    obj = Leaf.objects.only("name", "child").select_related()[0]
    self.assertEqual(obj.child.name, "c1")
    self.assertQuerysetEqual(
        Leaf.objects.select_related().only("child__name", "second_child__name"),
        ["l1"],
        attrgetter("name"))

    # Models instances with deferred fields should still return the same
    # content types as their non-deferred versions (bug #10738).
    ctype = ContentType.objects.get_for_model
    c1 = ctype(Item.objects.all()[0])
    c2 = ctype(Item.objects.defer("name")[0])
    c3 = ctype(Item.objects.only("name")[0])
    self.assertTrue(c1 is c2 is c3)

    # Regression for #10733 - only() can be used on a model with two
    # foreign keys.
    results = Leaf.objects.only("name", "child", "second_child").select_related()
    self.assertEqual(results[0].child.name, "c1")
    self.assertEqual(results[0].second_child.name, "c2")

    results = Leaf.objects.only(
        "name", "child", "second_child", "child__name", "second_child__name"
    ).select_related()
    self.assertEqual(results[0].child.name, "c1")
    self.assertEqual(results[0].second_child.name, "c2")

    # Test for #12163 - Pickling error saving session with unsaved model
    # instances.
    SESSION_KEY = "2b1189a188b44ad18c35e1baac6ceead"
    item = Item()
    item._deferred = False
    s = SessionStore(SESSION_KEY)
    s.clear()
    s["item"] = item
    s.save()
    s = SessionStore(SESSION_KEY)
    s.modified = True
    s.save()
    i2 = s["item"]
    self.assertFalse(i2._deferred)

    # Regression for #11936 - loading.get_models should not return deferred
    # models by default.
    klasses = sorted(cache.get_models(cache.get_app("defer_regress")),
                     key=lambda klass: klass.__name__)
    self.assertEqual(klasses, [Child, Item, Leaf, Proxy, RelatedItem, ResolveThis])
    klasses = sorted(
        map(attrgetter("__name__"),
            cache.get_models(cache.get_app("defer_regress"), include_deferred=True))
    )
    self.assertEqual(
        klasses,
        [
            "Child",
            "Child_Deferred_value",
            "Item",
            "Item_Deferred_name",
            "Item_Deferred_name_other_value_text",
            "Item_Deferred_name_other_value_value",
            "Item_Deferred_other_value_text_value",
            "Item_Deferred_text_value",
            "Leaf",
            "Leaf_Deferred_child_id_second_child_id_value",
            "Leaf_Deferred_name_value",
            "Leaf_Deferred_second_child_value",
            "Leaf_Deferred_value",
            "Proxy",
            "RelatedItem",
            "RelatedItem_Deferred_",
            "RelatedItem_Deferred_item_id",
            "ResolveThis",
        ],
    )
def _stashable_models():
    """Yield every installed model that inherits from SessionStashable."""
    for app in cache.get_apps():
        app_models = cache.get_models(app)
        for candidate in app_models:
            if issubclass(candidate, SessionStashable):
                yield candidate
def test_basic(self):
    """Exercise defer()/only() basics: lazy loading, per-instance data
    isolation, select_related interaction, content types, session
    pickling, app-cache contents, and annotate() interaction."""
    # Deferred fields should really be deferred and not accidentally use
    # the field's default value just because they aren't passed to __init__
    Item.objects.create(name="first", value=42)
    obj = Item.objects.only("name", "other_value").get(name="first")
    # Accessing "name" doesn't trigger a new database query. Accessing
    # "value" or "text" should.
    with self.assertNumQueries(0):
        self.assertEqual(obj.name, "first")
        self.assertEqual(obj.other_value, 0)
    with self.assertNumQueries(1):
        self.assertEqual(obj.value, 42)
    with self.assertNumQueries(1):
        self.assertEqual(obj.text, "xyzzy")
    # Second access to "text" is served from the instance -- no query.
    with self.assertNumQueries(0):
        self.assertEqual(obj.text, "xyzzy")

    # Regression test for #10695. Make sure different instances don't
    # inadvertently share data in the deferred descriptor objects.
    i = Item.objects.create(name="no I'm first", value=37)
    items = Item.objects.only("value").order_by("-value")
    self.assertEqual(items[0].name, "first")
    self.assertEqual(items[1].name, "no I'm first")

    RelatedItem.objects.create(item=i)
    r = RelatedItem.objects.defer("item").get()
    self.assertEqual(r.item_id, i.id)
    self.assertEqual(r.item, i)

    # Some further checks for select_related() and inherited model
    # behavior (regression for #10710).
    c1 = Child.objects.create(name="c1", value=42)
    c2 = Child.objects.create(name="c2", value=37)
    Leaf.objects.create(name="l1", child=c1, second_child=c2)

    obj = Leaf.objects.only("name", "child").select_related()[0]
    self.assertEqual(obj.child.name, "c1")
    self.assertQuerysetEqual(
        Leaf.objects.select_related().only("child__name", "second_child__name"), [
            "l1",
        ],
        attrgetter("name"))

    # Models instances with deferred fields should still return the same
    # content types as their non-deferred versions (bug #10738).
    ctype = ContentType.objects.get_for_model
    c1 = ctype(Item.objects.all()[0])
    c2 = ctype(Item.objects.defer("name")[0])
    c3 = ctype(Item.objects.only("name")[0])
    self.assertTrue(c1 is c2 is c3)

    # Regression for #10733 - only() can be used on a model with two
    # foreign keys.
    results = Leaf.objects.only("name", "child", "second_child").select_related()
    self.assertEqual(results[0].child.name, "c1")
    self.assertEqual(results[0].second_child.name, "c2")

    results = Leaf.objects.only(
        "name", "child", "second_child", "child__name", "second_child__name"
    ).select_related()
    self.assertEqual(results[0].child.name, "c1")
    self.assertEqual(results[0].second_child.name, "c2")

    # Test for #12163 - Pickling error saving session with unsaved model
    # instances.
    SESSION_KEY = '2b1189a188b44ad18c35e1baac6ceead'
    item = Item()
    item._deferred = False
    s = SessionStore(SESSION_KEY)
    s.clear()
    s["item"] = item
    s.save()
    s = SessionStore(SESSION_KEY)
    s.modified = True
    s.save()
    i2 = s["item"]
    self.assertFalse(i2._deferred)

    # Regression for #11936 - loading.get_models should not return deferred
    # models by default.
    klasses = sorted(cache.get_models(cache.get_app("defer_regress")),
                     key=lambda klass: klass.__name__)
    self.assertEqual(klasses, [
        Child,
        Feature,
        Item,
        ItemAndSimpleItem,
        Leaf,
        Proxy,
        RelatedItem,
        ResolveThis,
        SimpleItem,
        SpecialFeature,
    ])
    klasses = sorted(
        map(
            attrgetter("__name__"),
            cache.get_models(cache.get_app("defer_regress"), include_deferred=True),
        ))
    # FIXME: This is dependent on the order in which tests are run --
    # this test case has to be the first, otherwise a LOT more classes
    # appear.
    self.assertEqual(klasses, [
        "Child",
        "Child_Deferred_value",
        "Feature",
        "Item",
        "ItemAndSimpleItem",
        "Item_Deferred_name",
        "Item_Deferred_name_other_value_text",
        "Item_Deferred_name_other_value_value",
        "Item_Deferred_other_value_text_value",
        "Item_Deferred_text_value",
        "Leaf",
        "Leaf_Deferred_child_id_second_child_id_value",
        "Leaf_Deferred_name_value",
        "Leaf_Deferred_second_child_id_value",
        "Leaf_Deferred_value",
        "Proxy",
        "RelatedItem",
        "RelatedItem_Deferred_",
        "RelatedItem_Deferred_item_id",
        "ResolveThis",
        "SimpleItem",
        "SpecialFeature",
    ])

    # Regression for #16409 - make sure defer() and only() work with annotate()
    self.assertIsInstance(
        list(SimpleItem.objects.annotate(Count('feature')).defer('name')),
        list)
    self.assertIsInstance(
        list(SimpleItem.objects.annotate(Count('feature')).only('name')),
        list)
def test_basic(self):
    """Smoke tests for defer()/only() plus several regression checks.

    Covers deferred-field laziness, per-instance descriptor state (#10695),
    select_related() with inherited models (#10710), content-type identity
    for deferred classes (#10738), only() with two FKs (#10733) and
    loading.get_models() excluding deferred models by default (#11936).
    """
    # Deferred fields should really be deferred and not accidentally use
    # the field's default value just because they aren't passed to __init__
    Item.objects.create(name="first", value=42)
    obj = Item.objects.only("name", "other_value").get(name="first")
    # Accessing "name" doesn't trigger a new database query. Accessing
    # "value" or "text" should.
    with self.assertNumQueries(0):
        self.assertEqual(obj.name, "first")
        self.assertEqual(obj.other_value, 0)
    with self.assertNumQueries(1):
        self.assertEqual(obj.value, 42)
    with self.assertNumQueries(1):
        self.assertEqual(obj.text, "xyzzy")
    # Once loaded, a deferred field is cached: no further query.
    with self.assertNumQueries(0):
        self.assertEqual(obj.text, "xyzzy")
    # Regression test for #10695. Make sure different instances don't
    # inadvertently share data in the deferred descriptor objects.
    i = Item.objects.create(name="no I'm first", value=37)
    items = Item.objects.only("value").order_by("-value")
    self.assertEqual(items[0].name, "first")
    self.assertEqual(items[1].name, "no I'm first")
    RelatedItem.objects.create(item=i)
    r = RelatedItem.objects.defer("item").get()
    self.assertEqual(r.item_id, i.id)
    self.assertEqual(r.item, i)
    # Some further checks for select_related() and inherited model
    # behavior (regression for #10710).
    c1 = Child.objects.create(name="c1", value=42)
    c2 = Child.objects.create(name="c2", value=37)
    Leaf.objects.create(name="l1", child=c1, second_child=c2)
    obj = Leaf.objects.only("name", "child").select_related()[0]
    self.assertEqual(obj.child.name, "c1")
    self.assertQuerysetEqual(
        Leaf.objects.select_related().only("child__name", "second_child__name"), [
            "l1",
        ],
        attrgetter("name")
    )
    # Models instances with deferred fields should still return the same
    # content types as their non-deferred versions (bug #10738).
    ctype = ContentType.objects.get_for_model
    c1 = ctype(Item.objects.all()[0])
    c2 = ctype(Item.objects.defer("name")[0])
    c3 = ctype(Item.objects.only("name")[0])
    self.assertTrue(c1 is c2 is c3)
    # Regression for #10733 - only() can be used on a model with two
    # foreign keys.
    results = Leaf.objects.only("name", "child", "second_child").select_related()
    self.assertEqual(results[0].child.name, "c1")
    self.assertEqual(results[0].second_child.name, "c2")
    results = Leaf.objects.only(
        "name", "child", "second_child", "child__name", "second_child__name"
    ).select_related()
    self.assertEqual(results[0].child.name, "c1")
    self.assertEqual(results[0].second_child.name, "c2")
    # Regression for #11936 - loading.get_models should not return deferred
    # models by default.
    klasses = sorted(
        cache.get_models(cache.get_app("defer_regress")),
        key=lambda klass: klass.__name__
    )
    self.assertEqual(
        klasses, [
            Child,
            Feature,
            Item,
            ItemAndSimpleItem,
            Leaf,
            OneToOneItem,
            Proxy,
            RelatedItem,
            ResolveThis,
            SimpleItem,
            SpecialFeature,
        ]
    )
    # With include_deferred=True the auto-generated deferred proxy classes
    # are returned as well.
    klasses = sorted(
        map(
            attrgetter("__name__"),
            cache.get_models(
                cache.get_app("defer_regress"), include_deferred=True
            ),
        )
    )
    # FIXME: This is dependent on the order in which tests are run --
    # this test case has to be the first, otherwise a LOT more classes
    # appear.
    self.assertEqual(
        klasses, [
            "Child",
            "Child_Deferred_value",
            "Feature",
            "Item",
            "ItemAndSimpleItem",
            "Item_Deferred_name",
            "Item_Deferred_name_other_value_text",
            "Item_Deferred_name_other_value_value",
            "Item_Deferred_other_value_text_value",
            "Item_Deferred_text_value",
            "Leaf",
            "Leaf_Deferred_child_id_second_child_id_value",
            "Leaf_Deferred_name_value",
            "Leaf_Deferred_second_child_id_value",
            "Leaf_Deferred_value",
            "OneToOneItem",
            "Proxy",
            "RelatedItem",
            "RelatedItem_Deferred_",
            "RelatedItem_Deferred_item_id",
            "ResolveThis",
            "SimpleItem",
            "SpecialFeature",
        ]
    )
def all_concrete_models(): return [(app, [model for model in cache.get_models(app) if not model._meta.abstract]) for app in cache.get_apps() if cache.get_models(app)]
#pylint: disable-msg=W0212 import add_path # truqui para añadir los paths que nos interesan #IGNORE:W0611 import os # cargamos los settings del proyecto from django.core.management import setup_environ import settings setup_environ(settings) from django.db.models.loading import cache from optparse import OptionParser #cogemos los modelos de la aplicacion rtg con el orden como estan #definidos en models.py rtg_models=cache.get_models(cache.get_app('rtg')) rtg_models.reverse() parser = OptionParser() parser.add_option("-b", "--backup", action="store_true", dest="backup", default=False, help="hacer backup de las tablas") parser.add_option("-r", "--restore", action="store_true", dest="restore", default=False, help="hacer restore de las tablas") (options, dummy) = parser.parse_args() dtb = settings.DATABASES['default'] print "Trabajando con DB %s" % (dtb['NAME'])
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ __author__ = 'Emanuele Bertoldi <*****@*****.**>' __copyright__ = 'Copyright (c) 2013 Emanuele Bertoldi' __version__ = '0.0.1' from django.conf.urls import * from django.conf import settings from django.db.models.loading import cache # Workaround for Django's ticket #10405. # See http://code.djangoproject.com/ticket/10405#comment:10 for more info. if not cache.loaded: cache.get_models() # Basic URL patterns bootstrap. urlpatterns = patterns('',) if 'django.contrib.admin' in settings.INSTALLED_APPS: from django.contrib import admin admin.autodiscover() urlpatterns += patterns('', (r'^admin/', include(admin.site.urls))) if 'django.contrib.admindocs' in settings.INSTALLED_APPS: urlpatterns += patterns('', (r'^admin/doc/', include('django.contrib.admindocs.urls'))) if 'django.contrib.staticfiles' in settings.INSTALLED_APPS: from django.contrib.staticfiles.urls import staticfiles_urlpatterns urlpatterns += staticfiles_urlpatterns()