def test_partial_router(self):
    "A router can choose to implement a subset of methods"
    dive = Book.objects.using('other').create(title="Dive into Python", published=datetime.date(2009, 5, 4))
    # First check the baseline behaviour
    self.assertEqual(router.db_for_read(User), 'other')
    self.assertEqual(router.db_for_read(Book), 'other')
    self.assertEqual(router.db_for_write(User), 'default')
    self.assertEqual(router.db_for_write(Book), 'default')
    self.assertTrue(router.allow_relation(dive, dive))
    self.assertTrue(router.allow_syncdb('default', User))
    self.assertTrue(router.allow_syncdb('default', Book))
    # Install a router chain; methods a router doesn't implement should
    # fall through to the next router in the list.
    router.routers = [WriteRouter(), AuthRouter(), TestRouter()]
    self.assertEqual(router.db_for_read(User), 'default')
    self.assertEqual(router.db_for_read(Book), 'other')
    self.assertEqual(router.db_for_write(User), 'writer')
    self.assertEqual(router.db_for_write(Book), 'writer')
    self.assertTrue(router.allow_relation(dive, dive))
    self.assertFalse(router.allow_syncdb('default', User))
    self.assertTrue(router.allow_syncdb('default', Book))
def _apply_rel_filters(self, queryset):
    """
    Filter the queryset for the instance this manager is bound to.
    """
    db = self._db or router.db_for_read(self.model, instance=self.instance)
    # Backends such as Oracle store '' as NULL, so '' must be treated
    # like None in the FK-value guard below.
    empty_strings_as_null = connections[db].features.interprets_empty_strings_as_nulls
    queryset._add_hints(instance=self.instance)
    if self._db:
        queryset = queryset.using(self._db)
    queryset = queryset.filter(**self.core_filters)
    for field in self.field.foreign_related_fields:
        val = getattr(self.instance, field.attname)
        # A NULL (or NULL-equivalent empty string) FK value can never
        # match any row: short-circuit to an empty queryset.
        if val is None or (val == '' and empty_strings_as_null):
            return queryset.none()
    if self.field.many_to_one:
        # Guard against field-like objects such as GenericRelation
        # that abuse create_reverse_many_to_one_manager() with reverse
        # one-to-many relationships instead and break known related
        # objects assignment.
        try:
            target_field = self.field.target_field
        except FieldError:
            # The relationship has multiple target fields. Use a tuple
            # for related object id.
            rel_obj_id = tuple([
                getattr(self.instance, target_field.attname)
                for target_field in self.field.get_path_info()[-1].target_fields
            ])
        else:
            rel_obj_id = getattr(self.instance, target_field.attname)
        # Pre-populate the known-related-objects map so fetched rows get
        # this instance attached without an extra query.
        queryset._known_related_objects = {self.field: {rel_obj_id: self.instance}}
    return queryset
def _check_content_types(self):
    """
    Check that if Django and/or Djangae contenttypes are being used
    that they are configured correctly.
    """
    from django.conf import settings
    contenttype_configuration_error = ImproperlyConfigured(
        "If you're using django.contrib.contenttypes, then you need "
        "to add djangae.contrib.contenttypes to INSTALLED_APPS after "
        "django.contrib.contenttypes."
    )
    if 'django.contrib.contenttypes' in settings.INSTALLED_APPS:
        from django.db import router, connections
        from django.contrib.contenttypes.models import ContentType
        conn = connections[router.db_for_read(ContentType)]
        if conn.settings_dict.get("ENGINE") != 'djangae.db.backends.appengine':
            # Don't enforce djangae.contrib.contenttypes if content types are being
            # saved to a different database backend
            return
        if not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS:
            # Raise error if User is using Django CT, but not Djangae
            raise contenttype_configuration_error
        else:
            if settings.INSTALLED_APPS.index('django.contrib.contenttypes') > \
                    settings.INSTALLED_APPS.index('djangae.contrib.contenttypes'):
                # Raise error if User is using both Django and Djangae CT, but
                # Django CT comes after Djangae CT
                raise contenttype_configuration_error

# NOTE(review): the second import rebinds `checks`, shadowing
# django.core.checks in this module — confirm this is intentional.
from django.core import checks
from djangae import checks
def prepare_models():
    """Build (and cache) SQLAlchemy-mapped counterparts for every Django model."""
    tables = get_tables()
    models = get_django_models()
    sa_models = getattr(Cache, 'models', {})
    for model in models:
        table_name = model._meta.db_table
        mixin = getattr(model, 'aldjemy_mixin', None)
        bases = (mixin, BaseSQLAModel) if mixin else (BaseSQLAModel, )
        # because querying happens on sqlalchemy side, we can use only one
        # type of queries for alias, so we use 'read' type
        class_attrs = {
            'table': tables[table_name],
            'alias': router.db_for_read(model),
        }
        sa_models[table_name] = type(model._meta.object_name, bases, class_attrs)
    for model in models:
        table_name = model._meta.db_table
        if 'id' not in sa_models[table_name].__dict__:
            mapped_attrs = _extract_model_attrs(model, sa_models)
            orm.mapper(sa_models[table_name], tables[table_name], mapped_attrs)
        model.sa = sa_models[table_name]
    Cache.models = sa_models
def get_prefetch_queryset(self, instances, queryset=None):
    """Return the 5-tuple Django's prefetch machinery expects for tag lookups."""
    if queryset is not None:
        raise ValueError("Custom queryset can't be used for this lookup.")
    instance = instances[0]
    from django.db import connections
    db = self._db or router.db_for_read(instance.__class__, instance=instance)
    if issubclass(self.through, CommonGenericTaggedItemBase):
        fieldname = 'object_id'
    else:
        fieldname = 'content_object'
    fk = self.through._meta.get_field(fieldname)
    pk_set = set(obj._get_pk_val() for obj in instances)
    query = {'%s__%s__in' % (self.through.tag_relname(), fk.name): pk_set}
    connection = connections[db]
    qn = connection.ops.quote_name
    # Annotate each tag row with the pk of the instance it belongs to so
    # results can be regrouped per instance.
    select = {
        '_prefetch_related_val': '%s.%s' % (
            qn(self.through._meta.db_table), qn(fk.column)
        )
    }
    qs = self.get_queryset(query).using(db).extra(select=select)
    return (qs,
            attrgetter('_prefetch_related_val'),
            lambda obj: obj._get_pk_val(),
            False,
            self.prefetch_cache_name)
def get(self, key, default=None, version=None):
    """Fetch *key* from the cache table; expired rows are deleted and *default* returned."""
    cache_key = self.make_key(key, version=version)
    self.validate_key(cache_key)
    read_conn = connections[router.db_for_read(self.cache_model_class)]
    table = read_conn.ops.quote_name(self._table)
    with read_conn.cursor() as cursor:
        cursor.execute("SELECT cache_key, value, expires FROM %s "
                       "WHERE cache_key = %%s" % table, [cache_key])
        row = cursor.fetchone()
    if row is None:
        return default
    expires = row[2]
    expression = models.Expression(output_field=models.DateTimeField())
    converters = (read_conn.ops.get_db_converters(expression) +
                  expression.get_db_converters(read_conn))
    for converter in converters:
        expires = converter(expires, expression, read_conn, {})
    if expires < timezone.now():
        # Stale entry: remove it via the *write* connection.
        write_conn = connections[router.db_for_write(self.cache_model_class)]
        with write_conn.cursor() as cursor:
            cursor.execute("DELETE FROM %s "
                           "WHERE cache_key = %%s" % table, [cache_key])
        return default
    raw_value = read_conn.ops.process_clob(row[1])
    return pickle.loads(base64.b64decode(force_bytes(raw_value)))
def _get_database_connection(cls, action):
    """Return the cached connection for *action* ('read' or 'write'), building the pair lazily."""
    if cls._db_connection is None:
        read_alias = router.db_for_read(cls)
        write_alias = router.db_for_write(cls)
        cls._db_connection = {
            'read': connections[read_alias],
            'write': connections[write_alias],
        }
    return cls._db_connection[action]
def db(self):
    """Database alias to use: explicit `_db` wins, otherwise route by read/write intent."""
    explicit = self._db
    if explicit:
        return explicit
    if not self._for_write:
        return router.db_for_read(self.model)
    return router.db_for_write(self.model, instance=getattr(self, '_instance', None))
def __get__(self, instance, instance_type=None):
    """Descriptor access for a forward FK: fetch (and cache) the related object."""
    if instance is None:
        # Accessed on the class rather than an instance.
        return self
    cache_name = self.field.get_cache_name()
    try:
        # Fast path: already fetched and cached on the instance.
        return getattr(instance, cache_name)
    except AttributeError:
        val = getattr(instance, self.field.attname)
        if val is None:
            # If NULL is an allowed value, return it.
            if self.field.null:
                return None
            raise self.field.rel.to.DoesNotExist
        other_field = self.field.rel.get_related_field()
        if other_field.rel:
            params = {'%s__pk' % self.field.rel.field_name: val}
        else:
            params = {'%s__exact' % self.field.rel.field_name: val}
        # If the related manager indicates that it should be used for
        # related fields, respect that.
        rel_mgr = self.field.rel.to._default_manager
        db = router.db_for_read(self.field.rel.to, instance=instance)
        if getattr(rel_mgr, 'use_for_related_fields', False):
            rel_obj = rel_mgr.using(db).get(**params)
        else:
            rel_obj = QuerySet(self.field.rel.to).using(db).get(**params)
        # Cache so subsequent accesses skip the query.
        setattr(instance, cache_name, rel_obj)
        return rel_obj
def get_prefetch_queryset(self, instances):
    # mostly a copy of get_prefetch_query_set from ManyRelatedManager
    # but with addition of proper ordering
    db = self._db or router.db_for_read(instances[0].__class__, instance=instances[0])
    query = {'%s__pk__in' % self.query_field_name: set(obj._get_pk_val() for obj in instances)}
    qs = super(RelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**query)
    # M2M: need to annotate the query in order to get the primary model
    # that the secondary model was actually related to. We know that
    # there will already be a join on the join table, so we can just add
    # the select.
    # For non-autocreated 'through' models, can't assume we are
    # dealing with PK values.
    fk = self.through._meta.get_field(self.source_field_name)
    source_col = fk.column
    join_table = self.through._meta.db_table
    connection = connections[db]
    qn = connection.ops.quote_name
    # NOTE(review): `rel` is not defined in this method body — presumably a
    # closure or module-level binding for the relation descriptor; confirm
    # it resolves at runtime.
    qs = qs.extra(select={'_prefetch_related_val': '%s.%s' % (qn(join_table), qn(source_col))},
                  order_by=['%s.%s' % (
                      rel.through._meta.db_table,
                      rel.through._sort_field_name,
                  )])
    select_attname = fk.rel.get_related_field().get_attname()
    return (qs,
            attrgetter('_prefetch_related_val'),
            attrgetter(select_attname),
            False,
            self.prefetch_cache_name)
def set_i18n(self, culture, data):
    """Set i18n data for a model.

    Inserts or updates the row in the ``<db_table>_i18n`` side table for
    the given culture, persisting only keys in *data* that correspond to
    real i18n model fields.

    Raises I18NValidationError if the model has not been saved yet.
    """
    if not self.pk:
        raise I18NValidationError("Cannot set i18n data on an unsaved model")
    fields = self.i18n.model._meta.get_all_field_names()
    # FIXME: This is VERY fragile in its current state
    table = self._meta.db_table
    # Use the *write* alias for the cursor: the UPDATE/INSERT below must hit
    # the same database that commit_unless_managed() targets. Previously the
    # cursor came from db_for_read, which could write to the wrong database
    # when reads and writes are routed separately.
    using = router.db_for_write(self.__class__)
    cursor = connections[using].cursor()
    findquery = "SELECT * FROM %s_i18n WHERE id=%%s AND culture=%%s" % self._meta.db_table
    cursor.execute(findquery, [self.pk, culture])
    row = cursor.fetchone()
    # Keep only (field, value) pairs for known i18n fields.
    args = [kv for kv in data.iteritems() if kv[0] in fields]
    uquery = None
    if row is not None:
        fstr = ", ".join(["%s=%%s" % k[0] for k in args])
        uquery = "UPDATE %s_i18n SET %s WHERE id=%%s AND culture=%%s" % (table, fstr)
    else:
        kstr = ", ".join([a[0] for a in args])
        vstr = ", ".join(['%s' for a in args])
        uquery = "INSERT INTO %s_i18n (%s,id,culture) VALUES (%s,%%s,%%s)" % (
            table, kstr, vstr)
    cursor.execute(uquery, [a[1] for a in args] + [self.pk, culture])
    transaction.commit_unless_managed(using=using)
def _get_database_connection(cls, action):
    """Lazily build the read/write connection pair for this class, then return one."""
    if cls._db_connection is None:
        resolvers = {"read": router.db_for_read, "write": router.db_for_write}
        cls._db_connection = {
            name: connections[resolve(cls)] for name, resolve in resolvers.items()
        }
    return cls._db_connection[action]
def get_prefetch_query_set(self, instances):
    """Legacy (pre-Django-1.7) prefetch support for generically tagged items."""
    instance = instances[0]
    from django.db import connections
    db = self._db or router.db_for_read(instance.__class__, instance=instance)
    if issubclass(self.through, GenericTaggedItemBase):
        lookup_field = 'object_id'
    else:
        lookup_field = 'content_object'
    fk = self.through._meta.get_field(lookup_field)
    pk_set = set(obj._get_pk_val() for obj in instances)
    query = {'%s__%s__in' % (self.through.tag_relname(), fk.name): pk_set}
    connection = connections[db]
    qn = connection.ops.quote_name
    # Annotate each tag with the pk of the instance it was attached to.
    extra_select = {
        '_prefetch_related_val': '%s.%s' % (
            qn(self.through._meta.db_table), qn(fk.column)
        )
    }
    qs = self.get_query_set().using(db)._next_is_sticky().filter(**query).extra(
        select=extra_select
    )
    return (qs,
            operator.attrgetter('_prefetch_related_val'),
            operator.attrgetter(instance._meta.pk.name),
            False,
            self.prefetch_cache_name)
def get_connection(model=Award, read_only=True):
    """
    Pick a Django database connection via the database router.

    USAspending alternates reads across replicas using
    usaspending_api.routers.replicas.ReadReplicaRouter, but Django does not
    consult the router for raw SQL run against a connection, so this helper
    asks the router explicitly.

    model - A Django model representing the primary table in your query.
            Defaults to Award, which is central enough to work for nearly
            all queries.

    read_only - Django cannot inspect raw SQL, so state your intent:
            True when only querying; False when making any changes
            (UPDATE, DELETE, etc.). The router uses this together with
            `model` to pick a connection.

    Returns an appropriate Django database connection.
    """
    chooser = router.db_for_read if read_only else router.db_for_write
    return connections[chooser(model)]
def get(self, key, default=None, version=None):
    """Look up *key* in the cache table, deleting it and returning *default* if expired."""
    made_key = self.make_key(key, version=version)
    self.validate_key(made_key)
    alias = router.db_for_read(self.cache_model_class)
    table = connections[alias].ops.quote_name(self._table)
    with connections[alias].cursor() as cursor:
        cursor.execute("SELECT cache_key, value, expires FROM %s "
                       "WHERE cache_key = %%s" % table, [made_key])
        row = cursor.fetchone()
    if row is None:
        return default
    now = timezone.now()
    expires = row[2]
    if connections[alias].features.needs_datetime_string_cast and not isinstance(expires, datetime):
        # Note: typecasting is needed by some 3rd party database backends.
        # All core backends work without typecasting, so be careful about
        # changes here - test suite will NOT pick regressions here.
        expires = typecast_timestamp(str(expires))
    if expires < now:
        write_alias = router.db_for_write(self.cache_model_class)
        with connections[write_alias].cursor() as cursor:
            cursor.execute("DELETE FROM %s "
                           "WHERE cache_key = %%s" % table, [made_key])
        return default
    raw_value = connections[alias].ops.process_clob(row[1])
    return pickle.loads(base64.b64decode(force_bytes(raw_value)))
def ready(self):
    """App-config startup hook: apply JSON patches, reset the datastore
    caching context on request boundaries, and validate contenttypes setup."""
    from .patches import json
    json.patch()
    from djangae.db.backends.appengine.caching import reset_context
    from django.core.signals import request_finished, request_started
    # Clear the per-request caching context at both ends of the request
    # cycle so state never leaks between requests.
    request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset")
    request_started.connect(reset_context, dispatch_uid="request_started_context_reset")
    from django.conf import settings
    contenttype_configuration_error = ImproperlyConfigured(
        "If you're using django.contrib.contenttypes, then you need "
        "to add djangae.contrib.contenttypes to INSTALLED_APPS after "
        "django.contrib.contenttypes."
    )
    if 'django.contrib.contenttypes' in settings.INSTALLED_APPS:
        from django.db import router, connections
        from django.contrib.contenttypes.models import ContentType
        conn = connections[router.db_for_read(ContentType)]
        if conn.settings_dict.get("ENGINE") != 'djangae.db.backends.appengine':
            # Don't enforce djangae.contrib.contenttypes if content types are being
            # saved to a different database backend
            return
        if not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS:
            # Raise error if User is using Django CT, but not Djangae
            raise contenttype_configuration_error
        else:
            if settings.INSTALLED_APPS.index('django.contrib.contenttypes') > \
                    settings.INSTALLED_APPS.index('djangae.contrib.contenttypes'):
                # Raise error if User is using both Django and Djangae CT, but
                # Django CT comes after Djangae CT
                raise contenttype_configuration_error
def get_group_tag_value_count(self, project_id, group_id, environment_id, key):
    """Sum times_seen over a group's values for tag *key* (cost-bounded on Postgres)."""
    if db.is_postgres():
        # This doesn't guarantee percentage is accurate, but it does ensure
        # that the query has a maximum cost
        alias = router.db_for_read(models.GroupTagValue)
        cursor = connections[alias].cursor()
        cursor.execute(
            """
            SELECT SUM(t)
            FROM (
                SELECT times_seen as t
                FROM sentry_messagefiltervalue
                WHERE group_id = %s
                AND key = %s
                ORDER BY last_seen DESC
                LIMIT 10000
            ) as a
        """, [group_id, key]
        )
        return cursor.fetchone()[0] or 0
    # Non-Postgres fallback: bound the cost with a 7-day window instead.
    cutoff = timezone.now() - timedelta(days=7)
    aggregated = models.GroupTagValue.objects.filter(
        group_id=group_id,
        key=key,
        last_seen__gte=cutoff,
    ).aggregate(t=Sum('times_seen'))
    return aggregated['t']
def get_query_set(self):
    """Restrict the related queryset to rows pointing at this instance."""
    db = self._db or router.db_for_read(self.model, instance=self.instance)
    filters = {
        '%s__pk' % self.content_type_field_name: self.content_type.id,
        '%s__exact' % self.object_id_field_name: self.pk_val,
    }
    return superclass.get_query_set(self).using(db).filter(**filters)
def get_many(self, keys, version=None):
    """Return {key: value} for every requested key found in the cache (missing keys omitted)."""
    made_key_to_key = {}
    for key in keys:
        made = self.make_key(key, version=version)
        made_key_to_key[made] = key
    made_keys = list(made_key_to_key.keys())
    for made in made_keys:
        self.validate_key(made)
    db = router.db_for_read(self.cache_model_class)
    table = connections[db].ops.quote_name(self._table)
    with connections[db].cursor() as cursor:
        cursor.execute(
            self._get_many_query.format(table=table),
            (made_keys, self._now())
        )
        rows = cursor.fetchall()
    return {
        made_key_to_key[made_key]: self.decode(value, value_type)
        for made_key, value, value_type in rows
    }
def get_queryset(self):
    """Return the related queryset, honouring any prefetch cache and, for
    ordered (ListField-backed) relations, the stored pk order."""
    db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
    if (hasattr(self.instance, "_prefetched_objects_cache") and
            self.field.name in self.instance._prefetched_objects_cache):
        # Prefetch cache hit: reuse the already-fetched queryset.
        qs = self.instance._prefetched_objects_cache[self.field.name]
        if self.ordered:
            lookup = {}
            for item in qs._result_cache:
                lookup[item.pk] = item
            # If this is a ListField make sure the result set retains the right order
            qs._result_cache = []
            for pk in getattr(self.instance, self.field.attname):
                qs._result_cache.append(lookup[pk])
            return qs
        else:
            return qs
    elif self.ordered and not self.reverse:
        # Forward ordered relation: an OrderedQuerySet replays results in
        # the pk order stored on the field.
        values = self.field.value_from_object(self.instance)
        qcls = OrderedQuerySet(self.model, using=db)
        qcls.ordered_pks = values[:]
    else:
        qcls = super(RelatedIteratorManagerBase, self).get_queryset()
    return (
        qcls.using(db)._next_is_sticky().filter(**self.core_filters)
    )
def supports_cursor(queryset):
    """Best-effort check that *queryset* can be paginated with a datastore cursor."""
    #First, see if we are using one of Django's built-in connections
    #if we are then, return False
    from django.db import router
    db = router.db_for_read(queryset.query.model)
    compiler = queryset.query.get_compiler(using=db)
    if "django.db." in str(compiler.__class__):
        return False

    def isnt_in_or_exclude_query(queryset):
        # Cursors do not survive `__in` lookups or negated (exclude)
        # filters, so walk the WHERE tree looking for either.
        lookup = 'in'

        def traverse_where_tree(nodes):
            # Returns True if an `in` lookup or negated node is found,
            # None otherwise (so `not <result>` below yields True).
            for n in nodes:
                if not isinstance(n, WhereNode):
                    # This is a leaf node, so it contains the actual query specs
                    if lookup in n:
                        return True
                elif n.negated:
                    return True
                else:
                    in_lookups = traverse_where_tree(n.children)
                    if in_lookups is not None:
                        return in_lookups

        where = queryset.query.where
        if where.negated:
            return False
        return not traverse_where_tree(where.children)

    # It still might not support cursors, so we
    # check if the query doesn't have exclude filters or __in lookups
    return isnt_in_or_exclude_query(queryset)
def get_with_prefix(self, prefix, version=None):
    """Return {key: value} for every unexpired row whose made key starts with *prefix*."""
    if self.reverse_key_func is None:
        raise ValueError(
            "To use the _with_prefix commands with a custom KEY_FUNCTION, "
            "you need to specify a custom REVERSE_KEY_FUNCTION too."
        )
    if version is None:
        version = self.version
    db = router.db_for_read(self.cache_model_class)
    table = connections[db].ops.quote_name(self._table)
    prefix = self.make_key(prefix + '%', version=version)
    version = six.text_type(version)
    with connections[db].cursor() as cursor:
        cursor.execute(
            """SELECT cache_key, value, value_type FROM {table}
               WHERE cache_key LIKE %s AND
                     expires >= %s""".format(table=table),
            (prefix, self._now())
        )
        rows = cursor.fetchall()
    data = {}
    for made_key, value, value_type in rows:
        key, key_prefix, key_version = self.reverse_key_func(made_key)
        data[key] = self.decode(value, value_type)
    return data
def keys_with_prefix(self, prefix, version=None):
    """Return the set of (reversed) keys whose made cache key starts with *prefix*
    and whose version matches the requested one."""
    if self.reverse_key_func is None:
        raise ValueError(
            "To use the _with_prefix commands with a custom KEY_FUNCTION, "
            "you need to specify a custom REVERSE_KEY_FUNCTION too."
        )
    if version is None:
        version = self.version
    # Compare versions as text: reverse_key_func yields the version parsed
    # out of the key as a string, so an integer `version` would never match
    # below. This mirrors the coercion done in get_with_prefix().
    version = six.text_type(version)
    db = router.db_for_read(self.cache_model_class)
    table = connections[db].ops.quote_name(self._table)
    prefix = self.make_key(prefix + '%', version=version)
    with connections[db].cursor() as cursor:
        cursor.execute(
            """SELECT cache_key FROM {table}
               WHERE cache_key LIKE %s AND
                     expires >= %s""".format(table=table),
            (prefix, self._now())
        )
        rows = cursor.fetchall()
    full_keys = {row[0] for row in rows}
    keys = set()
    for full_key in full_keys:
        key, key_prefix, key_version = self.reverse_key_func(full_key)
        if key_version == version:
            keys.add(key)
    return keys
def is_installed(self):
    """Checks whether django-watson is installed (the MySQL fulltext index exists)."""
    cursor = connections[router.db_for_read(SearchEntry)].cursor()
    cursor.execute(
        "SHOW INDEX FROM watson_searchentry WHERE Key_name = 'watson_searchentry_fulltext'"
    )
    return bool(cursor.fetchall())
def get_query_set(self):
    """Queryset of target objects reachable through the 'through' table for this instance."""
    through_filters = {
        self.target.ct_field: self.content_type,
        self.source_field_name: self.instance,
    }
    # Intentionally also caches the routed alias on self._db, matching the
    # original assignment chain.
    self._db = db = router.db_for_read(self.instance.__class__, instance=self.instance)
    target_pks = self.through._default_manager.using(db).filter(
        **through_filters
    ).values_list(self.target.fk_field, flat=True)
    return superclass.get_query_set(self).using(db).filter(pk__in=target_pks).distinct()
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
    "Returns field's value prepared for database lookup."
    prepped = self.get_prep_lookup(lookup_type, value)
    # NOTE(review): when the active connection differs from the model's
    # read alias, a list/tuple is returned as-is; otherwise the value is
    # wrapped in a list. The collapsed original is ambiguous about whether
    # the final return sits inside or outside this branch — confirm that
    # the equal-alias path is meant to return a wrapped value rather than
    # fall through to None.
    if connection.alias != router.db_for_read(self.model):
        if isinstance(prepped, (list, tuple)):
            return prepped
    return [prepped]
def get_object_for_this_type(self, **kwargs):
    """
    Returns an object of this type for the keyword arguments given.
    Basically, this is a proxy around this object_type's get_object() model
    method. The ObjectNotExist exception, if thrown, will not be caught, so
    code that calls this method should catch it.
    """
    model = self.model_class()
    alias = router.db_for_read(model) or DEFAULT_DB_ALIAS
    return model._default_manager.using(alias).get(**kwargs)
def database_vendor(model, mode='read'):
    """Return the vendor string ('postgresql', 'mysql', ...) of the database
    the router selects for *model*.

    mode -- 'read' or 'write'; any other value raises ValueError.
    """
    if mode == 'read':
        database = router.db_for_read(model)
    elif mode == 'write':
        database = router.db_for_write(model)
    else:
        # ValueError is more precise than a bare Exception and is still
        # caught by any caller handling Exception.
        raise ValueError('Invalid database mode specified')
    return connections[database].vendor
def __new__(cls):
    """Guess the correct search backend and initialize it."""
    vendor = connections[router.db_for_read(SearchEntry)].vendor
    backend_by_vendor = {
        "postgresql": PostgresSearchBackend,
        "mysql": MySQLSearchBackend,
    }
    backend_cls = backend_by_vendor.get(vendor, RegexSearchBackend)
    return backend_cls()
def __get__(self, instance, instance_type=None):
    """Descriptor access for a generic relation that may also back a file field."""
    if instance is None:
        # Class-level access: expose the field itself.
        return self.field
    file_val = None
    if self.is_file_field:
        file_val = instance.__dict__[self.file_field.name]
    # Dynamically create a class that subclasses the related model's
    # default manager.
    rel_model = compat_rel_to(self.field)
    superclass = rel_model._default_manager.__class__
    RelatedManager = create_generic_related_manager(superclass)
    qn = connection.ops.quote_name
    manager_kwargs = {
        'prefetch_cache_name': self.field.attname,
    }
    join_cols = self.field.get_joining_columns(reverse_join=True)[0]
    ct_manager = ContentType.objects.db_manager(instance._state.db)
    content_type = ct_manager.get_for_model(instance, for_concrete_model=self.for_concrete_model)
    manager = RelatedManager(
        model=rel_model,
        instance=instance,
        field=self.field,
        source_col_name=qn(join_cols[0]),
        target_col_name=qn(join_cols[1]),
        content_type=content_type,
        content_type_field_name=self.field.content_type_field_name,
        object_id_field_name=self.field.object_id_field_name,
        field_identifier_field_name=self.field.field_identifier_field_name,
        **manager_kwargs)
    if not manager.pk_val:
        # Unsaved owner: no related row can exist yet.
        val = None
    else:
        if not self.is_file_field:
            # Plain generic relation: hand back the manager itself.
            return manager
        try:
            val = self.field.get_cached_value(instance)
        except KeyError:
            # Cache miss: fetch the single related object directly.
            db = manager._db or router.db_for_read(rel_model, instance=instance)
            qset = superclass.get_queryset(manager).using(db)
            try:
                val = qset.get(**manager.core_filters)
            except rel_model.DoesNotExist:
                val = None
    self.set_file_value(instance, file_val, obj=val)
    self.field.set_cached_value(instance, val)
    return instance.__dict__[self.file_field.name]
def _build_queryset(
    self,
    project,
    query=None,
    status=None,
    tags=None,
    bookmarked_by=None,
    assigned_to=None,
    first_release=None,
    sort_by='date',
    unassigned=None,
    subscribed_by=None,
    age_from=None,
    age_from_inclusive=True,
    age_to=None,
    age_to_inclusive=True,
    last_seen_from=None,
    last_seen_from_inclusive=True,
    last_seen_to=None,
    last_seen_to_inclusive=True,
    date_from=None,
    date_from_inclusive=True,
    date_to=None,
    date_to_inclusive=True,
    active_at_from=None,
    active_at_from_inclusive=True,
    active_at_to=None,
    active_at_to_inclusive=True,
    times_seen=None,
    times_seen_lower=None,
    times_seen_lower_inclusive=True,
    times_seen_upper=None,
    times_seen_upper_inclusive=True,
    cursor=None,
    limit=None,
    environment=None,
):
    """Build the Group queryset for a search, applying every requested
    filter and the engine-specific sort clause."""
    from sentry.models import Event, Group, GroupSubscription, GroupStatus, OrganizationMember
    if tags is None:
        tags = {}
    engine = get_db_engine('default')
    queryset = Group.objects.filter(project=project)
    if query:
        # TODO(dcramer): if we want to continue to support search on SQL
        # we should at least optimize this in Postgres so that it does
        # the query filter **after** the index filters, and restricts the
        # result set
        queryset = queryset.filter(
            Q(message__icontains=query) | Q(culprit__icontains=query))
    if status is None:
        # By default, hide groups queued for deletion/merge.
        status_in = (
            GroupStatus.PENDING_DELETION,
            GroupStatus.DELETION_IN_PROGRESS,
            GroupStatus.PENDING_MERGE,
        )
        queryset = queryset.exclude(status__in=status_in)
    else:
        queryset = queryset.filter(status=status)
    if bookmarked_by:
        queryset = queryset.filter(
            bookmark_set__project=project,
            bookmark_set__user=bookmarked_by,
        )
    if assigned_to:
        # Match groups assigned to the user directly or to any of their teams.
        teams = []
        try:
            member = OrganizationMember.objects.get(
                user=assigned_to,
                organization_id=project.organization_id,
            )
        except OrganizationMember.DoesNotExist:
            pass
        else:
            teams = member.get_teams()
        queryset = queryset.filter(
            Q(assignee_set__user=assigned_to, assignee_set__project=project) |
            Q(assignee_set__team__in=teams)
        )
    elif unassigned in (True, False):
        queryset = queryset.filter(
            assignee_set__isnull=unassigned,
        )
    if subscribed_by is not None:
        queryset = queryset.filter(
            id__in=GroupSubscription.objects.filter(
                project=project,
                user=subscribed_by,
                is_active=True,
            ).values_list('group'),
        )
    if first_release:
        if first_release is EMPTY:
            return queryset.none()
        queryset = queryset.filter(
            first_release__organization_id=project.organization_id,
            first_release__version=first_release,
        )
    if environment is not None:
        # XXX: This overwrites the ``environment`` tag, if present, to
        # ensure that the result set is limited to groups that have been
        # seen in this environment (there is no way to search for groups
        # that match multiple values of a single tag without changes to the
        # tagstore API.)
        tags['environment'] = environment.name
    if tags:
        matches = tagstore.get_group_ids_for_search_filter(
            project.id,
            environment.id if environment is not None else None,
            tags,
        )
        if not matches:
            return queryset.none()
        queryset = queryset.filter(
            id__in=matches,
        )
    if age_from or age_to:
        params = {}
        if age_from:
            if age_from_inclusive:
                params['first_seen__gte'] = age_from
            else:
                params['first_seen__gt'] = age_from
        if age_to:
            if age_to_inclusive:
                params['first_seen__lte'] = age_to
            else:
                params['first_seen__lt'] = age_to
        queryset = queryset.filter(**params)
    if last_seen_from or last_seen_to:
        params = {}
        if last_seen_from:
            if last_seen_from_inclusive:
                params['last_seen__gte'] = last_seen_from
            else:
                params['last_seen__gt'] = last_seen_from
        if last_seen_to:
            if last_seen_to_inclusive:
                params['last_seen__lte'] = last_seen_to
            else:
                params['last_seen__lt'] = last_seen_to
        queryset = queryset.filter(**params)
    if active_at_from or active_at_to:
        params = {}
        if active_at_from:
            if active_at_from_inclusive:
                params['active_at__gte'] = active_at_from
            else:
                params['active_at__gt'] = active_at_from
        if active_at_to:
            if active_at_to_inclusive:
                params['active_at__lte'] = active_at_to
            else:
                params['active_at__lt'] = active_at_to
        queryset = queryset.filter(**params)
    if times_seen is not None:
        queryset = queryset.filter(times_seen=times_seen)
    if times_seen_lower is not None or times_seen_upper is not None:
        params = {}
        if times_seen_lower is not None:
            if times_seen_lower_inclusive:
                params['times_seen__gte'] = times_seen_lower
            else:
                params['times_seen__gt'] = times_seen_lower
        if times_seen_upper is not None:
            if times_seen_upper_inclusive:
                params['times_seen__lte'] = times_seen_upper
            else:
                params['times_seen__lt'] = times_seen_upper
        queryset = queryset.filter(**params)
    if date_from or date_to:
        # Date filters are resolved against Event rows, then mapped back to
        # group ids.
        params = {
            'project_id': project.id,
        }
        if date_from:
            if date_from_inclusive:
                params['datetime__gte'] = date_from
            else:
                params['datetime__gt'] = date_from
        if date_to:
            if date_to_inclusive:
                params['datetime__lte'] = date_to
            else:
                params['datetime__lt'] = date_to
        event_queryset = Event.objects.filter(**params)
        if query:
            event_queryset = event_queryset.filter(
                message__icontains=query)
        # limit to the first 1000 results
        group_ids = event_queryset.distinct().values_list(
            'group_id', flat=True)[:1000]
        # if Event is not on the primary database remove Django's
        # implicit subquery by coercing to a list
        base = router.db_for_read(Group)
        using = router.db_for_read(Event)
        # MySQL also cannot do a LIMIT inside of a subquery
        if base != using or engine.startswith('mysql'):
            group_ids = list(group_ids)
        queryset = queryset.filter(
            id__in=group_ids,
        )
    # Pick the engine-specific SQL expression used as the sort key.
    if engine.startswith('sqlite'):
        score_clause = SQLITE_SORT_CLAUSES[sort_by]
    elif engine.startswith('mysql'):
        score_clause = MYSQL_SORT_CLAUSES[sort_by]
    elif engine.startswith('oracle'):
        score_clause = ORACLE_SORT_CLAUSES[sort_by]
    elif engine in MSSQL_ENGINES:
        score_clause = MSSQL_SORT_CLAUSES[sort_by]
    else:
        score_clause = SORT_CLAUSES[sort_by]
    queryset = queryset.extra(
        select={'sort_value': score_clause},
    )
    return queryset
def field_schema(self):
    """Generate schema for consumption by clients.

    Yields (column_name, schema_dict) pairs for every local field and
    many-to-many field of the model, mapping Django field types to the
    client-side type vocabulary.
    """
    # Map by Django internal type first; fall back to the column's db_type.
    type_map = {
        'AutoField': 'String',
        'BooleanField': 'Boolean',
        'CharField': 'String',
        'DateTimeField': 'Date',
        'DecimalField': 'Number',
        'FloatField': 'Number',
        'ForeignKey': 'String',
        'PositiveIntegerField': 'Number',
        'TextField': 'String',
    }
    db_type_map = {
        'serial': 'Number',
        'text': 'String',
        'boolean': 'Boolean',
        'integer': 'Number',
    }
    # Django supports model._meta -> pylint: disable=W0212
    meta = self.model._meta
    # NOTE(review): db_for_read() is normally given a model class; here it
    # receives an empty queryset — confirm the router in use tolerates this.
    connection = connections[router.db_for_read(self.model.objects.none())]
    for field in meta.local_fields:
        int_type = field.get_internal_type()
        schema = {
            'type': (type_map.get(int_type, None)) or
                    (db_type_map.get(field.db_type(connection), 'String'))
        }
        rel = getattr(field, 'rel', None)
        if rel:
            schema['type'] = 'String'
            schema['relation'] = {
                'name': field.name,
                'collection': model_name(rel.to),
            }
        choices = getattr(field, 'choices', None)
        if choices:
            schema['allowedValues'] = [val for val, _ in choices]
            schema['autoform'] = {
                'options': [{
                    'label': desc,
                    'value': val
                } for val, desc in choices],
            }
        blank = getattr(field, 'blank', None)
        if blank:
            schema['optional'] = True
        formfield = field.formfield()
        if formfield:
            schema['label'] = force_text(formfield.label)
        max_length = getattr(field, 'max_length', None)
        if max_length is not None:
            schema['max'] = max_length
        if int_type == 'PositiveIntegerField':
            schema['min'] = 0
        if int_type in ('DecimalField', 'FloatField'):
            schema['decimal'] = True
        yield field.column, schema
    for field in meta.local_many_to_many:
        schema = {
            'type': '[String]',
            'relation': {
                'name': field.name,
                'collection': model_name(field.rel.to),
            },
        }
        blank = getattr(field, 'blank', None)
        if blank:
            schema['optional'] = True
        formfield = field.formfield()
        if formfield:
            schema['label'] = force_text(formfield.label)
        yield '%s_ids' % field.column, schema
def get_db_meta(app=DEFAULT_APP_NAME, db_alias=None, table=None, verbosity=0, column=None):
    """Return a dict of dicts containing metadata about the database tables associated with an app

    TODO: allow multiple apps

    >>> get_db_meta('crawler', db_alias='default', table='crawler_wikiitem')  # doctest: +ELLIPSIS
    OrderedDict([('WikiItem', OrderedDict([('Meta', OrderedDict([('primary_key', None), ('count', 1332), ('db_table', u'crawler_wikiitem')])), (u'id', OrderedDict([('name', u'id'), ('type', ...
    """
    if verbosity:
        print 'Looking for app %r.' % (app, )
    # Resolve an app label to an app object; empty string means "all apps".
    if app and isinstance(app, basestring):
        app = djdb.get_app(app, verbosity=verbosity)
    else:
        app = djdb.get_app('')
    model_names = list(mc.__name__ for mc in models.get_models(app))
    if verbosity:
        print 'Found %d models for app %r.' % (len(model_names), app)
    meta = OrderedDict()
    # inspectdb uses: for table_name in connection.introspection.table_names(cursor):
    for model_name in model_names:
        model = djdb.get_model(model_name, app=app)
        # An explicit db_alias overrides the router's choice.
        if db_alias:
            model_db_alias = db_alias
        else:
            model_db_alias = router.db_for_read(model)
        queryset = model.objects
        if model_db_alias:
            queryset = queryset.using(model_db_alias)
        # `table` may be an exact table name or a predicate callable used
        # to filter which models are introspected.
        if model and table is not None and isinstance(table, basestring):
            if model._meta.db_table != table:
                if verbosity > 1:
                    print 'Skipped model named %s with db table names %s.' % (
                        model_name, model._meta.db_table)
                continue
        elif callable(table):
            if not table(model._meta.db_table):
                if verbosity > 1:
                    print 'Skipped model named %s with db table names %s.' % (
                        model_name, model._meta.db_table)
                continue
        count = None
        try:
            if verbosity > 1:
                print 'Trying to count records in model %r and db_alias %r' % (
                    model, model_db_alias)
            count = queryset.count()
        except DatabaseError as e:
            if verbosity:
                print_exc()
                print "DatabaseError: Unable to count records for model '%s' (%s) because of %s." % (
                    model.__name__, repr(model), e)
            # Reset the (possibly broken) connection before moving on.
            connection.close()
        except:
            print_exc()
            print 'Connection doesnt exist?'
        meta[model_name] = OrderedDict()
        meta[model_name]['Meta'] = OrderedDict()
        meta[model_name]['Meta']['primary_key'] = None
        meta[model_name]['Meta']['count'] = count
        meta[model_name]['Meta']['db_table'] = model._meta.db_table
        if verbosity > 1:
            print '%s.Meta = %r' % (model_name, meta[model_name]['Meta'])
        # inspectdb uses: connection.introspection.get_table_description(cursor, table_name)
        properties_of_fields = sql.get_meta_dicts(
            cursor=model_db_alias,
            table=meta[model_name]['Meta']['db_table'],
            verbosity=verbosity)
        model_meta = OrderedDict(
            (field['name'], field) for field in properties_of_fields)
        if verbosity > 1:
            print '-' * 20 + model_name + '-' * 20
        db_primary_keys = [
            field['name'] for field in properties_of_fields
            if field['primary_key']
        ]
        # Only record a primary key when it is unambiguous.
        if len(db_primary_keys) == 1:
            meta[model_name]['Meta']['primary_key'] = db_primary_keys[0]
        # augment model_meta with additional stats, but only if there are enough rows to get statistics
        model_meta = augment_model_meta(
            model, model_db_alias, model_meta,
            column_name_filter=column, count=count, verbosity=verbosity)
        if verbosity > 1:
            print model_meta
        meta[model_name].update(model_meta)
    return meta
def databases(self):
    """Database aliases touched by this test case: the default alias plus
    whichever alias the router reads AuditEvent records from."""
    audit_alias = router.db_for_read(AuditEvent)
    return {"default", audit_alias}
def get_queryset(self):
    """Return an RPC-backed queryset when RPC is enabled for the model's
    read database; otherwise fall back to the default manager queryset."""
    read_alias = router.db_for_read(self.model)
    if not rpc_enabled(read_alias):
        return super(DjangoRpcManager, self).get_queryset()
    return self._rpc_queryset_class(
        model=self.model, using=self._db, hints=self._hints)
def get_query_set(self):
    """Return the related objects, preferring a prefetched cache entry
    over a fresh database query."""
    cache = getattr(self.instance, '_prefetched_objects_cache', None)
    if cache is not None and self.prefetch_cache_name in cache:
        return cache[self.prefetch_cache_name]
    db = self._db or router.db_for_read(self.model, instance=self.instance)
    base_qs = super(GenericRelatedObjectManager, self).get_query_set()
    return base_qs.using(db).filter(**self.core_filters)
def _apply_rel_filters(self, queryset):
    """
    Filter the queryset for the instance this manager is bound to.
    """
    # An explicit manager db wins; otherwise ask the router per instance.
    read_alias = self._db or router.db_for_read(
        self.model, instance=self.instance)
    bound_qs = queryset.using(read_alias)
    return bound_qs.filter(**self.core_filters)
def _build_queryset(self,
                    project,
                    query=None,
                    status=None,
                    tags=None,
                    bookmarked_by=None,
                    assigned_to=None,
                    first_release=None,
                    sort_by='date',
                    unassigned=None,
                    age_from=None,
                    age_from_inclusive=True,
                    age_to=None,
                    age_to_inclusive=True,
                    date_from=None,
                    date_from_inclusive=True,
                    date_to=None,
                    date_to_inclusive=True,
                    cursor=None,
                    limit=None):
    """Build a Group queryset for *project* from the given search filters,
    annotated with a ``sort_value`` column matching *sort_by*.

    NOTE(review): ``cursor`` and ``limit`` are accepted but not used in
    this body — presumably consumed by the caller; confirm.
    """
    from sentry.models import Event, Group, GroupStatus

    engine = get_db_engine('default')

    queryset = Group.objects.filter(project=project)

    if query:
        # TODO(dcramer): if we want to continue to support search on SQL
        # we should at least optimize this in Postgres so that it does
        # the query filter **after** the index filters, and restricts the
        # result set
        queryset = queryset.filter(
            Q(message__icontains=query) |
            Q(culprit__icontains=query)
        )

    if status is None:
        # By default hide groups in any transient deletion/merge state.
        queryset = queryset.exclude(
            status__in=(
                GroupStatus.PENDING_DELETION,
                GroupStatus.DELETION_IN_PROGRESS,
                GroupStatus.PENDING_MERGE,
            )
        )
    else:
        queryset = queryset.filter(status=status)

    if bookmarked_by:
        queryset = queryset.filter(
            bookmark_set__project=project,
            bookmark_set__user=bookmarked_by,
        )

    if assigned_to:
        queryset = queryset.filter(
            assignee_set__project=project,
            assignee_set__user=assigned_to,
        )
    elif unassigned in (True, False):
        queryset = queryset.filter(
            assignee_set__isnull=unassigned,
        )

    if first_release:
        # EMPTY is a sentinel meaning "release filter matches nothing".
        if first_release is EMPTY:
            return queryset.none()
        queryset = queryset.filter(
            first_release__project=project,
            first_release__version=first_release,
        )

    if tags:
        matches = self._tags_to_filter(project, tags)
        if not matches:
            return queryset.none()
        queryset = queryset.filter(
            id__in=matches,
        )

    if age_from or age_to:
        # Bound by first_seen, honoring the inclusive/exclusive flags.
        params = {}
        if age_from:
            if age_from_inclusive:
                params['first_seen__gte'] = age_from
            else:
                params['first_seen__gt'] = age_from
        if age_to:
            if age_to_inclusive:
                params['first_seen__lte'] = age_to
            else:
                params['first_seen__lt'] = age_to
        queryset = queryset.filter(**params)

    if date_from or date_to:
        # Date filters match individual events, then narrow groups to
        # those owning a matching event.
        params = {
            'project_id': project.id,
        }
        if date_from:
            if date_from_inclusive:
                params['datetime__gte'] = date_from
            else:
                params['datetime__gt'] = date_from
        if date_to:
            if date_to_inclusive:
                params['datetime__lte'] = date_to
            else:
                params['datetime__lt'] = date_to

        event_queryset = Event.objects.filter(**params)

        # limit to the first 1000 results
        group_ids = event_queryset.distinct().values_list(
            'group_id', flat=True
        )[:1000]

        # if Event is not on the primary database remove Django's
        # implicit subquery by coercing to a list
        base = router.db_for_read(Group)
        using = router.db_for_read(Event)
        # MySQL also cannot do a LIMIT inside of a subquery
        if base != using or engine.startswith('mysql'):
            group_ids = list(group_ids)

        queryset = queryset.filter(
            id__in=group_ids,
        )

    # Pick the backend-specific SQL expression used to sort results.
    if engine.startswith('sqlite'):
        score_clause = SQLITE_SORT_CLAUSES[sort_by]
    elif engine.startswith('mysql'):
        score_clause = MYSQL_SORT_CLAUSES[sort_by]
    elif engine.startswith('oracle'):
        score_clause = ORACLE_SORT_CLAUSES[sort_by]
    elif engine in MSSQL_ENGINES:
        score_clause = MSSQL_SORT_CLAUSES[sort_by]
    else:
        score_clause = SORT_CLAUSES[sort_by]

    queryset = queryset.extra(
        select={'sort_value': score_clause},
    )
    return queryset
def get_db(self):
    """Return the connection for this model's read alias when it is backed
    by the django_atlassian JIRA engine; otherwise return None."""
    alias = router.db_for_read(self._meta.model)
    engine = connections.databases[alias]['ENGINE']
    if engine == 'django_atlassian.backends.jira':
        return connections[alias]
    return None
def get_related_objects(instances, filters=None):
    """Build a queryset of objects related to *instances* through
    ``rel_obj_descriptor``, annotating each row (via ``extra(select=...)``)
    with the source instance's pk under ``INPUT_ATTR_PREFIX``.

    NOTE(review): ``rel_obj_descriptor`` and ``instance`` are not defined
    in this function — presumably closure variables of an enclosing
    scope; confirm against the full file.

    Returns the filtered queryset, or ``[]`` when the descriptor kind is
    not recognized.
    """
    queryset = None
    apply_filters = mk_filter_function(filters)
    # A plain function/method defines the relationship itself: delegate.
    if type(rel_obj_descriptor) in (types.FunctionType, types.MethodType):
        return rel_obj_descriptor(instances, apply_filters)
    # FK from instance to a related object
    elif type(rel_obj_descriptor) == ReverseSingleRelatedObjectDescriptor:
        field = rel_obj_descriptor.field
        rel_obj_attr = field.get_foreign_related_value
        instance_attr = field.get_local_related_value
        query = {'%s__in' % field.related_query_name(): instances}
        rel_mgr = field.rel.to._default_manager
        # If the related manager indicates that it should be used for related
        # fields, respect that.
        if getattr(rel_mgr, 'use_for_related_fields', False):
            queryset = rel_mgr
        else:
            queryset = QuerySet(field.rel.to)
        queryset = queryset.filter(**query).only('pk')
        table = instances[0]._meta.db_table
        pk_field = instances[0]._meta.pk.column
        related_table = field.related_field.model._meta.db_table
        if table == related_table:
            # XXX hack: assuming django uses T2 for joining two tables of same name
            table = 'T2'
        queryset = queryset.extra(
            select={INPUT_ATTR_PREFIX: '%s.%s' % (table, pk_field)})
    # reverse FK from instance to related objects with FK to the instance
    elif type(rel_obj_descriptor) == ForeignRelatedObjectsDescriptor:
        rel_field = rel_obj_descriptor.related.field
        rel_obj_attr = rel_field.get_local_related_value
        rel_column = rel_field.column
        rel_model = rel_obj_descriptor.related.model
        # Fresh manager instance bound to the related model, bypassing any
        # instance-bound state on the default manager.
        rel_mgr = rel_model._default_manager.__class__()
        rel_mgr.model = rel_model
        query = {'%s__in' % rel_field.name: instances}
        queryset = rel_mgr.get_queryset().filter(**query).only('pk')
        queryset = queryset.extra(
            select={INPUT_ATTR_PREFIX: '%s' % rel_column})
    # M2M from instance to related objects
    elif type(rel_obj_descriptor) in (ReverseManyRelatedObjectsDescriptor,
                                      ManyRelatedObjectsDescriptor):
        db = router.db_for_read(instance.__class__, instance=instance)
        connection = connections[db]
        mgr = rel_obj_descriptor.__get__(instance)
        query = {'%s__in' % mgr.query_field_name: instances}
        queryset = super(mgr.__class__, mgr).get_queryset().filter(**query).only('pk')
        fk = mgr.through._meta.get_field(mgr.source_field_name)
        join_table = mgr.through._meta.db_table
        qn = connection.ops.quote_name
        # Expose each source-side join column of the through table,
        # quoted for the active backend.
        queryset = queryset.extra(select=dict(
            ('%s%s' % (INPUT_ATTR_PREFIX, f.attname),
             '%s.%s' % (qn(join_table), qn(f.column)))
            for f in fk.local_related_fields))
    # if you just do 'if queryset', that triggers query execution because
    # python checks length of the enumerable. to prevent query execution, check
    # if queryset is not None.
    if queryset is not None:
        return apply_filters(queryset)
    return []
def setUpClass(cls):
    """Generate a random test domain and resolve the plproxy read alias
    once for the whole test class."""
    super(SerializationTests, cls).setUpClass()
    cls.domain = uuid.uuid4().hex
    plproxy_hint = {HINT_PLPROXY: True}
    cls.using = router.db_for_read(XFormInstanceSQL, **plproxy_hint)
def lifetime_value(provider=None):
    """Compute per-profile, per-unit lifetime value figures.

    Returns ``{slug: {unit: {'contract_value': .., 'payments': ..,
    'deferred_revenue': ..}}}`` aggregated from the ``saas_transaction``
    ledger, optionally restricted to a single *provider*.

    NOTE(review): the raw queries below are built with %-interpolation;
    all interpolated values come from settings constants and integer pks,
    not user input — keep it that way.
    """
    #pylint:disable=too-many-locals,too-many-statements
    # Contract value (i.e. "Total Sales")
    kwargs = {'orig_organization': provider} if provider else {}
    contract_values = Transaction.objects.filter(
        dest_account=Transaction.PAYABLE,
        **kwargs).values(slug=F('dest_organization__slug')).annotate(
        amount=Sum('orig_amount'),
        unit=F('orig_unit')).order_by('dest_organization__slug')
    by_profiles = {
        val['slug']: {
            val['unit']: {
                'contract_value': val['amount']
            }
        } for val in contract_values
    }
    # Payments
    # Transfers of funds to provider. The only sources are
    # processor:Funds (online) and subscriber:Liability (offline).
    #
    # Payments of funds from subscriber shows as subscriber:Liability
    # to provider:Funds transactions.
    if provider:
        provider_clause = "AND dest_organization_id = %d" % provider.pk
    else:
        provider_clause = ("AND NOT dest_organization_id IN (%d)" %
            settings.PROCESSOR_ID)
    # We need a `DISTINCT` statement for charges that pay
    # multiple subscriptions to the same provider.
    payments_query = """WITH transfers AS (
SELECT * FROM saas_transaction
WHERE orig_account='%(funds)s'
AND (dest_account='%(funds)s' OR dest_account='%(offline)s')
%(provider_clause)s),
payments AS (
SELECT * FROM saas_transaction
WHERE orig_account='%(liability)s'
AND dest_account='%(funds)s'
AND dest_organization_id IN %(processor_ids)s),
matched_transfers_payments AS (
SELECT DISTINCT payments.event_id, payments.orig_organization_id,
payments.dest_unit, payments.dest_amount
FROM transfers INNER JOIN payments
ON transfers.event_id = payments.event_id)
SELECT saas_organization.slug, matched_transfers_payments.dest_unit,
SUM(matched_transfers_payments.dest_amount)
FROM matched_transfers_payments
INNER JOIN saas_organization
ON saas_organization.id = matched_transfers_payments.orig_organization_id
GROUP BY saas_organization.slug, matched_transfers_payments.dest_unit""" % {
        'provider_clause': provider_clause,
        'processor_ids': '(%d)' % settings.PROCESSOR_ID,
        'funds': Transaction.FUNDS,
        'offline': Transaction.OFFLINE,
        'liability': Transaction.LIABILITY
    }
    # XXX transfers: without processor fee, payments: with processor fee.
    with connection.cursor() as cursor:
        cursor.execute(payments_query, params=None)
        for row in cursor.fetchall():
            organization_slug = row[0]
            unit = row[1]
            amount = row[2]
            account = Transaction.LIABILITY
            # Merge into the nested {slug: {unit: {account: amount}}} map.
            if organization_slug not in by_profiles:
                by_profiles[organization_slug] = {unit: {account: amount}}
            else:
                if unit not in by_profiles[organization_slug]:
                    by_profiles[organization_slug].update(
                        {unit: {
                            account: amount
                        }})
                else:
                    by_profiles[organization_slug][unit].update(
                        {account: amount})
    # Refunds reduce the payments figure below.
    kwargs = {'dest_organization': provider} if provider else {}
    refunds = Transaction.objects.filter(
        dest_account=Transaction.REFUND,
        orig_account=Transaction.REFUNDED,
        **kwargs).values(
        slug=F('orig_organization__slug'),
        unit=F('orig_unit')).annotate(
        amount=Sum('orig_amount')).order_by('orig_organization__slug')
    for val in refunds:
        organization_slug = val['slug']
        unit = val['unit']
        amount = val['amount']
        account = Transaction.REFUNDED
        if organization_slug not in by_profiles:
            by_profiles[organization_slug] = {unit: {account: amount}}
        else:
            if unit not in by_profiles[organization_slug]:
                by_profiles[organization_slug].update(
                    {unit: {
                        account: amount
                    }})
            else:
                by_profiles[organization_slug][unit].update({account: amount})
    # deferred revenue
    if is_sqlite3(router.db_for_read(Transaction)):
        extract_number = 'substr(saas_transaction.event_id, 5)'
    else:
        # We would use `substr(saas_transaction.event_id, 5)` which is syntax-
        # compatible if it were not for PostgreSQL throwing an exception on
        # the trailing '/' character.
        extract_number = (
            r"substring(saas_transaction.event_id from 'sub_(\d+)/')")
    deferred_revenues_query = """
SELECT saas_organization.slug, saas_transaction.dest_unit,
SUM(saas_transaction.dest_amount)
FROM saas_organization
INNER JOIN saas_subscription
ON saas_organization.id = saas_subscription.organization_id
INNER JOIN saas_plan
ON saas_subscription.plan_id = saas_plan.id
INNER JOIN saas_transaction
ON cast(%(extract_number)s AS integer) = saas_subscription.id
WHERE saas_transaction.dest_account = '%(backlog)s' %(provider_clause)s
GROUP BY saas_organization.slug, saas_transaction.dest_unit
""" % {
        'backlog': Transaction.BACKLOG,
        'extract_number': extract_number,
        'provider_clause': ("AND saas_plan.organization_id = %d" % provider.pk
            if provider else "")
    }
    with connection.cursor() as cursor:
        cursor.execute(deferred_revenues_query, params=None)
        for row in cursor.fetchall():
            organization_slug = row[0]
            unit = row[1]
            amount = row[2]
            account = Transaction.BACKLOG
            if organization_slug not in by_profiles:
                by_profiles[organization_slug] = {unit: {account: amount}}
            else:
                if unit not in by_profiles[organization_slug]:
                    by_profiles[organization_slug].update(
                        {unit: {
                            account: amount
                        }})
                else:
                    by_profiles[organization_slug][unit].update(
                        {account: amount})
    # Fold the raw per-account figures into the final metrics.
    results = {}
    for slug, by_units in six.iteritems(by_profiles):
        for unit, val in six.iteritems(by_units):
            payments = (val.get(Transaction.LIABILITY, 0) -
                val.get(Transaction.REFUNDED, 0))
            backlog = val.get(Transaction.BACKLOG, 0)
            deferred_revenue = payments - backlog if payments > backlog else 0
            val.update({
                'contract_value': val.get('contract_value', 0),
                'payments': payments,
                'deferred_revenue': deferred_revenue
            })
            if slug not in results:
                results[slug] = {unit: val}
            else:
                if unit not in results[slug]:
                    results[slug].update({unit: val})
                else:
                    results[slug][unit].update(val)
    return results
def get_related(queryset):
    """Collect every object that would be cascade-deleted along with the
    objects in *queryset*, keyed by model."""
    read_alias = router.db_for_read(queryset.model)
    collector = Collector(using=read_alias)
    collector.collect(queryset)
    return collector.data
def using(self, alias):
    """Short-circuit when *alias* is already the model's read database,
    avoiding an unnecessary queryset clone."""
    if alias != router.db_for_read(self.model):
        return super(SharedMemoryManager, self).using(alias)
    return self
def delete_translation(self, request, object_id, extra_context=None):
    """Admin view that deletes a single language translation of an object.

    Refuses to delete the last remaining translation, checks delete
    permission before revealing object existence, and shows a
    confirmation page on GET / performs the deletion on POST.
    """
    language = get_language_from_request(request)
    opts = self.model._meta
    translationopts = self.translated_model._meta
    app_label = translationopts.app_label
    try:
        obj = self.queryset(request).get(pk=unquote(object_id))
    except self.model.DoesNotExist:
        # Don't raise Http404 just yet, because we haven't checked
        # permissions yet. We don't want an unauthenticated user to be able
        # to determine whether a given object exists.
        obj = None
    if not self.has_delete_permission(request, obj):
        raise PermissionDenied
    if obj is None:
        raise Http404(
            _('%(name)s object with primary key %(key)r does not exist.') % {
                'name': force_text(opts.verbose_name),
                'key': escape(object_id)})
    # Never delete the only translation of an object.
    if not len(translation_pool.annotate_with_translations(obj).translations) > 1:
        raise Http404(_('There only exists one translation for this page'))
    translationobj = get_object_or_404(
        self.translated_model,
        **{self.translation_of_field + '__id': object_id,
           'language': language})
    if django.VERSION[1] > 2:  # pragma: no cover
        # WARNING: Django 1.3 is not officially supported yet!
        # Django >= 1.3 get_deleted_objects() requires a db alias.
        using = router.db_for_read(self.model)
        kwargs = {
            'admin_site': self.admin_site,
            'user': request.user,
            'using': using
        }
    else:
        kwargs = {
            'admin_site': self.admin_site,
            'user': request.user,
        }
    deleted_objects, perms_needed = get_deleted_objects(
        [translationobj], translationopts, **kwargs)[:2]
    if request.method == 'POST':
        # The user has already confirmed the deletion.
        if perms_needed:
            raise PermissionDenied
        message = _('%(obj_name)s with language %(language)s was deleted') % {
            'language': [name for code, name in settings.LANGUAGES
                         if code == language][0],
            'obj_name': force_text(translationopts.verbose_name)}
        self.log_change(request, translationobj, message)
        self.message_user(request, message)
        translationobj.delete()
        if not self.has_change_permission(request, None):
            return HttpResponseRedirect("../../../../")
        return HttpResponseRedirect("../../")
    context = {
        "title": _("Are you sure?"),
        "object_name": force_text(translationopts.verbose_name),
        "object": translationobj,
        "deleted_objects": deleted_objects,
        "perms_lacking": perms_needed,
        "opts": translationopts,
        "root_path": self.admin_site.root_path,
        "app_label": app_label,
    }
    context.update(extra_context or {})
    context_instance = RequestContext(request, current_app=self.admin_site.name)
    return render_to_response(self.delete_confirmation_template or [
        "admin/%s/%s/delete_confirmation.html" % (
            app_label, translationopts.object_name.lower()),
        "admin/%s/delete_confirmation.html" % app_label,
        "admin/delete_confirmation.html"
    ], context, context_instance=context_instance)
def delete_translation(self, request, object_id, extra_context=None):
    """Admin view that deletes one language translation of an article,
    together with the CMS plugins attached to it for that language.

    Refuses to delete the last remaining translation and shows a
    confirmation page on GET / performs the deletion on POST.
    """
    if 'language' in request.GET:
        language = request.GET['language']
    else:
        language = get_language_from_request(request)
    opts = Article._meta
    titleopts = Title._meta
    app_label = titleopts.app_label
    pluginopts = CMSPlugin._meta
    try:
        obj = self.get_queryset(request).get(pk=unquote(object_id))
    except self.model.DoesNotExist:
        # Don't raise Http404 just yet, because we haven't checked
        # permissions yet. We don't want an unauthenticated user to be able
        # to determine whether a given object exists.
        obj = None
    if not self.has_delete_permission(request, obj):
        return HttpResponseForbidden(
            force_text(
                _('You do not have permission to change this article')))
    if obj is None:
        raise Http404(
            _('%(name)s object with primary key %(key)r does not exist.') % {
                'name': force_text(opts.verbose_name),
                'key': escape(object_id)
            })
    # Never delete the only translation of an article.
    if not len(list(obj.get_languages())) > 1:
        raise Http404(
            _('There only exists one translation for this article'))
    titleobj = get_object_or_404(Title,
                                 article__id=object_id,
                                 language=language)
    saved_plugins = CMSPlugin.objects.filter(
        placeholder__article__id=object_id, language=language)
    using = router.db_for_read(self.model)
    kwargs = {
        'admin_site': self.admin_site,
        'user': request.user,
        'using': using
    }
    # Merge the cascade preview and missing permissions for both the
    # title itself and its plugins.
    deleted_objects, __, perms_needed = get_deleted_objects(
        [titleobj], titleopts, **kwargs)[:3]
    to_delete_plugins, __, perms_needed_plugins = get_deleted_objects(
        saved_plugins, pluginopts, **kwargs)[:3]
    deleted_objects.append(to_delete_plugins)
    perms_needed = set(list(perms_needed) + list(perms_needed_plugins))
    if request.method == 'POST':
        # The user has already confirmed the deletion.
        if perms_needed:
            raise PermissionDenied
        message = _(
            'Title and plugins with language %(language)s was deleted') % {
                'language': force_text(
                    get_language_object(language)['name'])
            }
        self.log_change(request, titleobj, message)
        messages.info(request, message)
        titleobj.delete()
        for p in saved_plugins:
            p.delete()
        # Re-save the public counterpart so it picks up the change.
        public = obj.publisher_public
        if public:
            public.save()
        if not self.has_change_permission(request, None):
            return HttpResponseRedirect(admin_reverse('index'))
        return HttpResponseRedirect(
            admin_reverse('cms_articles_article_changelist'))
    context = {
        'title': _('Are you sure?'),
        'object_name': force_text(titleopts.verbose_name),
        'object': titleobj,
        'deleted_objects': deleted_objects,
        'perms_lacking': perms_needed,
        'opts': opts,
        'root_path': admin_reverse('index'),
        'app_label': app_label,
    }
    context.update(extra_context or {})
    request.current_app = self.admin_site.name
    return render(
        request, self.delete_confirmation_template or [
            'admin/%s/%s/delete_confirmation.html' % (
                app_label, titleopts.object_name.lower()),
            'admin/%s/delete_confirmation.html' % app_label,
            'admin/delete_confirmation.html'
        ], context)
def db(self):
    """Database alias to use: an explicit override wins over the router."""
    if self._db:
        return self._db
    return router.db_for_read(self.model, **self._hints)
def track(user, to_track, log=False, **kwargs):
    """
    Enables a user to track objects or change his tracking options for
    these objects.

    :param to_track: the object(s) to track
    :param log: should an action be logged if a tracker is created?
    :param verbs (kwarg): the verbs to track. None means 'track all verbs'
    :param actor_only (kwarg): should we track actions only when the object
        is the actor?
    """
    from .models import Tracker
    from .gfk import get_content_type, get_pk
    # convert to_track and verbs to sets
    to_track = to_set(to_track)
    kwargs['verbs'] = to_set(kwargs.get('verbs', None))
    # create query to retrieve matching trackers
    db = kwargs.pop('using', None)
    db_from_model = False
    q = Q()
    for obj in to_track:
        pk = get_pk(obj)
        q |= Q(tracked_ct=get_content_type(obj), tracked_pk=pk)
        if pk:
            # A saved object knows which database it lives on.
            db = obj._state.db
        elif not db:
            # Unsaved object and no explicit alias: fall back to the router.
            db = router.db_for_read(obj._meta.model)
            db_from_model = True
    if db_from_model:
        warnings.warn('The database to use for the tracker has been '
            'automatically set to the default database of the model to track. '
            'You may want to provide a db alias with the "using" kwarg.',
            Warning)
    q = q & Q(user=user)
    # fetch matching trackers
    trackers = list(Tracker.objects.db_manager(db).filter(q)
                    .prefetch_related('tracked'))
    tracked_objs = []
    # modify existing matching trackers if needed
    for tracker in trackers:
        changed = []
        for k, v in kwargs.items():
            if getattr(tracker, k, None) != v:
                changed.append(k)
                setattr(tracker, k, v)
        if changed:
            tracker.save()
        tracked_objs.append(tracker.tracked)
    # NOTE(review): popping then immediately re-inserting 'last_updated'
    # is a no-op at this point — possibly intended to happen before the
    # comparison loop above; confirm.
    last_updated = kwargs.pop('last_updated', None)
    if last_updated is not None:
        kwargs['last_updated'] = last_updated
    # create trackers to untracked objects
    untracked_objs = to_track.difference(tracked_objs)
    for obj in untracked_objs:
        trackers.append(Tracker.objects.db_manager(db)
                        .create(user=user, tracked=obj, **kwargs))
    if log and untracked_objs:
        log_action(user, verb=_('started tracking'), targets=untracked_objs)
def _aggregate_transactions_change_by_period(organization, account,
                                             date_periods, orig='orig',
                                             dest='dest'):
    """
    Returns a table of records over a period of 12 months *from_date*.

    For each period boundary pair in *date_periods* computes churned,
    total and new customer counts and receivable amounts, returning
    ``((churn_customers, customers, new_customers),
    (churn_receivables, receivables, new_receivables), unit)``.
    """
    #pylint:disable=too-many-locals,too-many-arguments,too-many-statements
    #pylint:disable=invalid-name
    customers = []
    receivables = []
    new_customers = []
    new_receivables = []
    churn_customers = []
    churn_receivables = []
    unit = None
    period_start = date_periods[0]
    for period_end in date_periods[1:]:
        delta = Plan.get_natural_period(1, organization.natural_interval)
        prev_period_end = period_end - delta
        prev_period_start = prev_period_end - relativedelta(
            period_end, period_start)
        LOGGER.debug(
            "computes churn between periods ['%s', '%s'] and ['%s', '%s']",
            prev_period_start.isoformat(), prev_period_end.isoformat(),
            period_start.isoformat(), period_end.isoformat())
        # Churn: organizations present in the previous period but absent
        # from the current one.
        try:
            churn_query = RawQuery(
                """SELECT COUNT(DISTINCT(prev.%(dest)s_organization_id)),
SUM(prev.%(dest)s_amount), prev.%(dest)s_unit
FROM saas_transaction prev
LEFT OUTER JOIN (
SELECT distinct(%(dest)s_organization_id), %(orig)s_unit
FROM saas_transaction
WHERE created_at >= '%(period_start)s'
AND created_at < '%(period_end)s'
AND %(orig)s_organization_id = '%(organization_id)s'
AND %(orig)s_account = '%(account)s'
) curr
ON prev.%(dest)s_organization_id = curr.%(dest)s_organization_id
WHERE prev.created_at >= '%(prev_period_start)s'
AND prev.created_at < '%(prev_period_end)s'
AND prev.%(orig)s_organization_id = '%(organization_id)s'
AND prev.%(orig)s_account = '%(account)s'
AND curr.%(dest)s_organization_id IS NULL
GROUP BY prev.%(dest)s_unit
""" % {
                    "orig": orig,
                    "dest": dest,
                    "prev_period_start": prev_period_start,
                    "prev_period_end": prev_period_end,
                    "period_start": period_start,
                    "period_end": period_end,
                    "organization_id": organization.id,
                    "account": account
                }, router.db_for_read(Transaction))
            churn_customer, churn_receivable, churn_receivable_unit = next(
                iter(churn_query))
            if churn_receivable_unit:
                unit = churn_receivable_unit
        except StopIteration:
            # No rows: nothing churned in this period.
            churn_customer, churn_receivable, churn_receivable_unit = 0, 0, None
        # A bit ugly but it does the job ...
        if orig == 'orig':
            kwargs = {
                'orig_organization': organization,
                'orig_account': account
            }
        else:
            kwargs = {
                'dest_organization': organization,
                'dest_account': account
            }
        # Totals for the current period.
        customer = 0
        receivable = 0
        receivable_unit = None
        query_result = Transaction.objects.filter(
            created_at__gte=period_start,
            created_at__lt=period_end,
            **kwargs).values('%s_unit' % dest).annotate(
            count=Count('%s_organization' % dest, distinct=True),
            sum=Sum('%s_amount' % dest))
        if query_result:
            customer = query_result[0]['count']
            receivable = query_result[0]['sum']
            receivable_unit = query_result[0]['%s_unit' % dest]
            if receivable_unit:
                unit = receivable_unit
        # New: organizations present in the current period but absent
        # from the previous one.
        try:
            new_query = RawQuery(
                """SELECT count(distinct(curr.%(dest)s_organization_id)),
SUM(curr.%(dest)s_amount), curr.%(dest)s_unit
FROM saas_transaction curr
LEFT OUTER JOIN (
SELECT distinct(%(dest)s_organization_id)
FROM saas_transaction
WHERE created_at >= '%(prev_period_start)s'
AND created_at < '%(prev_period_end)s'
AND %(orig)s_organization_id = '%(organization_id)s'
AND %(orig)s_account = '%(account)s') prev
ON curr.%(dest)s_organization_id = prev.%(dest)s_organization_id
WHERE curr.created_at >= '%(period_start)s'
AND curr.created_at < '%(period_end)s'
AND curr.%(orig)s_organization_id = '%(organization_id)s'
AND curr.%(orig)s_account = '%(account)s'
AND prev.%(dest)s_organization_id IS NULL
GROUP BY curr.%(dest)s_unit""" % {
                    "orig": orig,
                    "dest": dest,
                    "prev_period_start": prev_period_start,
                    "prev_period_end": prev_period_end,
                    "period_start": period_start,
                    "period_end": period_end,
                    "organization_id": organization.id,
                    "account": account
                }, router.db_for_read(Transaction))
            new_customer, new_receivable, new_receivable_unit = next(
                iter(new_query))
            if new_receivable_unit:
                unit = new_receivable_unit
        except StopIteration:
            new_customer, new_receivable, new_receivable_unit = 0, 0, None
        units = get_different_units(churn_receivable_unit, receivable_unit,
            new_receivable_unit)
        if len(units) > 1:
            LOGGER.error("different units: %s", units)
        period = period_end
        churn_customers += [(period, churn_customer)]
        churn_receivables += [(period, int(churn_receivable or 0))]
        customers += [(period, customer)]
        receivables += [(period, int(receivable or 0))]
        new_customers += [(period, new_customer)]
        new_receivables += [(period, int(new_receivable or 0))]
        period_start = period_end
    return ((churn_customers, customers, new_customers),
        (churn_receivables, receivables, new_receivables), unit)
def filter(self, record):
    """Stamp the current tenant's schema name and domain URL onto the log
    record; always lets the record through."""
    tenant = connections[router.db_for_read(None)].tenant
    record.schema_name = tenant.schema_name
    record.domain_url = getattr(tenant, 'domain_url', '')
    return True
def _collection_for_read(self):
    """Resolve the cache table's collection on the router-selected read
    database."""
    read_alias = router.db_for_read(self.cache_model_class)
    return connections[read_alias].database[self._table]
def load_and_store_template(self, template_name, cache_key, site, **params):
    """Fetch the named Template row, cache its content under *cache_key*,
    and return it with a display name identifying db, template and site."""
    template = Template.objects.get(name__exact=template_name, **params)
    read_alias = router.db_for_read(Template, instance=template)
    display_name = 'dbtemplates:%s:%s:%s' % (
        read_alias, template_name, site.domain)
    return set_and_return(cache_key, template.content, display_name)
def aggregate_monthly_churn(organization, account, interval, from_date=None,
                            orig='orig', dest='dest'):
    """
    Returns a table of records over a period of 12 months *from_date*.

    For each monthly period computes churned, total and new customer
    counts and receivable amounts, returning
    ``((churn_customers, customers, new_customers),
    (churn_receivables, receivables, new_receivables))`` where each entry
    is a list of ``(period_end, value)`` tuples.

    Fix: use the ``next()`` builtin instead of the Python-2-only
    ``iterator.next()`` method, matching the sibling
    ``_aggregate_transactions_change_by_period`` implementation and
    keeping the code Python 3 compatible.
    """
    #pylint: disable=too-many-locals,too-many-arguments
    customers = []
    receivables = []
    new_customers = []
    new_receivables = []
    churn_customers = []
    churn_receivables = []
    # We want to be able to compare *last* to *from_date* and not get django
    # warnings because timezones are not specified.
    dates = month_periods(13, from_date)
    trail_period_start = dates[0]
    period_start = dates[1]
    for period_end in dates[2:]:
        # For yearly plans compare against the same month one year back;
        # otherwise compare against the immediately preceding month.
        if interval == Plan.YEARLY:
            prev_period_start = datetime(day=period_start.day,
                month=period_start.month, year=period_start.year - 1,
                tzinfo=period_start.tzinfo)
            prev_period_end = datetime(day=period_end.day,
                month=period_end.month, year=period_end.year - 1,
                tzinfo=period_end.tzinfo)
        else:
            # default to monthly
            prev_period_start = trail_period_start
            prev_period_end = period_start
        # Churn: organizations present in the previous period but absent
        # from the current one.
        churn_query = RawQuery(
            """SELECT COUNT(DISTINCT(prev.%(dest)s_organization_id)),
SUM(prev.%(dest)s_amount)
FROM saas_transaction prev
LEFT OUTER JOIN (
SELECT distinct(%(dest)s_organization_id)
FROM saas_transaction
WHERE created_at >= '%(period_start)s'
AND created_at < '%(period_end)s'
AND %(orig)s_organization_id = '%(organization_id)s'
AND %(orig)s_account = '%(account)s') curr
ON prev.%(dest)s_organization_id = curr.%(dest)s_organization_id
WHERE prev.created_at >= '%(prev_period_start)s'
AND prev.created_at < '%(prev_period_end)s'
AND prev.%(orig)s_organization_id = '%(organization_id)s'
AND prev.%(orig)s_account = '%(account)s'
AND curr.%(dest)s_organization_id IS NULL""" % {
                "orig": orig,
                "dest": dest,
                "prev_period_start": prev_period_start,
                "prev_period_end": prev_period_end,
                "period_start": period_start,
                "period_end": period_end,
                "organization_id": organization.id,
                "account": account
            }, router.db_for_read(Transaction))
        # `next(iter(...))` instead of py2-only `.next()`.
        churn_customer, churn_receivable = next(iter(churn_query))
        # A bit ugly but it does the job ...
        if orig == 'orig':
            kwargs = {
                'orig_organization': organization,
                'orig_account': account
            }
        else:
            kwargs = {
                'dest_organization': organization,
                'dest_account': account
            }
        # Totals for the current period.
        query_result = Transaction.objects.filter(
            created_at__gte=period_start,
            created_at__lt=period_end,
            **kwargs).aggregate(Count('%s_organization' % dest, distinct=True),
                Sum('%s_amount' % dest))
        customer = query_result['%s_organization__count' % dest]
        receivable = query_result['%s_amount__sum' % dest]
        # New: organizations present in the current period but absent
        # from the previous one.
        new_query = RawQuery(
            """SELECT count(distinct(curr.%(dest)s_organization_id)),
SUM(curr.%(dest)s_amount)
FROM saas_transaction curr
LEFT OUTER JOIN (
SELECT distinct(%(dest)s_organization_id)
FROM saas_transaction
WHERE created_at >= '%(prev_period_start)s'
AND created_at < '%(prev_period_end)s'
AND %(orig)s_organization_id = '%(organization_id)s'
AND %(orig)s_account = '%(account)s') prev
ON curr.%(dest)s_organization_id = prev.%(dest)s_organization_id
WHERE curr.created_at >= '%(period_start)s'
AND curr.created_at < '%(period_end)s'
AND curr.%(orig)s_organization_id = '%(organization_id)s'
AND curr.%(orig)s_account = '%(account)s'
AND prev.%(dest)s_organization_id IS NULL""" % {
                "orig": orig,
                "dest": dest,
                "prev_period_start": prev_period_start,
                "prev_period_end": prev_period_end,
                "period_start": period_start,
                "period_end": period_end,
                "organization_id": organization.id,
                "account": account
            }, router.db_for_read(Transaction))
        new_customer, new_receivable = next(iter(new_query))
        period = period_end
        churn_customers += [(period, churn_customer)]
        churn_receivables += [(period, int(churn_receivable or 0))]
        customers += [(period, customer)]
        receivables += [(period, int(receivable or 0))]
        new_customers += [(period, new_customer)]
        new_receivables += [(period, int(new_receivable or 0))]
        trail_period_start = period_start
        period_start = period_end
    return ((churn_customers, customers, new_customers),
        (churn_receivables, receivables, new_receivables))
def setUp(self): super().setUp() self.using = router.db_for_read(CaseAttachmentSQL, **{HINT_PLPROXY: True})
def _cursor(): """Return a DB cursor for reading.""" return connections[router.db_for_read(Document)].cursor()
def _aggregate_transactions_change_by_period(organization, account,
                                             date_periods,
                                             orig='orig', dest='dest'):
    """
    Returns a table of records over a period of 12 months *from_date*.

    For each consecutive pair in *date_periods*, computes churned, total
    and new customer counts and receivable amounts, returning
    ``((churn_customers, customers, new_customers),
       (churn_receivables, receivables, new_receivables))``
    where every series is a list of ``(period_end, value)`` tuples.
    """
    #pylint: disable=too-many-locals,too-many-arguments,invalid-name
    customers = []
    receivables = []
    new_customers = []
    new_receivables = []
    churn_customers = []
    churn_receivables = []
    period_start = date_periods[0]
    for period_end in date_periods[1:]:
        # The comparison window is one natural billing period (per the
        # organization's interval) ending where the current period starts.
        delta = Plan.get_natural_period(1, organization.natural_interval)
        prev_period_end = period_end - delta
        prev_period_start = prev_period_end - relativedelta(
            period_end, period_start)
        LOGGER.debug(
            "computes churn between periods ['%s', '%s'] and ['%s', '%s']",
            prev_period_start.isoformat(), prev_period_end.isoformat(),
            period_start.isoformat(), period_end.isoformat())
        # Churn: organizations active in the previous window with no
        # matching transaction in the current window (anti-join via
        # LEFT OUTER JOIN ... IS NULL).
        # NOTE(review): SQL built by %-interpolation, not parameterized;
        # values originate from model fields here, but confirm none can be
        # attacker-controlled before reusing this pattern.
        churn_query = RawQuery(
            """SELECT COUNT(DISTINCT(prev.%(dest)s_organization_id)),
                      SUM(prev.%(dest)s_amount)
               FROM saas_transaction prev
               LEFT OUTER JOIN (
                 SELECT distinct(%(dest)s_organization_id)
                 FROM saas_transaction
                 WHERE created_at >= '%(period_start)s'
                 AND created_at < '%(period_end)s'
                 AND %(orig)s_organization_id = '%(organization_id)s'
                 AND %(orig)s_account = '%(account)s') curr
               ON prev.%(dest)s_organization_id = curr.%(dest)s_organization_id
               WHERE prev.created_at >= '%(prev_period_start)s'
               AND prev.created_at < '%(prev_period_end)s'
               AND prev.%(orig)s_organization_id = '%(organization_id)s'
               AND prev.%(orig)s_account = '%(account)s'
               AND curr.%(dest)s_organization_id IS NULL""" % {
                "orig": orig, "dest": dest,
                "prev_period_start": prev_period_start,
                "prev_period_end": prev_period_end,
                "period_start": period_start,
                "period_end": period_end,
                "organization_id": organization.id,
                "account": account}, router.db_for_read(Transaction))
        churn_customer, churn_receivable = next(iter(churn_query))
        # A bit ugly but it does the job ...
        if orig == 'orig':
            kwargs = {'orig_organization': organization,
                      'orig_account': account}
        else:
            kwargs = {'dest_organization': organization,
                      'dest_account': account}
        # Totals for the current window through the ORM aggregation API.
        query_result = Transaction.objects.filter(
            created_at__gte=period_start,
            created_at__lt=period_end,
            **kwargs).aggregate(
                Count('%s_organization' % dest, distinct=True),
                Sum('%s_amount' % dest))
        customer = query_result['%s_organization__count' % dest]
        receivable = query_result['%s_amount__sum' % dest]
        # New: organizations active in the current window with no matching
        # transaction in the previous window (mirror of the churn query).
        new_query = RawQuery(
            """SELECT count(distinct(curr.%(dest)s_organization_id)),
                      SUM(curr.%(dest)s_amount)
               FROM saas_transaction curr
               LEFT OUTER JOIN (
                 SELECT distinct(%(dest)s_organization_id)
                 FROM saas_transaction
                 WHERE created_at >= '%(prev_period_start)s'
                 AND created_at < '%(prev_period_end)s'
                 AND %(orig)s_organization_id = '%(organization_id)s'
                 AND %(orig)s_account = '%(account)s') prev
               ON curr.%(dest)s_organization_id = prev.%(dest)s_organization_id
               WHERE curr.created_at >= '%(period_start)s'
               AND curr.created_at < '%(period_end)s'
               AND curr.%(orig)s_organization_id = '%(organization_id)s'
               AND curr.%(orig)s_account = '%(account)s'
               AND prev.%(dest)s_organization_id IS NULL""" % {
                "orig": orig, "dest": dest,
                "prev_period_start": prev_period_start,
                "prev_period_end": prev_period_end,
                "period_start": period_start,
                "period_end": period_end,
                "organization_id": organization.id,
                "account": account}, router.db_for_read(Transaction))
        new_customer, new_receivable = next(iter(new_query))
        period = period_end
        churn_customers += [(period, churn_customer)]
        # SUM() yields NULL for an empty set; coerce to 0.
        churn_receivables += [(period, int(churn_receivable or 0))]
        customers += [(period, customer)]
        receivables += [(period, int(receivable or 0))]
        new_customers += [(period, new_customer)]
        new_receivables += [(period, int(new_receivable or 0))]
        period_start = period_end
    return ((churn_customers, customers, new_customers),
            (churn_receivables, receivables, new_receivables))
def _get_database_connection(cls, action): return { 'read': connections[router.db_for_read(cls)], 'write': connections[router.db_for_write(cls)] }[action]
def read_connection(cls): if connections is None: return connection else: return connections[router.db_for_read(cls)]
def get_from_cache(self, **kwargs): """ Wrapper around QuerySet.get which supports caching of the intermediate value. Callee is responsible for making sure the cache key is cleared on save. """ if not self.cache_fields or len(kwargs) > 1: return self.get(**kwargs) key, value = next(six.iteritems(kwargs)) pk_name = self.model._meta.pk.name if key == "pk": key = pk_name # We store everything by key references (vs instances) if isinstance(value, Model): value = value.pk # Kill __exact since it's the default behavior if key.endswith("__exact"): key = key.split("__exact", 1)[0] if key in self.cache_fields or key == pk_name: cache_key = self.__get_lookup_cache_key(**{key: value}) local_cache = self._get_local_cache() if local_cache is not None: result = local_cache.get(cache_key) if result is not None: return result retval = cache.get(cache_key, version=self.cache_version) if retval is None: result = self.get(**kwargs) # Ensure we're pushing it into the cache self.__post_save(instance=result) if local_cache is not None: local_cache[cache_key] = result return result # If we didn't look up by pk we need to hit the reffed # key if key != pk_name: result = self.get_from_cache(**{pk_name: retval}) if local_cache is not None: local_cache[cache_key] = result return result if not isinstance(retval, self.model): if settings.DEBUG: raise ValueError( "Unexpected value type returned from cache") logger.error("Cache response returned invalid value %r", retval) return self.get(**kwargs) if key == pk_name and int(value) != retval.pk: if settings.DEBUG: raise ValueError("Unexpected value returned from cache") logger.error("Cache response returned invalid value %r", retval) return self.get(**kwargs) retval._state.db = router.db_for_read(self.model, **kwargs) return retval else: return self.get(**kwargs)
def get_query_set(self, **db_hints): return CachedQuerySet(self.field.rel.to).using(\ router.db_for_read(self.field.rel.to, **db_hints))