def loaddb(app_labels, format, options):
    # Load previously dumped data back into the database for the given apps.
    # NOTE(review): this snippet appears truncated -- `success` is set but
    # never consumed in the visible code; presumably a summary/report follows.
    from django.db import connection, transaction, backend
    if options.verbose:
        print "Begin to load data for %s format...\n" % format
    # Tables must be loaded parents-first; _get_table_order resolves that.
    models = _get_table_order(app_labels)
    cursor = connection.cursor()
    errornum = 0
    if not options.remain and not options.stdout:
        # Wipe existing rows first, children-first (reverse dependency order),
        # including the many-to-many join tables of each model.
        m = models[:]
        m.reverse()
        for model in m:
            cursor.execute('DELETE FROM %s WHERE 1=1;' % backend.quote_name(model._meta.db_table))
            for table, fields in get_model_many2many_stru(model):
                cursor.execute('DELETE FROM %s WHERE 1=1;' % backend.quote_name(table))
    success = True
    for model in models:
        try:
            load_model(cursor, model, format, options)
            # Load the model's m2m join tables as pseudo-models.
            for table, fields in get_model_many2many_stru(model):
                load_model(cursor, (table, fields), format, options)
        except Exception, e:
            # Best-effort load: record the failure and keep going.
            success = False
            errornum += 1
def iterator(self):
    """Execute the query on a labeled cursor and yield one dict per row."""
    try:
        select, sql, params = self._get_sql_clause()
    except EmptyResultSet:
        raise StopIteration
    opts = self.model._meta
    if self._fields:
        # self._fields is an explicit list of field names to fetch.
        cols = [opts.get_field(name, many_to_many=False).column for name in self._fields]
        names = self._fields
    else:
        # Default to every field on the model.
        cols = [f.column for f in opts.fields]
        names = [f.attname for f in opts.fields]
    table = backend.quote_name(opts.db_table)
    select = ['%s.%s' % (table, backend.quote_name(col)) for col in cols]
    # Open a cursor labeled with this model's read-labeling information.
    epls, desls = self.model.get_read_labeling(self._itag_has)
    cursor = connection.cursor(epls, desls)
    distinct = self._distinct and "DISTINCT " or ""
    cursor.execute("SELECT " + distinct + ",".join(select) + sql, params)
    while 1:
        chunk = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
        if not chunk:
            raise StopIteration
        for row in chunk:
            yield dict(zip(names, row))
def __get__(self, instance, instance_type=None): if instance is None: raise AttributeError, "Manager must be accessed via instance" # This import is done here to avoid circular import importing this module from django.contrib.contenttypes.models import ContentType # Dynamically create a class that subclasses the related model's # default manager. rel_model = self.field.rel.to superclass = rel_model._default_manager.__class__ RelatedManager = create_generic_related_manager(superclass) manager = RelatedManager( model = rel_model, instance = instance, symmetrical = (self.field.rel.symmetrical and instance.__class__ == rel_model), join_table = backend.quote_name(self.field.m2m_db_table()), source_col_name = backend.quote_name(self.field.m2m_column_name()), target_col_name = backend.quote_name(self.field.m2m_reverse_name()), content_type = ContentType.objects.get_for_model(self.field.model), content_type_field_name = self.field.content_type_field_name, object_id_field_name = self.field.object_id_field_name ) return manager
def __init__(self, sql):
    """
    Find Model:alias and alias.attr patterns in `sql` and replace them
    with proper SQL references.  Remembers the Model tables selected and
    prepends a SELECT clause covering all of their fields.
    """
    from django.db import backend
    self.caller_frame = get_parent_frame()
    self.column_start = 0
    self.table_by_alias = {}
    self.table_inorder = []
    self.sql_orig = sql
    self.sql = sql
    # Pass 1: tables that map to a Model object, written as Model:alias.
    self.sql = self.re_model_table.sub(self.match_model_table, self.sql)
    # Pass 2: rewrite Model table aliases to SQL-friendly aliases.
    self.sql = self.re_table_alias.sub(self.match_table_alias, self.sql)
    # Build a SELECT list naming every field of every remembered table.
    fragments = []
    for table in self.table_inorder:
        fragments.append(",\n".join([backend.quote_name(table.sql_alias) + "." + backend.quote_name(f.column) for f in table.fields]))
    self.sql = "SELECT\n" + ",\n".join(fragments) + "\n" + self.sql
def get_related(self, obj, Model, num=None):
    """
    Retrieve instances of ``Model`` which share tags with the model
    instance ``obj``, ordered by the number of shared tags in descending
    order.  If ``num`` is given, at most ``num`` instances are returned.
    """
    model_table = backend.quote_name(Model._meta.db_table)
    content_type = ContentType.objects.get_for_model(obj)
    related_content_type = ContentType.objects.get_for_model(Model)
    query = """
    SELECT %(model_pk)s, COUNT(related_tagged_item.object_id) AS %(count)s
    FROM %(model)s, %(tagged_item)s, %(tag)s, %(tagged_item)s related_tagged_item
    WHERE %(tagged_item)s.object_id = %%s
      AND %(tagged_item)s.content_type_id = %(content_type_id)s
      AND %(tag)s.id = %(tagged_item)s.tag_id
      AND related_tagged_item.content_type_id = %(related_content_type_id)s
      AND related_tagged_item.tag_id = %(tagged_item)s.tag_id
      AND %(model_pk)s = related_tagged_item.object_id"""
    if content_type.id == related_content_type.id:
        # When relating within the same model, exclude the instance itself.
        query += """
      AND related_tagged_item.object_id != %(tagged_item)s.object_id"""
    query += """
    GROUP BY %(model_pk)s
    ORDER BY %(count)s DESC
    %(limit_offset)s"""
    query = query % {
        'model_pk': '%s.%s' % (model_table, backend.quote_name(Model._meta.pk.column)),
        'count': backend.quote_name('count'),
        'model': model_table,
        'tagged_item': backend.quote_name(self.model._meta.db_table),
        'tag': backend.quote_name(self.model._meta.get_field('tag').rel.to._meta.db_table),
        'content_type_id': content_type.id,
        'related_content_type_id': related_content_type.id,
        'limit_offset': num is not None and backend.get_limit_offset_sql(num) or '',
    }
    cursor = connection.cursor()
    cursor.execute(query, [obj.id])
    object_ids = [row[0] for row in cursor.fetchall()]
    if not object_ids:
        return Model._default_manager.none()
    # Use in_bulk instead of an id__in lookup: id__in would clobber the
    # shared-tag-count ordering computed above.
    object_dict = Model._default_manager.in_bulk(object_ids)
    return [object_dict[object_id] for object_id in object_ids]
def iterator(self):
    """Execute the query and yield a {field_name: value} dict per row."""
    try:
        select, sql, params = self._get_sql_clause()
    except EmptyResultSet:
        raise StopIteration
    opts = self.model._meta
    if self._fields:
        # self._fields is an explicit list of field names to fetch.
        cols = [opts.get_field(name, many_to_many=False).column for name in self._fields]
        names = self._fields
    else:
        # Default to every field on the model.
        cols = [f.column for f in opts.fields]
        names = [f.attname for f in opts.fields]
    table = backend.quote_name(opts.db_table)
    select = ['%s.%s' % (table, backend.quote_name(col)) for col in cols]
    cursor = connection.cursor()
    distinct = self._distinct and "DISTINCT " or ""
    cursor.execute("SELECT " + distinct + ",".join(select) + sql, params)
    while 1:
        chunk = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
        if not chunk:
            raise StopIteration
        for row in chunk:
            yield dict(zip(names, row))
def get_where_clause(lookup_type, table_prefix, field_name, value): if table_prefix.endswith('.'): table_prefix = backend.quote_name(table_prefix[:-1]) + '.' field_name = backend.quote_name(field_name) try: return '%s%s %s' % (table_prefix, field_name, (backend.OPERATOR_MAPPING[lookup_type] % '%s')) except KeyError: pass if lookup_type == 'in': in_string = ','.join(['%s' for id in value]) if in_string: return '%s%s IN (%s)' % (table_prefix, field_name, in_string) else: raise EmptyResultSet elif lookup_type in ('range', 'year'): return '%s%s BETWEEN %%s AND %%s' % (table_prefix, field_name) elif lookup_type in ('month', 'day'): return "%s = %%s" % backend.get_date_extract_sql( lookup_type, table_prefix + field_name) elif lookup_type == 'isnull': return "%s%s IS %sNULL" % (table_prefix, field_name, (not value and 'NOT ' or '')) elif lookup_type == 'search': return backend.get_fulltext_search_sql(table_prefix + field_name) raise TypeError, "Got invalid lookup_type: %s" % repr(lookup_type)
def aggregate(self, function, field):
    """
    Performs SELECT aggregate(field) over the queryset and returns the
    results as a tuple, one entry per aggregate, with SQL NULLs coerced
    to 0.  `function` may be a single aggregate name (e.g. 'SUM') or a
    list of them.  Returns the integer 0 when the query can match no rows.

    (FIX: the old docstring claimed a single integer was returned; the
    normal return value is a tuple.)
    """
    q = self._clone()
    # Ordering/limits are irrelevant to (and can break) aggregation.
    q._order_by = ()
    q._offset = None
    q._limit = None
    q._select_related = False
    try:
        select, sql, params = q._get_sql_clause()
    except models.query.EmptyResultSet:
        return 0
    # Fully-qualified name of the aggregated column.
    id_col = "%s.%s" % (backend.quote_name(self.model._meta.db_table),
                        backend.quote_name(field))
    cursor = connection.cursor()
    if not isinstance(function, list):
        function = [function]
    cursor.execute("SELECT " + ','.join(["%s(%s)" % (f, id_col) for f in function]) + sql, params)
    result = cursor.fetchone()
    # Coerce NULL aggregates (e.g. SUM over zero rows) to 0.
    return tuple([0 if value is None else value for value in result])
def iterator(self): try: select, sql, params = self._get_sql_clause() except EmptyResultSet: raise StopIteration # self._fields is a list of field names to fetch. if self._fields: #columns = [self.model._meta.get_field(f, many_to_many=False).column for f in self._fields] if not self._select: columns = [ self.model._meta.get_field(f, many_to_many=False).column for f in self._fields ] else: columns = [] for f in self._fields: if f in [field.name for field in self.model._meta.fields]: columns.append( self.model._meta.get_field( f, many_to_many=False).column) elif not self._select.has_key(f): raise FieldDoesNotExist, '%s has no field named %r' % ( self.model._meta.object_name, f) field_names = self._fields else: # Default to all fields. columns = [f.column for f in self.model._meta.fields] field_names = [f.column for f in self.model._meta.fields] select = [ '%s.%s' % (backend.quote_name( self.model._meta.db_table), backend.quote_name(c)) for c in columns ] # Add any additional SELECTs. if self._select: select.extend([ '(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items() ]) if getattr(self, '_db_use_master', False): cursor = connection.write_cursor() else: cursor = connection.read_cursor() cursor.execute( "SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params) while 1: rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE) if not rows: raise StopIteration for row in rows: yield dict(zip(field_names, row))
def method_set_order(ordered_obj, self, id_list):
    """Persist the given pk ordering into ordered_obj's _order column."""
    cursor = connection.cursor()
    meta = ordered_obj._meta
    # Example: "UPDATE poll_choices SET _order = %s WHERE poll_id = %s AND id = %s"
    sql = "UPDATE %s SET %s = %%s WHERE %s = %%s AND %s = %%s" % (
        backend.quote_name(meta.db_table),
        backend.quote_name('_order'),
        backend.quote_name(meta.order_with_respect_to.column),
        backend.quote_name(meta.pk.column))
    rel_val = getattr(self, meta.order_with_respect_to.rel.field_name)
    cursor.executemany(sql, [(position, rel_val, pk) for position, pk in enumerate(id_list)])
    transaction.commit_unless_managed()
def execute(self):
    """Delete expired rows from the DB cache table, if DB caching is on."""
    from django.conf import settings
    import os
    if settings.CACHE_BACKEND.startswith('db://'):
        # NOTE(review): presumably TZ is set so the DB comparison against
        # UTC_TIMESTAMP() lines up with the configured time zone -- verify.
        os.environ['TZ'] = settings.TIME_ZONE
        cache_table = settings.CACHE_BACKEND[5:]  # strip the 'db://' prefix
        cursor = connection.cursor()
        cursor.execute("DELETE FROM %s WHERE %s < UTC_TIMESTAMP()" % (
            backend.quote_name(cache_table), backend.quote_name('expires')))
        transaction.commit_unless_managed()
def method_get_order(ordered_obj, self):
    """Return ordered_obj primary keys related to self, in _order order."""
    cursor = connection.cursor()
    meta = ordered_obj._meta
    # Example: "SELECT id FROM poll_choices WHERE poll_id = %s ORDER BY _order"
    sql = "SELECT %s FROM %s WHERE %s = %%s ORDER BY %s" % (
        backend.quote_name(meta.pk.column),
        backend.quote_name(meta.db_table),
        backend.quote_name(meta.order_with_respect_to.column),
        backend.quote_name('_order'))
    rel_val = getattr(self, meta.order_with_respect_to.rel.field_name)
    cursor.execute(sql, [rel_val])
    return [row[0] for row in cursor.fetchall()]
def orderlist2sql(order_list, opts, prefix=''):
    """Convert an ordering spec into an SQL ORDER BY column list."""
    if prefix.endswith('.'):
        prefix = backend.quote_name(prefix[:-1]) + '.'
    clauses = []
    for item in handle_legacy_orderlist(order_list):
        if item == '?':
            # Random ordering.
            clauses.append(backend.get_random_function_sql())
        elif item.startswith('-'):
            clauses.append('%s%s DESC' % (prefix, backend.quote_name(orderfield2column(item[1:], opts))))
        else:
            clauses.append('%s%s ASC' % (prefix, backend.quote_name(orderfield2column(item, opts))))
    return ', '.join(clauses)
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): op = is_next and '>' or '<' where = '(%s %s %%s OR (%s = %%s AND %s.%s %s %%s))' % \ (backend.quote_name(field.column), op, backend.quote_name(field.column), backend.quote_name(self._meta.db_table), backend.quote_name(self._meta.pk.column), op) param = str(getattr(self, field.attname)) q = self.__class__._default_manager.filter(**kwargs).order_by((not is_next and '-' or '') + field.name, (not is_next and '-' or '') + self._meta.pk.name) q._where.append(where) q._params.extend([param, param, getattr(self, self._meta.pk.attname)]) try: return q[0] except IndexError: raise self.DoesNotExist, "%s matching query does not exist." % self.__class__._meta.object_name
def _get_next_or_previous_in_order(self, is_next):
    """Return the next/previous object by _order, caching on the instance."""
    cachename = "__%s_order_cache" % is_next
    if not hasattr(self, cachename):
        op = '>' if is_next else '<'
        order_field = self._meta.order_with_respect_to
        qn = backend.quote_name
        where = [
            '%s %s (SELECT %s FROM %s WHERE %s=%%s)' % (
                qn('_order'), op, qn('_order'),
                qn(self._meta.db_table), qn(self._meta.pk.column)),
            '%s=%%s' % qn(order_field.column),
        ]
        params = [self._get_pk_val(), getattr(self, order_field.attname)]
        obj = self._default_manager.order_by('_order').extra(where=where, params=params)[:1].get()
        setattr(self, cachename, obj)
    return getattr(self, cachename)
def get_intersection_by_model(self, Model, tags):
    """
    Create a queryset matching instances of the given Model associated
    with *all* of the given tags.

    FIXME: once the ORM supports GROUP BY/HAVING this should become a
    single non-explicit-join query (as in get_by_model); the current
    two-step approach can exceed the allowed IN-clause length when many
    ids match.
    """
    tags = get_tag_list(tags)
    tag_count = len(tags)
    model_table = backend.quote_name(Model._meta.db_table)
    model_pk = '%s.%s' % (model_table, backend.quote_name(Model._meta.pk.column))
    # Select the ids of all objects carrying every one of the given tags.
    query = """
    SELECT %(model_pk)s
    FROM %(model)s, %(tagged_item)s
    WHERE %(tagged_item)s.content_type_id = %(content_type_id)s
      AND %(tagged_item)s.tag_id IN (%(tag_id_placeholders)s)
      AND %(model_pk)s = %(tagged_item)s.object_id
    GROUP BY %(model_pk)s
    HAVING COUNT(%(model_pk)s) = %(tag_count)s""" % {
        'model_pk': model_pk,
        'model': model_table,
        'tagged_item': backend.quote_name(self.model._meta.db_table),
        'content_type_id': ContentType.objects.get_for_model(Model).id,
        'tag_id_placeholders': ','.join(['%s'] * tag_count),
        'tag_count': tag_count,
    }
    cursor = connection.cursor()
    cursor.execute(query, [tag.id for tag in tags])
    object_ids = [row[0] for row in cursor.fetchall()]
    if object_ids:
        return Model._default_manager.filter(pk__in=object_ids)
    return Model._default_manager.none()
def create_test_db(verbosity=1, autoclobber=False):
    # Create (or clobber and re-create) the test database.
    # NOTE(review): this snippet is truncated -- it ends with a dangling
    # "else:" (the branch where the user declines to clobber is missing).
    if verbosity >= 1:
        print "Creating test database..."
    # If we're using SQLite, it's more convenient to test against an
    # in-memory database.
    if settings.DATABASE_ENGINE == "sqlite3":
        TEST_DATABASE_NAME = ":memory:"
    else:
        # Per-engine CREATE DATABASE suffix (charset/encoding options).
        suffix = {
            'postgresql': get_postgresql_create_suffix,
            'postgresql_psycopg2': get_postgresql_create_suffix,
            'mysql': get_mysql_create_suffix,
            'mysql_old': get_mysql_create_suffix,
        }.get(settings.DATABASE_ENGINE, lambda: '')()
        if settings.TEST_DATABASE_NAME:
            TEST_DATABASE_NAME = settings.TEST_DATABASE_NAME
        else:
            TEST_DATABASE_NAME = TEST_DATABASE_PREFIX + settings.DATABASE_NAME
    # Create the test database and connect to it. We need to autocommit
    # if the database supports it because PostgreSQL doesn't allow
    # CREATE/DROP DATABASE statements within transactions.
    cursor = connection.cursor()
    _set_autocommit(connection)
    try:
        cursor.execute("CREATE DATABASE %s %s" % (backend.quote_name(TEST_DATABASE_NAME), suffix))
    except Exception, e:
        sys.stderr.write("Got an error creating the test database: %s\n" % e)
        if not autoclobber:
            confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_NAME)
        if autoclobber or confirm == 'yes':
            try:
                if verbosity >= 1:
                    print "Destroying old test database..."
                cursor.execute("DROP DATABASE %s" % backend.quote_name(TEST_DATABASE_NAME))
                if verbosity >= 1:
                    print "Creating test database..."
                cursor.execute("CREATE DATABASE %s %s" % (backend.quote_name(TEST_DATABASE_NAME), suffix))
            except Exception, e:
                sys.stderr.write("Got an error recreating the test database: %s\n" % e)
                sys.exit(2)
        else:
def _get_next_or_previous_in_order(self, is_next):
    """Fetch (and cache per-instance) the adjacent object in _order."""
    cachename = "__%s_order_cache" % is_next
    if not hasattr(self, cachename):
        comparison = is_next and '>' or '<'
        order_field = self._meta.order_with_respect_to
        # Compare _order against the current row's _order via a subquery,
        # constrained to rows sharing the same order_with_respect_to value.
        subquery_clause = '%s %s (SELECT %s FROM %s WHERE %s=%%s)' % (
            backend.quote_name('_order'), comparison, backend.quote_name('_order'),
            backend.quote_name(self._meta.db_table),
            backend.quote_name(self._meta.pk.column))
        sibling_clause = '%s=%%s' % backend.quote_name(order_field.column)
        neighbour = self._default_manager.order_by('_order').extra(
            where=[subquery_clause, sibling_clause],
            params=[self._get_pk_val(), getattr(self, order_field.attname)])[:1].get()
        setattr(self, cachename, neighbour)
    return getattr(self, cachename)
def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): op = is_next and '>' or '<' where = '(%s %s %%s OR (%s = %%s AND %s.%s %s %%s))' % \ (backend.quote_name(field.column), op, backend.quote_name(field.column), backend.quote_name(self._meta.db_table), backend.quote_name(self._meta.pk.column), op) param = str(getattr(self, field.attname)) q = self.__class__._default_manager.filter(**kwargs).order_by( (not is_next and '-' or '') + field.name, (not is_next and '-' or '') + self._meta.pk.name) q._where.append(where) q._params.extend([param, param, getattr(self, self._meta.pk.attname)]) try: return q[0] except IndexError: raise self.DoesNotExist, "%s matching query does not exist." % self.__class__._meta.object_name
def quote_name(s):
    """Quote a database identifier via whichever API `quote_flag` selects."""
    from django.db import backend
    if quote_flag == "old":
        # Legacy module-level function.
        return backend.quote_name(s)
    # Newer DatabaseOperations-based API.
    return backend.DatabaseOperations().quote_name(s)
def orderlist2sql(order_list, opts, prefix=''):
    """Build the ORDER BY column list for the given ordering spec."""
    if prefix.endswith('.'):
        prefix = backend.quote_name(prefix[:-1]) + '.'
    parts = []
    for spec in handle_legacy_orderlist(order_list):
        if spec == '?':
            # '?' means random ordering.
            parts.append(backend.get_random_function_sql())
            continue
        if spec.startswith('-'):
            direction, name = 'DESC', spec[1:]
        else:
            direction, name = 'ASC', spec
        parts.append('%s%s %s' % (prefix, backend.quote_name(orderfield2column(name, opts)), direction))
    return ', '.join(parts)
def count(self):
    """
    Performs a SELECT COUNT() and returns the number of records as an
    integer.  If the queryset is already cached (self._result_cache is
    set) the cached length is returned to avoid repeated COUNT queries.
    """
    if self._result_cache is not None:
        return len(self._result_cache)
    counter = self._clone()
    counter._order_by = ()
    counter._select_related = False
    # Strip LIMIT/OFFSET from the SQL; they're applied to the count below.
    offset, limit = counter._offset, counter._limit
    counter._offset = None
    counter._limit = None
    try:
        select, sql, params = counter._get_sql_clause()
    except EmptyResultSet:
        return 0
    # Open a cursor labeled with this model's read-labeling information.
    epls, desls = self.model.get_read_labeling(self._itag_has)
    cursor = connection.cursor(epls, desls)
    if self._distinct:
        pk_col = "%s.%s" % (backend.quote_name(self.model._meta.db_table),
                            backend.quote_name(self.model._meta.pk.column))
        cursor.execute("SELECT COUNT(DISTINCT(%s))" % pk_col + sql, params)
    else:
        cursor.execute("SELECT COUNT(*)" + sql, params)
    count = cursor.fetchone()[0]
    # LIMIT/OFFSET don't change COUNT()'s output, so emulate them manually.
    if offset:
        count = max(0, count - offset)
    if limit:
        count = min(limit, count)
    return count
def dump_many2many(model):
    """Yield a cursor wrapper for each many-to-many join table of `model`."""
    from django.db import connection, backend
    cursor = connection.cursor()
    for table, fields in get_model_many2many_stru(model):
        column_sql = ','.join([backend.quote_name(f) for f in fields])
        cursor.execute('select %s from %s' % (column_sql, backend.quote_name(table)))
        yield call_cursor(table, fields, cursor)
def dump_model(model):
    """Select every row of `model`'s table and return a cursor wrapper."""
    from django.db import connection, backend
    opts = model._meta
    cursor = connection.cursor()
    fields, default = get_model_stru(model)
    column_sql = ','.join([backend.quote_name(f) for f in fields])
    cursor.execute('select %s from %s' % (column_sql, backend.quote_name(opts.db_table)))
    return call_cursor(opts.db_table, fields, cursor)
def contains_group_row_level_perms(self, perm, ct):
    """
    Return True when one of this user's groups holds row-level
    permission `perm` for content type `ct`.
    """
    cursor = connection.cursor()
    qn = backend.quote_name
    sql = """
    SELECT COUNT(*) FROM %s ug, %s rlp, %s ct
    WHERE rlp.%s = ug.%s
    AND ug.%s=%%s
    AND rlp.%s = %%s
    AND rlp.%s = %%s
    AND rlp.%s = %%s
    AND rlp.%s = %%s""" % (
        qn('auth_user_groups'), qn('auth_rowlevelpermission'), qn('django_content_type'),
        qn('owner_id'), qn('group_id'),
        qn('user_id'),
        qn('negative'),
        qn('owner_ct_id'),
        qn('model_ct_id'),
        qn('permission_id'))
    # Parameter order matches the placeholders above: this user's id,
    # negative=False, owner content type = Group, target content type,
    # and the permission id.
    cursor.execute(sql, [self.id, False,
                         ContentType.objects.get_for_model(Group).id,
                         ct.id, perm.id])
    return int(cursor.fetchone()[0]) > 0
def match_table_alias(self, match):
    """Second pass: replace references to Model tables with their SQL alias."""
    from django.db import backend
    alias = match.group("alias")
    attr = match.group("attr")
    if alias not in self.table_by_alias:
        # Not one of ours -- leave the original text untouched.
        return match.group(0)
    table = self.table_by_alias[alias]
    field = table.meta.get_field(attr)
    return backend.quote_name(table.sql_alias) + "." + backend.quote_name(field.column)
def iterator(self): try: select, sql, params = self._get_sql_clause() except EmptyResultSet: raise StopIteration # self._fields is a list of field names to fetch. if self._fields: # columns = [self.model._meta.get_field(f, many_to_many=False).column for f in self._fields] if not self._select: columns = [self.model._meta.get_field(f, many_to_many=False).column for f in self._fields] else: columns = [] for f in self._fields: if f in [field.name for field in self.model._meta.fields]: columns.append(self.model._meta.get_field(f, many_to_many=False).column) elif not self._select.has_key(f): raise FieldDoesNotExist, "%s has no field named %r" % (self.model._meta.object_name, f) field_names = self._fields else: # Default to all fields. columns = [f.column for f in self.model._meta.fields] field_names = [f.column for f in self.model._meta.fields] select = ["%s.%s" % (backend.quote_name(self.model._meta.db_table), backend.quote_name(c)) for c in columns] # Add any additional SELECTs. if self._select: select.extend( ["(%s) AS %s" % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()] ) if getattr(self, "_db_use_master", False): cursor = connection.write_cursor() else: cursor = connection.read_cursor() cursor.execute("SELECT " + (self._distinct and "DISTINCT " or "") + ",".join(select) + sql, params) while 1: rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE) if not rows: raise StopIteration for row in rows: yield dict(zip(field_names, row))
def assign_user_to_admin_job_tasks(user):
    """
    Assigns the given User to all Admin Job Tasks via a single bulk
    INSERT into the m2m join table.
    """
    admin_job = Job.objects.get(id=settings.ADMIN_JOB_ID)
    task_ids = [task.id for task in admin_job.tasks.all()]
    if not task_ids:
        # FIX: with no tasks the old code executed
        # "INSERT INTO ... VALUES " with an empty VALUES list -- a SQL
        # syntax error.  Nothing to assign, so just return.
        return
    db_table = Task._meta.get_field('assigned_users', True).m2m_db_table()
    query = 'INSERT INTO %s (%s,%s) VALUES %s' % (
        backend.quote_name(db_table),
        backend.quote_name('task_id'),
        backend.quote_name('user_id'),
        ','.join(['(%s,%s)'] * len(task_ids)),
    )
    # One (task_id, user_id) parameter pair per VALUES tuple.
    params = []
    for task_id in task_ids:
        params.extend([task_id, user.id])
    cursor = connection.cursor()
    cursor.execute(query, params)
def count(self):
    """
    Performs a SELECT COUNT() and returns the number of records as an
    integer.  Returns the cached result-set length when available, to
    avoid issuing repeated SELECT COUNT(*) queries.
    """
    if self._result_cache is not None:
        return len(self._result_cache)
    counter = self._clone()
    counter._order_by = ()
    counter._select_related = False
    # Remember LIMIT/OFFSET; they're stripped from the SQL and applied
    # to the resulting count by hand below.
    offset, limit = counter._offset, counter._limit
    counter._offset = None
    counter._limit = None
    try:
        select, sql, params = counter._get_sql_clause()
    except EmptyResultSet:
        return 0
    cursor = connection.cursor()
    if self._distinct:
        pk_col = "%s.%s" % (backend.quote_name(self.model._meta.db_table),
                            backend.quote_name(self.model._meta.pk.column))
        cursor.execute("SELECT COUNT(DISTINCT(%s))" % pk_col + sql, params)
    else:
        cursor.execute("SELECT COUNT(*)" + sql, params)
    count = cursor.fetchone()[0]
    # COUNT() ignores LIMIT/OFFSET, so emulate their effect here.
    if offset:
        count = max(0, count - offset)
    if limit:
        count = min(limit, count)
    return count
def create_test_db(verbosity=1, autoclobber=False):
    # Create (or clobber and re-create) the test database.
    # NOTE(review): this snippet is truncated -- it ends with a dangling
    # "else:" (the branch where the user declines to clobber is missing).
    if verbosity >= 1:
        print "Creating test database..."
    # If we're using SQLite, it's more convenient to test against an
    # in-memory database.
    if settings.DATABASE_ENGINE == "sqlite3":
        TEST_DATABASE_NAME = ":memory:"
    else:
        # Per-engine CREATE DATABASE suffix (charset/encoding options).
        suffix = {
            'postgresql': get_postgresql_create_suffix,
            'postgresql_psycopg2': get_postgresql_create_suffix,
            'mysql': get_mysql_create_suffix,
            'mysql_old': get_mysql_create_suffix,
        }.get(settings.DATABASE_ENGINE, lambda: '')()
        if settings.TEST_DATABASE_NAME:
            TEST_DATABASE_NAME = settings.TEST_DATABASE_NAME
        else:
            TEST_DATABASE_NAME = TEST_DATABASE_PREFIX + settings.DATABASE_NAME
    # Create the test database and connect to it. We need to autocommit
    # if the database supports it because PostgreSQL doesn't allow
    # CREATE/DROP DATABASE statements within transactions.
    cursor = connection.cursor()
    _set_autocommit(connection)
    try:
        cursor.execute("CREATE DATABASE %s %s" % (backend.quote_name(TEST_DATABASE_NAME), suffix))
    except Exception, e:
        sys.stderr.write("Got an error creating the test database: %s\n" % e)
        if not autoclobber:
            confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_NAME)
        if autoclobber or confirm == 'yes':
            try:
                if verbosity >= 1:
                    print "Destroying old test database..."
                cursor.execute("DROP DATABASE %s" % backend.quote_name(TEST_DATABASE_NAME))
                if verbosity >= 1:
                    print "Creating test database..."
                cursor.execute("CREATE DATABASE %s %s" % (backend.quote_name(TEST_DATABASE_NAME), suffix))
            except Exception, e:
                sys.stderr.write("Got an error recreating the test database: %s\n" % e)
                sys.exit(2)
        else:
def in_bulk(self, id_list):
    """
    Returns a dictionary mapping each of the given IDs to the object with
    that ID.
    """
    assert self._limit is None and self._offset is None, \
        "Cannot use 'limit' or 'offset' with in_bulk"
    assert isinstance(id_list, (tuple, list)), "in_bulk() must be provided with a list of IDs."
    id_list = list(id_list)
    if not id_list:
        return {}
    qs = self._clone()
    placeholders = ",".join(['%s'] * len(id_list))
    qs._where.append("%s.%s IN (%s)" % (
        backend.quote_name(self.model._meta.db_table),
        backend.quote_name(self.model._meta.pk.column),
        placeholders))
    qs._params.extend(id_list)
    return dict([(obj._get_pk_val(), obj) for obj in qs.iterator()])
def iterator(self):
    """Return the distinct truncated dates for self._field as datetimes."""
    from django.db.backends.util import typecast_timestamp
    self._order_by = ()  # Clear this because it'll mess things up otherwise.
    qn = backend.quote_name
    qualified_col = '%s.%s' % (qn(self.model._meta.db_table), qn(self._field.column))
    if self._field.null:
        # NULL dates can't be truncated/grouped meaningfully.
        self._where.append('%s IS NOT NULL' % qualified_col)
    try:
        select, sql, params = self._get_sql_clause()
    except EmptyResultSet:
        raise StopIteration
    sql = 'SELECT %s %s GROUP BY 1 ORDER BY 1 %s' % (
        backend.get_date_trunc_sql(self._kind, qualified_col), sql, self._order)
    cursor = connection.cursor()
    cursor.execute(sql, params)
    # MySQL returns date-function results as strings rather than datetime
    # objects, so run typecast_timestamp(str()) on each value manually.
    return [typecast_timestamp(str(row[0])) for row in cursor.fetchall()]
def iterator(self):
    """Yield-free date iterator: distinct truncated values of self._field."""
    from django.db.backends.util import typecast_timestamp
    # Ordering must be cleared or it interferes with the GROUP BY below.
    self._order_by = ()
    table = backend.quote_name(self.model._meta.db_table)
    column = backend.quote_name(self._field.column)
    if self._field.null:
        # Exclude NULLs before grouping.
        self._where.append('%s.%s IS NOT NULL' % (table, column))
    try:
        select, sql, params = self._get_sql_clause()
    except EmptyResultSet:
        raise StopIteration
    trunc_expr = backend.get_date_trunc_sql(self._kind, '%s.%s' % (table, column))
    sql = 'SELECT %s %s GROUP BY 1 ORDER BY 1 %s' % (trunc_expr, sql, self._order)
    cursor = connection.cursor()
    cursor.execute(sql, params)
    # MySQL hands back date-function results as strings, not datetimes,
    # so typecast each one explicitly.
    return [typecast_timestamp(str(result[0])) for result in cursor.fetchall()]
def search(self, query):
    """
    Full-text search on self._search_field using MySQL MATCH...AGAINST.
    Returns a queryset annotated with a 'relevance' extra-select and
    filtered by the same MATCH expression.
    """
    meta = self.model._meta
    # FIX: removed leftover debug `print` statements.
    # Get the table name and column name from the model,
    # in `table_name`.`column_name` style.
    column = meta.get_field(self._search_field, many_to_many=False).column
    full_name = "%s.%s" % (backend.quote_name(meta.db_table),
                           backend.quote_name(column))
    # Create the MATCH...AGAINST expression.
    match_expr = "MATCH(%s) AGAINST (%%s)" % full_name
    # The expression appears both in SELECT and WHERE, hence `query` twice.
    return self.extra(select={'relevance': match_expr},
                      where=[match_expr],
                      params=[query, query])
def search(self, query):
    """Full-text search over self._search_fields via MySQL MATCH...AGAINST."""
    meta = self.model._meta
    # Build `table_name`.`column_name` references for every search field.
    table = backend.quote_name(meta.db_table)
    qualified = ["%s.%s" % (table, backend.quote_name(meta.get_field(name, many_to_many=False).column))
                 for name in self._search_fields]
    # Create the MATCH...AGAINST expression over the combined column list.
    match_expr = "MATCH(%s) AGAINST (%%s)" % ", ".join(qualified)
    # Used as both the relevance SELECT and the WHERE filter, so the
    # query parameter is supplied twice.
    return self.extra(select={'relevance': match_expr},
                      where=[match_expr],
                      params=[query, query])
def search(self, query):
    """MySQL MATCH...AGAINST full-text search across the configured fields."""
    meta = self.model._meta
    # Resolve each search field to a quoted `table`.`column` reference.
    column_refs = []
    for name in self._search_fields:
        column = meta.get_field(name, many_to_many=False).column
        column_refs.append("%s.%s" % (backend.quote_name(meta.db_table),
                                      backend.quote_name(column)))
    match_expr = "MATCH(%s) AGAINST (%%s)" % ", ".join(column_refs)
    # Same expression drives both the relevance column and the filter.
    return self.extra(select={'relevance': match_expr},
                      where=[match_expr],
                      params=[query, query])
def get_by_model(self, Model, tags):
    """
    Create a queryset matching instances of the given Model associated
    with a given Tag or list of Tags.
    """
    tags = get_tag_list(tags)
    if len(tags) != 1:
        # Multiple tags need the GROUP BY/HAVING based intersection query.
        return self.get_intersection_by_model(Model, tags)
    tag = tags[0]  # Optimisation for single tag
    ctype = ContentType.objects.get_for_model(Model)
    rel_table = backend.quote_name(self.model._meta.db_table)
    return Model._default_manager.extra(
        tables=[self.model._meta.db_table],  # Use a non-explicit join
        where=[
            '%s.content_type_id = %%s' % rel_table,
            '%s.tag_id = %%s' % rel_table,
            '%s.%s = %s.object_id' % (backend.quote_name(Model._meta.db_table),
                                      backend.quote_name(Model._meta.pk.column),
                                      rel_table)
        ],
        params=[ctype.id, tag.id],
    )
def search(self, query):
    """
    Full-text search on self._search_field using MySQL MATCH...AGAINST.
    Returns a queryset annotated with a 'relevance' extra-select and
    filtered by the same MATCH expression.
    """
    meta = self.model._meta
    # FIX: removed leftover debug `print` statements.
    # Get the table name and column name from the model,
    # in `table_name`.`column_name` style.
    column = meta.get_field(self._search_field, many_to_many=False).column
    full_name = "%s.%s" % (backend.quote_name(meta.db_table),
                           backend.quote_name(column))
    # Create the MATCH...AGAINST expression.
    match_expr = "MATCH(%s) AGAINST (%%s)" % full_name
    # The expression appears both in SELECT and WHERE, hence `query` twice.
    return self.extra(select={'relevance': match_expr},
                      where=[match_expr],
                      params=[query, query])
def get_where_clause(lookup_type, table_prefix, field_name, value):
    """
    Return the SQL WHERE fragment for a single lookup (exact, in,
    range, isnull, ...), leaving `%s` placeholders for parameters.

    Raises EmptyResultSet for an 'in' lookup with no values, and
    TypeError for an unknown lookup_type.
    """
    if table_prefix.endswith('.'):
        table_prefix = backend.quote_name(table_prefix[:-1]) + '.'
    field_name = backend.quote_name(field_name)
    # Simple operators (exact, gt, contains, ...) come straight from
    # the backend's operator table.
    try:
        return '%s%s %s' % (table_prefix, field_name,
                            backend.OPERATOR_MAPPING[lookup_type] % '%s')
    except KeyError:
        pass
    if lookup_type == 'in':
        placeholders = ','.join(['%s' for _ in value])
        if not placeholders:
            # "IN ()" is invalid SQL and could never match anything.
            raise EmptyResultSet
        return '%s%s IN (%s)' % (table_prefix, field_name, placeholders)
    if lookup_type in ('range', 'year'):
        return '%s%s BETWEEN %%s AND %%s' % (table_prefix, field_name)
    if lookup_type in ('month', 'day'):
        return "%s = %%s" % backend.get_date_extract_sql(
            lookup_type, table_prefix + field_name)
    if lookup_type == 'isnull':
        return "%s%s IS %sNULL" % (table_prefix, field_name,
                                   (not value and 'NOT ' or ''))
    if lookup_type == 'search':
        return backend.get_fulltext_search_sql(table_prefix + field_name)
    raise TypeError("Got invalid lookup_type: %s" % repr(lookup_type))
def in_bulk(self, id_list):
    """
    Returns a dictionary mapping each of the given IDs to the object
    with that ID.
    """
    assert self._limit is None and self._offset is None, \
            "Cannot use 'limit' or 'offset' with in_bulk"
    assert isinstance(id_list, (tuple, list)), "in_bulk() must be provided with a list of IDs."
    id_list = list(id_list)
    if not id_list:
        return {}
    clone = self._clone()
    opts = self.model._meta
    # Restrict the clone to rows whose primary key appears in id_list.
    placeholders = ",".join(['%s'] * len(id_list))
    clone._where.append("%s.%s IN (%s)" % (
        backend.quote_name(opts.db_table),
        backend.quote_name(opts.pk.column),
        placeholders))
    clone._params.extend(id_list)
    result = {}
    for obj in clone.iterator():
        result[obj._get_pk_val()] = obj
    return result
def match_model_table(self, match):
    """ first pass: find all Model tables like Model:name

    Regex-substitution callback: registers the matched model/alias
    pair on self and returns the quoted '"table" "alias"' SQL
    fragment that replaces the match.
    """
    from django.db import backend
    # NOTE(review): a plain ':' separator appears to mean the table's
    # columns ARE selected (noselect False); the defining regex is not
    # visible here — confirm against the pattern.
    noselect = match.group("noselect")
    if noselect == ':':
        noselect = False
    model = match.group("model")
    alias = match.group("alias")
    # Each alias may only be bound to one table per query.
    if self.table_by_alias.has_key(alias):
        raise Exception("duplicate alias: %s" %(alias,))
    table = self.DjSelectModelTable(model, alias, self.caller_frame)
    table.noselect = noselect
    self.table_by_alias[alias] = table
    if not noselect:
        # Track where this table's columns begin in the combined
        # result row, and advance the running offset for later tables.
        table.column_start += self.column_start
        self.column_start += len(table.fields)
    self.table_inorder.append(table)
    return(backend.quote_name(table.db_table) + " " + backend.quote_name(table.sql_alias))
def destroy_test_db(old_database_name, verbosity=1):
    # Unless we're using SQLite, remove the test database to clean up after
    # ourselves. Connect to the previous database (not the test database)
    # to do so, because it's not allowed to delete a database while being
    # connected to it.
    if verbosity >= 1:
        print "Destroying test database..."
    connection.close()
    # Swap the real database name back in so the next connection (used
    # for the DROP below) targets the original database, not the test one.
    TEST_DATABASE_NAME = settings.DATABASE_NAME
    settings.DATABASE_NAME = old_database_name
    if settings.DATABASE_ENGINE != "sqlite3":
        cursor = connection.cursor()
        _set_autocommit(connection)
        time.sleep(1) # To avoid "database is being accessed by other users" errors.
        cursor.execute("DROP DATABASE %s" % backend.quote_name(TEST_DATABASE_NAME))
        connection.close()
def get_counts_in_bulk(self, objects):
    """
    Get a dictionary mapping object ids to the total number of
    comments made against each object.
    """
    ctype = ContentType.objects.get_for_model(objects[0])
    # One placeholder per object for the IN (...) clause.
    in_clause = ','.join(['%s'] * len(objects))
    query = """
    SELECT object_id, COUNT(object_id)
    FROM %s
    WHERE content_type_id = %%s
        AND object_id IN (%s)
    GROUP BY object_id""" % (backend.quote_name(self.model._meta.db_table),
                             in_clause)
    cursor = connection.cursor()
    cursor.execute(query, [ctype.id] + [obj.id for obj in objects])
    # Rows come back as (object_id, num_comments) pairs.
    return dict(cursor.fetchall())
def destroy_test_db(old_database_name, verbosity=1):
    # Unless we're using SQLite, remove the test database to clean up after
    # ourselves. Connect to the previous database (not the test database)
    # to do so, because it's not allowed to delete a database while being
    # connected to it.
    if verbosity >= 1:
        print "Destroying test database..."
    connection.close()
    # Restore the original database name in settings so reconnecting
    # below does not attach to the test database we are about to drop.
    TEST_DATABASE_NAME = settings.DATABASE_NAME
    settings.DATABASE_NAME = old_database_name
    if settings.DATABASE_ENGINE != "sqlite3":
        cursor = connection.cursor()
        _set_autocommit(connection)
        time.sleep(1) # To avoid "database is being accessed by other users" errors.
        cursor.execute("DROP DATABASE %s" % backend.quote_name(TEST_DATABASE_NAME))
        connection.close()
def has_module_row_level_perms(self, app_label):
    """
    Check whether this user directly owns any row level permission on
    models in the given application; falls back to group-held
    permissions when none are found.
    """
    qn = backend.quote_name
    sql = """
    SELECT COUNT(*)
    FROM %s ct, %s rlp
    WHERE rlp.%s = ct.%s
        AND ct.%s=%%s
        AND rlp.%s = %%s
        AND rlp.%s = %%s
        AND rlp.%s = %%s """ % (
        qn('django_content_type'), qn('auth_rowlevelpermission'),
        qn('model_ct_id'), qn('id'),
        qn('app_label'),
        qn('owner_ct_id'), qn('owner_id'), qn('negative'))
    cursor = connection.cursor()
    user_ct = ContentType.objects.get_for_model(User)
    cursor.execute(sql, [app_label, user_ct.id, self.id, False])
    if int(cursor.fetchone()[0]) > 0:
        return True
    # No direct permission row; check the user's groups.
    return self.has_module_group_row_level_perms(app_label)
def get_counts_in_bulk(self, objects):
    """
    Get a dictionary mapping object ids to the total number of
    comments made against each object.
    """
    table = backend.quote_name(self.model._meta.db_table)
    in_clause = ','.join(['%s'] * len(objects))
    sql = """
    SELECT object_id, COUNT(object_id)
    FROM %s
    WHERE content_type_id = %%s
        AND object_id IN (%s)
    GROUP BY object_id""" % (table, in_clause)
    ctype = ContentType.objects.get_for_model(objects[0])
    cursor = connection.cursor()
    cursor.execute(sql, [ctype.id] + [obj.id for obj in objects])
    counts = {}
    # Each row is an (object_id, num_comments) pair.
    for object_id, num_comments in cursor.fetchall():
        counts[object_id] = num_comments
    return counts
def get_sql(self, opts):
    """
    Build on the superclass's SQL, renaming every join alias with
    self._join_suffix and rewriting references to the old aliases in
    both the join conditions and the WHERE clauses.
    """
    joins, where, params = (
        super(ExtendedManager._CustomJoinQ, self).get_sql(opts))
    new_joins = datastructures.SortedDict()
    # rename all join aliases and correct references in later joins
    renamed_tables = []
    # using iteritems seems to mess up the ordering here
    for alias, (table, join_type, condition) in joins.items():
        alias = self._unquote_name(alias)
        new_alias = alias + self._join_suffix
        # Accumulate (old, new) pairs so each condition is rewritten
        # against every rename seen so far — order matters.
        renamed_tables.append((alias, new_alias))
        condition = self._substitute_aliases(renamed_tables, condition)
        new_alias = backend.quote_name(new_alias)
        new_joins[new_alias] = (table, join_type, condition)
    # correct references in where
    new_where = []
    for clause in where:
        new_where.append(
            self._substitute_aliases(renamed_tables, clause))
    return new_joins, new_where, params
def quote_only_if_word(word):
    """
    Quote `word` as a database identifier unless it contains any
    non-word character (space, dot, parenthesis, ...), in which case
    it is assumed to be an SQL expression and returned unchanged.
    """
    # Raw string for the regex: '\W' happens to work unescaped, but
    # r'\W' is the robust, idiomatic form.
    if re.search(r'\W', word):
        # Don't quote if there are spaces or non-word chars.
        return word
    return backend.quote_name(word)
def check_group_row_level_permissions(self, permission, object):
    """
    Look up the row level permission granted through any of this
    user's groups for the given object and permission. Returns None
    when no matching row exists, otherwise the negation of the row's
    'negative' flag.
    """
    qn = backend.quote_name
    sql = """
    SELECT rlp.%s
    FROM %s ug, %s rlp
    WHERE rlp.%s = ug.%s
        AND ug.%s=%%s
        AND rlp.%s=%%s
        AND rlp.%s=%%s
        AND rlp.%s=%%s
        AND rlp.%s=%%s
    ORDER BY rlp.%s""" % (
        qn('negative'), qn('auth_user_groups'), qn('auth_rowlevelpermission'),
        qn('owner_id'), qn('group_id'), qn('user_id'), qn('owner_ct_id'),
        qn('model_id'), qn('model_ct_id'), qn('permission_id'), qn('negative'))
    cursor = connection.cursor()
    cursor.execute(sql, [
        self.id,
        ContentType.objects.get_for_model(Group).id,
        object._get_pk_val(),
        ContentType.objects.get_for_model(object).id,
        permission.id,
    ])
    row = cursor.fetchone()
    if row is None:
        return None
    # 'negative' rows deny, so invert the flag.
    return not row[0]
def get_group_permissions(self):
    "Returns a list of permission strings that this user has through his/her groups."
    if not hasattr(self, '_group_perm_cache'):
        import sets
        cursor = connection.cursor()
        # The SQL below works out to the following, after DB quoting:
        # cursor.execute("""
        #     SELECT ct."app_label", p."codename"
        #     FROM "auth_permission" p, "auth_group_permissions" gp, "auth_user_groups" ug, "django_content_type" ct
        #     WHERE p."id" = gp."permission_id"
        #         AND gp."group_id" = ug."group_id"
        #         AND ct."id" = p."content_type_id"
        #         AND ug."user_id" = %s, [self.id])
        sql = """
            SELECT ct.%s, p.%s
            FROM %s p, %s gp, %s ug, %s ct
            WHERE p.%s = gp.%s
                AND gp.%s = ug.%s
                AND ct.%s = p.%s
                AND ug.%s = %%s""" % (
            backend.quote_name('app_label'), backend.quote_name('codename'),
            backend.quote_name('auth_permission'), backend.quote_name('auth_group_permissions'),
            backend.quote_name('auth_user_groups'), backend.quote_name('django_content_type'),
            backend.quote_name('id'), backend.quote_name('permission_id'),
            backend.quote_name('group_id'), backend.quote_name('group_id'),
            backend.quote_name('id'), backend.quote_name('content_type_id'),
            backend.quote_name('user_id'),)
        cursor.execute(sql, [self.id])
        # Cached per-instance as a set of "app_label.codename" strings.
        self._group_perm_cache = sets.Set(["%s.%s" % (row[0], row[1]) for row in cursor.fetchall()])
    return self._group_perm_cache
def usage_for_model(self, Model, counts=False, min_count=None, filters=None):
    """
    Obtain a list of tags associated with instances of the given
    Model.

    If ``counts`` is True, a ``count`` attribute will be added to each
    tag, indicating how many times it has been used against the Model
    in question.

    If ``min_count`` is given, only tags which have a ``count``
    greater than or equal to ``min_count`` will be returned.
    Passing a value for ``min_count`` implies ``counts=True``.

    To limit the tags (and counts, if specified) returned to those
    used by a subset of the Model's instances, pass a dictionary of
    field lookups to be applied to the given Model as the ``filters``
    argument.
    """
    if filters is None: filters = {}
    # min_count only makes sense when counting, so force counts on.
    if min_count is not None: counts = True
    model_table = backend.quote_name(Model._meta.db_table)
    model_pk = '%s.%s' % (model_table, backend.quote_name(Model._meta.pk.column))
    # The three bare %%s slots (extra joins, extra criteria, HAVING)
    # are filled in by a second % substitution further down, once the
    # filters have been parsed.
    query = """
    SELECT DISTINCT %(tag)s.id, %(tag)s.name%(count_sql)s
    FROM
        %(tag)s
        INNER JOIN %(tagged_item)s
            ON %(tag)s.id = %(tagged_item)s.tag_id
        INNER JOIN %(model)s
            ON %(tagged_item)s.object_id = %(model_pk)s
        %%s
    WHERE %(tagged_item)s.content_type_id = %(content_type_id)s
        %%s
    GROUP BY %(tag)s.id, %(tag)s.name
    %%s
    ORDER BY %(tag)s.name ASC""" % {
        'tag': backend.quote_name(self.model._meta.db_table),
        'count_sql': counts and (', COUNT(%s)' % model_pk) or '',
        'tagged_item': backend.quote_name(
            self._get_related_model_by_accessor('items')._meta.db_table),
        'model': model_table,
        'model_pk': model_pk,
        'content_type_id': ContentType.objects.get_for_model(Model).id,
    }
    extra_joins = ''
    extra_criteria = ''
    min_count_sql = ''
    params = []
    if len(filters) > 0:
        # Translate the field lookups into JOIN/WHERE fragments using
        # Django's lookup parser.
        joins, where, params = parse_lookup(filters.items(), Model._meta)
        extra_joins = ' '.join([
            '%s %s AS %s ON %s' % (join_type, table, alias, condition)
            for (alias, (table, join_type, condition)) in joins.items()
        ])
        extra_criteria = 'AND %s' % (' AND '.join(where))
    if min_count is not None:
        min_count_sql = 'HAVING COUNT(%s) >= %%s' % model_pk
        params.append(min_count)
    cursor = connection.cursor()
    cursor.execute(query % (extra_joins, extra_criteria, min_count_sql), params)
    tags = []
    for row in cursor.fetchall():
        # row[:2] is (id, name) per the SELECT; presumably matching the
        # tag model's positional constructor args — verify against it.
        t = self.model(*row[:2])
        if counts:
            t.count = row[2]
        tags.append(t)
    return tags
def quote_name(s):
    """
    Quote a database identifier, dispatching on the global quote_flag
    between the legacy module-level backend API and the newer
    DatabaseOperations API.
    """
    from django.db import backend
    if quote_flag == 'old':
        return backend.quote_name(s)
    return backend.DatabaseOperations().quote_name(s)
def related_for_model(self, tags, Model, counts=False, min_count=None):
    """
    Obtain a list of tags related to a given list of tags - that is,
    other tags used by items which have all the given tags.

    If ``counts`` is True, a ``count`` attribute will be added to each
    tag, indicating the number of items which have it in addition to
    the given list of tags.

    If ``min_count`` is given, only tags which have a ``count``
    greater than or equal to ``min_count`` will be returned.
    Passing a value for ``min_count`` implies ``counts=True``.
    """
    if min_count is not None: counts = True
    tags = get_tag_list(tags)
    tag_count = len(tags)
    tagged_item_table = backend.quote_name(
        self._get_related_model_by_accessor('items')._meta.db_table)
    # The inner subquery finds object ids carrying ALL the given tags
    # (HAVING COUNT == tag_count); the outer query collects the other
    # tags on those objects, excluding the given tags themselves.
    query = """
    SELECT %(tag)s.id, %(tag)s.name%(count_sql)s
    FROM %(tagged_item)s INNER JOIN %(tag)s ON %(tagged_item)s.tag_id = %(tag)s.id
    WHERE %(tagged_item)s.content_type_id = %(content_type_id)s
        AND %(tagged_item)s.object_id IN
        (
            SELECT %(tagged_item)s.object_id
            FROM %(tagged_item)s, %(tag)s
            WHERE %(tagged_item)s.content_type_id = %(content_type_id)s
                AND %(tag)s.id = %(tagged_item)s.tag_id
                AND %(tag)s.id IN (%(tag_id_placeholders)s)
            GROUP BY %(tagged_item)s.object_id
            HAVING COUNT(%(tagged_item)s.object_id) = %(tag_count)s
        )
        AND %(tag)s.id NOT IN (%(tag_id_placeholders)s)
    GROUP BY %(tag)s.id, %(tag)s.name
    %(min_count_sql)s
    ORDER BY %(tag)s.name ASC""" % {
        'tag': backend.quote_name(self.model._meta.db_table),
        'count_sql': counts and ', COUNT(%s.object_id)' % tagged_item_table or '',
        'tagged_item': tagged_item_table,
        'content_type_id': ContentType.objects.get_for_model(Model).id,
        'tag_id_placeholders': ','.join(['%s'] * tag_count),
        'tag_count': tag_count,
        'min_count_sql': min_count is not None and ('HAVING COUNT(%s.object_id) >= %%s' % tagged_item_table) or '',
    }
    # The tag ids are bound twice: once for the IN, once for the NOT IN.
    params = [tag.id for tag in tags] * 2
    if min_count is not None:
        params.append(min_count)
    cursor = connection.cursor()
    cursor.execute(query, params)
    related = []
    for row in cursor.fetchall():
        # row[:2] is (id, name) per the SELECT column order.
        tag = self.model(*row[:2])
        if counts is True:
            tag.count = row[2]
        related.append(tag)
    return related
def _get_sql_clause(self):
    """
    Assemble this queryset into its SQL pieces, returning a
    (select_columns, sql_tail, params) triple where sql_tail is the
    FROM/JOIN/WHERE/ORDER BY/LIMIT portion of the statement.
    """
    opts = self.model._meta

    # Construct the fundamental parts of the query: SELECT X FROM Y WHERE Z.
    select = ["%s.%s" % (backend.quote_name(opts.db_table), backend.quote_name(f.column)) for f in opts.fields]
    tables = [quote_only_if_word(t) for t in self._tables]
    joins = SortedDict()
    # Copy so the queryset's own lists are not mutated below.
    where = self._where[:]
    params = self._params[:]

    # Convert self._filters into SQL.
    joins2, where2, params2 = self._filters.get_sql(opts)
    joins.update(joins2)
    where.extend(where2)
    params.extend(params2)

    # Add additional tables and WHERE clauses based on select_related.
    if self._select_related:
        fill_table_cache(opts, select, tables, where,
                         old_prefix=opts.db_table,
                         cache_tables_seen=[opts.db_table],
                         max_depth=self._max_related_depth)

    # Add any additional SELECTs.
    if self._select:
        select.extend(['(%s) AS %s' % (quote_only_if_word(s[1]), backend.quote_name(s[0])) for s in self._select.items()])

    # Start composing the body of the SQL statement.
    sql = [" FROM", backend.quote_name(opts.db_table)]

    # Compose the join dictionary into SQL describing the joins.
    if joins:
        sql.append(" ".join(["%s %s AS %s ON %s" % (join_type, table, alias, condition)
                             for (alias, (table, join_type, condition)) in joins.items()]))

    # Compose the tables clause into SQL.
    if tables:
        sql.append(", " + ", ".join(tables))

    # Compose the where clause into SQL.
    if where:
        sql.append(where and "WHERE " + " AND ".join(where))

    # ORDER BY clause
    order_by = []
    if self._order_by is not None:
        ordering_to_use = self._order_by
    else:
        ordering_to_use = opts.ordering
    for f in handle_legacy_orderlist(ordering_to_use):
        if f == '?': # Special case.
            order_by.append(backend.get_random_function_sql())
        else:
            # A leading '-' requests descending order.
            if f.startswith('-'):
                col_name = f[1:]
                order = "DESC"
            else:
                col_name = f
                order = "ASC"
            if "." in col_name:
                # An explicit "table.column" ordering term.
                table_prefix, col_name = col_name.split('.', 1)
                table_prefix = backend.quote_name(table_prefix) + '.'
            else:
                # Use the database table as a column prefix if it wasn't given,
                # and if the requested column isn't a custom SELECT.
                if "." not in col_name and col_name not in (self._select or ()):
                    table_prefix = backend.quote_name(opts.db_table) + '.'
                else:
                    table_prefix = ''
            order_by.append('%s%s %s' % (table_prefix, backend.quote_name(orderfield2column(col_name, opts)), order))
    if order_by:
        sql.append("ORDER BY " + ", ".join(order_by))

    # LIMIT and OFFSET clauses
    if self._limit is not None:
        sql.append("%s " % backend.get_limit_offset_sql(self._limit, self._offset))
    else:
        assert self._offset is None, "'offset' is not allowed without 'limit'"

    return select, " ".join(sql), params
def get_group_permissions(self):
    "Returns a list of permission strings that this user has through his/her groups."
    if hasattr(self, '_group_perm_cache'):
        return self._group_perm_cache
    import sets
    qn = backend.quote_name
    # After DB quoting the query reads:
    #   SELECT ct."app_label", p."codename"
    #   FROM "auth_permission" p, "auth_group_permissions" gp,
    #        "auth_user_groups" ug, "django_content_type" ct
    #   WHERE p."id" = gp."permission_id"
    #     AND gp."group_id" = ug."group_id"
    #     AND ct."id" = p."content_type_id"
    #     AND ug."user_id" = %s
    sql = """
    SELECT ct.%s, p.%s
    FROM %s p, %s gp, %s ug, %s ct
    WHERE p.%s = gp.%s
        AND gp.%s = ug.%s
        AND ct.%s = p.%s
        AND ug.%s = %%s""" % (
        qn('app_label'), qn('codename'),
        qn('auth_permission'), qn('auth_group_permissions'),
        qn('auth_user_groups'), qn('django_content_type'),
        qn('id'), qn('permission_id'),
        qn('group_id'), qn('group_id'),
        qn('id'), qn('content_type_id'),
        qn('user_id'))
    cursor = connection.cursor()
    cursor.execute(sql, [self.id])
    # Cache the result per-instance as "app_label.codename" strings.
    perms = sets.Set()
    for app_label, codename in cursor.fetchall():
        perms.add("%s.%s" % (app_label, codename))
    self._group_perm_cache = perms
    return self._group_perm_cache
def save(self):
    """
    Save the instance: try an UPDATE when a row with this primary key
    already exists, otherwise INSERT. Fires pre_save/post_save signals
    around the write and commits unless a transaction is managed.
    """
    dispatcher.send(signal=signals.pre_save, sender=self.__class__, instance=self)

    non_pks = [f for f in self._meta.fields if not f.primary_key]
    cursor = connection.cursor()

    # First, try an UPDATE. If that doesn't update anything, do an INSERT.
    pk_val = self._get_pk_val()
    pk_set = bool(pk_val)
    record_exists = True
    if pk_set:
        # Determine whether a record with the primary key already exists.
        cursor.execute("SELECT 1 FROM %s WHERE %s=%%s LIMIT 1" % \
            (backend.quote_name(self._meta.db_table), backend.quote_name(self._meta.pk.column)), [pk_val])
        # If it does already exist, do an UPDATE.
        if cursor.fetchone():
            db_values = [f.get_db_prep_save(f.pre_save(self, False)) for f in non_pks]
            if db_values:
                cursor.execute("UPDATE %s SET %s WHERE %s=%%s" % \
                    (backend.quote_name(self._meta.db_table),
                    ','.join(['%s=%%s' % backend.quote_name(f.column) for f in non_pks]),
                    backend.quote_name(self._meta.pk.column)),
                    db_values + [pk_val])
        else:
            record_exists = False
    if not pk_set or not record_exists:
        # INSERT path: all non-auto fields always, auto fields only
        # when the caller supplied a pk explicitly.
        field_names = [backend.quote_name(f.column) for f in self._meta.fields if not isinstance(f, AutoField)]
        db_values = [f.get_db_prep_save(f.pre_save(self, True)) for f in self._meta.fields if not isinstance(f, AutoField)]
        # If the PK has been manually set, respect that.
        if pk_set:
            # NOTE(review): these AutoField columns are appended
            # unquoted, unlike every other column name above — looks
            # inconsistent; confirm intended.
            field_names += [f.column for f in self._meta.fields if isinstance(f, AutoField)]
            db_values += [f.get_db_prep_save(f.pre_save(self, True)) for f in self._meta.fields if isinstance(f, AutoField)]
        placeholders = ['%s'] * len(field_names)
        if self._meta.order_with_respect_to:
            # _order is computed in-database as the count of existing
            # rows sharing the same "order with respect to" value.
            field_names.append(backend.quote_name('_order'))
            # TODO: This assumes the database supports subqueries.
            placeholders.append('(SELECT COUNT(*) FROM %s WHERE %s = %%s)' % \
                (backend.quote_name(self._meta.db_table), backend.quote_name(self._meta.order_with_respect_to.column)))
            db_values.append(getattr(self, self._meta.order_with_respect_to.attname))
        if db_values:
            cursor.execute("INSERT INTO %s (%s) VALUES (%s)" % \
                (backend.quote_name(self._meta.db_table), ','.join(field_names),
                ','.join(placeholders)), db_values)
        else:
            # Create a new record with defaults for everything.
            cursor.execute("INSERT INTO %s (%s) VALUES (%s)" %
                (backend.quote_name(self._meta.db_table),
                 backend.quote_name(self._meta.pk.column),
                 backend.get_pk_default_value()))
        if self._meta.has_auto_field and not pk_set:
            # Pull the database-assigned id back onto the instance.
            setattr(self, self._meta.pk.attname, backend.get_last_insert_id(cursor, self._meta.db_table, self._meta.pk.column))
    transaction.commit_unless_managed()

    # Run any post-save hooks.
    dispatcher.send(signal=signals.post_save, sender=self.__class__, instance=self)