def get_model_siblings(Model, instance, queryset):
    pk = Model._meta.pk.name
    ordering = queryset.query.order_by
    # Fall back to the model's default ordering when the queryset has none.
    if len(ordering) == 0 and len(Model._meta.ordering):
        ordering = Model._meta.ordering
    # Append the primary key so the ordering is deterministic.
    if not any(map(lambda x: x == pk or x == '-{}'.format(pk), ordering)):
        ordering = list(ordering) + ['-{}'.format(pk)]
    queryset.query.order_by = ordering

    # Render the query once so the compiler sets up its joins and aliases.
    compiler = SQLCompiler(queryset.query, connection, queryset.db)
    compiler.as_sql()

    join_sql, join_args = create_joins(queryset, compiler)
    where_sql, where_args = create_where(queryset, compiler)
    order_by_sql = create_order_by(compiler)

    row_number = get_row_number(Model, instance, join_sql, join_args,
                                where_sql, where_args, order_by_sql)
    if not row_number:
        return {}
    return get_row_siblings(Model, row_number, join_sql, join_args,
                            where_sql, where_args, order_by_sql)
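# Hypothetical usage sketch (model, instance, and queryset are placeholders;
# the create_*/get_row_* helpers are assumed to live in the same module):
#
#   qs = Article.objects.filter(published=True).order_by('-created')
#   siblings = get_model_siblings(Article, some_article, qs)
#   # -> {} when the instance has no row number in the queryset, otherwise
#   #    whatever get_row_siblings() returns for that row number.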
def as_sql(
    self,
    compiler: SQLCompiler,
    connection: BaseDatabaseWrapper,
    **extra_context: Any,
) -> tuple[str, tuple[Any, ...]]:
    connection.ops.check_expression_support(self)
    sql = ["GROUP_CONCAT("]
    if self.distinct:
        sql.append("DISTINCT ")

    expr_parts = []
    params = []
    for arg in self.source_expressions:
        arg_sql, arg_params = compiler.compile(arg)
        expr_parts.append(arg_sql)
        params.extend(arg_params)
    expr_sql = self.arg_joiner.join(expr_parts)
    sql.append(expr_sql)

    if self.ordering is not None:
        sql.append(" ORDER BY ")
        sql.append(expr_sql)
        # The expression SQL is repeated inside ORDER BY, so its placeholder
        # parameters have to be repeated as well.
        params.extend(params[:])
        sql.append(" ")
        sql.append(self.ordering.upper())

    if self.separator is not None:
        sql.append(f" SEPARATOR '{self.separator}'")

    sql.append(")")
    return "".join(sql), tuple(params)
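# A minimal sketch, not part of the original code: a hypothetical GroupConcat
# aggregate that would carry the attributes the as_sql() above reads
# (distinct, ordering, separator). The class and argument names are
# assumptions; only Django's Aggregate API is taken as given.
from typing import Any, Optional

from django.db.models import Aggregate, CharField


class GroupConcat(Aggregate):
    function = "GROUP_CONCAT"
    output_field = CharField()
    allow_distinct = True

    def __init__(self, expression: Any, distinct: bool = False,
                 ordering: Optional[str] = None, separator: Optional[str] = None,
                 **extra: Any) -> None:
        # Stash the extra options the custom as_sql() expects to find.
        self.ordering = ordering
        self.separator = separator
        super().__init__(expression, distinct=distinct, **extra)


# Hypothetical usage (model names are placeholders):
#   Author.objects.annotate(titles=GroupConcat("book__title", separator=", "))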
def as_sql(self, compiler: SQLCompiler, connection: BaseDatabaseWrapper) -> tuple[str, Iterable[Any]]: lhs, params = compiler.compile(self.lhs) return ( f"COLUMN_GET({lhs}, %s AS {self.data_type})", tuple(params) + (self.key_name, ), )
def as_sql(
    self,
    compiler: SQLCompiler,
    connection: BaseDatabaseWrapper,
    **extra_context: Any,
) -> Tuple[str, List[Any]]:
    sql, params = compiler.compile(self.value)
    return (sql, params)
def get_converters_with_compiler(
    expression: T_SupportsConversion, compiler: SQLCompiler
) -> ConvertersExpressionPair:
    converters = compiler.get_converters([expression])  # type: ConverterDict
    if not converters:
        return [], expression
    return converters[0]
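# Shape assumption, based on Django's SQLCompiler.get_converters(): it returns
# a dict keyed by expression index, e.g. {0: ([converter, ...], expression)},
# and is empty when neither the backend nor the output field registers any
# converters. Hence converters[0] above is already the (converters, expression)
# pair for the single expression passed in.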
def as_sql(
    self,
    compiler: SQLCompiler,
    connection: BaseDatabaseWrapper,
) -> tuple[str, tuple[Any, ...]]:
    field, field_params = compiler.compile(self.lhs)
    sql = self.sql_expression % (field,)
    return sql, tuple(field_params)
def get_triggers(self, using):
    if using:
        cconnection = connections[using]
    else:
        cconnection = connection
    qn = self.get_quote_name(using)
    related_field = self.manager.related.field

    if isinstance(related_field, ManyToManyField):
        fk_name = related_field.m2m_reverse_name()
        inc_where = [
            "%(id)s IN (SELECT %(reverse_related)s FROM %(m2m_table)s WHERE %(related)s = NEW.%(id)s)" % {
                'id': qn(self.model._meta.pk.get_attname_column()[0]),
                'related': qn(related_field.m2m_column_name()),
                'm2m_table': qn(related_field.m2m_db_table()),
                'reverse_related': qn(fk_name),
            }
        ]
        dec_where = [action.replace('NEW.', 'OLD.') for action in inc_where]
    else:
        pk_name = qn(self.model._meta.pk.get_attname_column()[1])
        fk_name = qn(related_field.attname)
        inc_where = ["%s = NEW.%s" % (pk_name, fk_name)]
        dec_where = ["%s = OLD.%s" % (pk_name, fk_name)]

    content_type = str(ContentType.objects.get_for_model(self.model).pk)

    inc_query = TriggerFilterQuery(self.manager.related.model, trigger_alias='NEW')
    inc_query.add_q(Q(**self.filter))
    inc_query.add_q(~Q(**self.exclude))
    inc_filter_where, _ = inc_query.where.as_sql(
        SQLCompiler(inc_query, cconnection, using).quote_name_unless_alias,
        cconnection)

    dec_query = TriggerFilterQuery(self.manager.related.model, trigger_alias='OLD')
    dec_query.add_q(Q(**self.filter))
    dec_query.add_q(~Q(**self.exclude))
    dec_filter_where, where_params = dec_query.where.as_sql(
        SQLCompiler(dec_query, cconnection, using).quote_name_unless_alias,
        cconnection)

    if inc_filter_where:
        inc_where.append(inc_filter_where)
    if dec_filter_where:
        dec_where.append(dec_filter_where)

    # create the triggers for the incremental updates
    increment = triggers.TriggerActionUpdate(
        model=self.model,
        columns=(self.fieldname,),
        values=(self.get_increment_value(using),),
        where=(' AND '.join(inc_where), where_params),
    )
    decrement = triggers.TriggerActionUpdate(
        model=self.model,
        columns=(self.fieldname,),
        values=(self.get_decrement_value(using),),
        where=(' AND '.join(dec_where), where_params),
    )

    other_model = self.manager.related.model
    trigger_list = [
        triggers.Trigger(other_model, "after", "update", [increment, decrement], content_type, using, self.skip),
        triggers.Trigger(other_model, "after", "insert", [increment], content_type, using, self.skip),
        triggers.Trigger(other_model, "after", "delete", [decrement], content_type, using, self.skip),
    ]
    if isinstance(related_field, ManyToManyField):
        trigger_list.extend(
            self.m2m_triggers(content_type, fk_name, related_field, using))
    return trigger_list
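# Illustration only (column names and backtick quoting are assumptions): for a
# plain ForeignKey from Book to Author, the non-M2M branch above produces WHERE
# fragments such as
#   inc_where = ['`id` = NEW.`author_id`']
#   dec_where = ['`id` = OLD.`author_id`']
# which the AFTER INSERT/DELETE/UPDATE triggers combine with the extra
# filter/exclude conditions before incrementing or decrementing
# self.fieldname on the matching parent rows.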
def as_sql(self, compiler: SQLCompiler, connection: BaseDatabaseWrapper) -> tuple[str, Iterable[Any]]:
    lhs, params = compiler.compile(self.lhs)
    return self.expr % (lhs, lhs, lhs), params
def as_sql(self, compiler: SQLCompiler, connection: BaseDatabaseWrapper) -> tuple[str, Iterable[Any]]: lhs, params = compiler.compile(self.lhs) return "SOUNDEX(%s)" % lhs, params
def get_compiler(self, using=None, connection=None):
    # Call super to figure out using and connection.
    c = super(TranslationQuery, self).get_compiler(using, connection)
    return SQLCompiler(self, c.connection, c.using)
def as_salesforce(self, compiler: sql_compiler.SQLCompiler, connection) -> Tuple[str, List[Any]]:
    """
    Return the SQL version of the where clause and the value to be
    substituted in. Return '', [] if this node matches everything,
    None, [] if this node is empty, and raise EmptyResultSet if this
    node can't match anything.
    """
    # *** patch 1 (add) begin
    # # prepare SOQL translations
    if not isinstance(compiler, SQLCompiler):
        # future fix for DJANGO_20_PLUS, when deprecated "use_for_related_fields"
        # removed from managers,
        # "str(<UpdateQuery...>)" or "<UpdateQuery...>.get_compiler('default').as_sql()"
        return super(SalesforceWhereNode, self).as_sql(compiler, connection)
    soql_trans = compiler.query_topology()
    # *** patch 1 end

    result = []
    result_params = []  # type: List[Any]
    if self.connector == AND:
        full_needed, empty_needed = len(self.children), 1
    else:
        full_needed, empty_needed = 1, len(self.children)

    for child in self.children:
        try:
            sql, params = compiler.compile(child)
        except EmptyResultSet:
            empty_needed -= 1
        else:
            if sql:
                # *** patch 2 (add) begin
                # # translate the alias of child to SOQL name
                x_match = re.match(r'(\w+)\.(.*)', sql)
                if x_match:
                    x_table, x_field = x_match.groups()
                    sql = '%s.%s' % (soql_trans[x_table], x_field)
                    # print('sql params:', sql, params)
                # *** patch 2 end
                result.append(sql)
                result_params.extend(params)
            else:
                full_needed -= 1
        # Check if this node matches nothing or everything.
        # First check the amount of full nodes and empty nodes
        # to make this node empty/full.
        # Now, check if this node is full/empty using the
        # counts.
        if empty_needed == 0:
            if self.negated:
                return '', []
            else:
                raise EmptyResultSet
        if full_needed == 0:
            if self.negated:
                raise EmptyResultSet
            else:
                return '', []

    conn = ' %s ' % self.connector
    sql_string = conn.join(result)
    if sql_string:
        if self.negated:
            # *** patch 3 (remove) begin
            # # Some backends (Oracle at least) need parentheses
            # # around the inner SQL in the negated case, even if the
            # # inner SQL contains just a single expression.
            # sql_string = 'NOT (%s)' % sql_string
            # *** patch 3 (add)
            # SOQL requires parentheses around "NOT" expression, if combined with AND/OR
            sql_string = '(NOT (%s))' % sql_string
            # *** patch 3 end
        # *** patch 4 (combine two versions into one compatible) begin
        # elif len(result) > 1:  # Django 1.11
        # elif len(result) > 1 or self.resolved:  # Django 2.0, 2.1
        elif len(result) > 1 or getattr(self, 'resolved', False):  # compatible code
            # *** patch 4 end
            sql_string = '(%s)' % sql_string
    return sql_string, result_params
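# Illustration only (object and field names are placeholders): for a queryset
# roughly equivalent to
#   Contact.objects.filter(first_name='Anna').exclude(last_name='Smith')
# the node above would render something like
#   Contact.FirstName = %s AND (NOT (Contact.LastName = %s))
# where patch 2 has rewritten each child's Django table alias to its SOQL name
# via compiler.query_topology() and patch 3 wraps the negation in parentheses.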