Example #1
# Imports assumed by this snippet (not shown in the original example):
from django.db import connection, connections
from django.db.models.sql import UpdateQuery
from django.db.models.sql.where import WhereNode


def _validate_where(model, where, using):
    # type: (Type[Model], Optional[WhereNode], Optional[str]) -> Tuple[str, tuple]
    """
    Validates where clause (if given).
    Translates it into sql + params tuple
    :param model: Model, where clause is applied to
    :param where: WhereNode instance as django generates it from QuerySet
    :param using: Database alias to use
    :return: Sql, params tuple
    """
    if where is None:
        return '', tuple()

    if not isinstance(where, WhereNode):
        raise TypeError("'where' must be a WhereNode instance")

    # Django 1.7's WhereNode has no contains_aggregate attribute, so check for it first
    if hasattr(where, 'contains_aggregate') and where.contains_aggregate:
        raise ValueError("'where' should not contain aggregates")

    query = UpdateQuery(model)
    conn = connections[using] if using else connection
    compiler = query.get_compiler(connection=conn)
    sql, params = where.as_sql(compiler, conn)

    # Replace the model's table name with the alias "t" used inside the generated queries
    if sql:
        sql = sql.replace('"%s"' % model._meta.db_table, '"t"')

    return sql, params
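
A minimal usage sketch (MyModel and the 'status' filter are hypothetical, not part of the example above); the where clause can be taken from an ordinary QuerySet:

# Hypothetical usage of _validate_where; MyModel is assumed for illustration.
qs = MyModel.objects.filter(status='active')
sql, params = _validate_where(MyModel, qs.query.where, 'default')
# sql now references the table under its alias "t", e.g. '"t"."status" = %s'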
Example #2
# Classmethod extracted from its class body; the decorator, class context and
# the `import django` / UpdateQuery / InsertQuery imports are not shown in the original example.
def get_query(cls,
              field,
              with_table=False,
              for_update=True):  # type: (Field, bool, bool) -> Query
    """
    Gets a django Query for the current SQL generation, depending on the generation parameters
    :param field: Field for which to get the query
    :param with_table: If set, the column name in the SQL will be prefixed by the table name
    :param for_update: If set, an update query is generated; otherwise an insert query
    :return: Query instance
    """
    kwargs = {'alias_cols': with_table} if django.VERSION >= (3, 1) else {}
    query = UpdateQuery(field.model, **kwargs) if for_update \
        else InsertQuery(field.model, **kwargs)
    return query
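
A quick sketch of how the returned Query could be turned into a compiler (SomeManager, the class assumed to hold this classmethod, and MyModel are hypothetical):

# Hypothetical usage; SomeManager and MyModel are assumptions for illustration.
field = MyModel._meta.get_field('name')
query = SomeManager.get_query(field, with_table=True, for_update=True)
compiler = query.get_compiler(using='default')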
Example #3
def bulk_update(objs, meta=None, update_fields=None, exclude_fields=None,
                using='default', batch_size=None, pk_field='pk'):
    # Relies on helpers defined elsewhere in the source module (grouper,
    # get_fields, _as_sql, flatten, _get_db_type) and on itertools,
    # defaultdict, connections and UpdateQuery being imported there.
    assert batch_size is None or batch_size > 0

    # force to retrieve objs from the DB at the beginning,
    # to avoid multiple subsequent queries
    objs = list(objs)
    if not objs:
        return
    batch_size = batch_size or len(objs)

    if meta:
        fields = get_fields(update_fields, exclude_fields, meta)
    else:
        meta = objs[0]._meta
        if update_fields is not None:
            fields = get_fields(update_fields, exclude_fields, meta, objs[0])
        else:
            fields = None

    if fields is not None and len(fields) == 0:
        return

    if pk_field == 'pk':
        pk_field = meta.get_field(meta.pk.name)
    else:
        pk_field = meta.get_field(pk_field)

    connection = connections[using]
    query = UpdateQuery(meta.model)
    compiler = query.get_compiler(connection=connection)

    # The case clause template; db-dependent
    # Apparently, mysql's castable types are very limited and have
    # nothing to do with the column types. Still, it handles the uncast
    # types well enough... hopefully.
    # http://dev.mysql.com/doc/refman/5.5/en/cast-functions.html#function_cast
    #
    # Sqlite also gives some trouble with cast, at least for datetime,
    # but is also permissive for uncast values
    vendor = connection.vendor
    use_cast = 'mysql' not in vendor and 'sqlite' not in vendor
    if use_cast:
        template = '"{column}" = CAST(CASE "{pk_column}" {cases}ELSE "{column}" END AS {type})'
    else:
        template = '"{column}" = (CASE "{pk_column}" {cases}ELSE "{column}" END)'

    case_template = "WHEN %s THEN {} "

    lenpks = 0
    for objs_batch in grouper(objs, batch_size):

        pks = []
        parameters = defaultdict(list)
        placeholders = defaultdict(list)

        for obj in objs_batch:

            pk_value, _ = _as_sql(obj, pk_field, query, compiler, connection)
            pks.append(pk_value)

            loaded_fields = fields or get_fields(update_fields, exclude_fields, meta, obj)

            for field in loaded_fields:
                value, placeholder = _as_sql(obj, field, query, compiler, connection)
                parameters[field].extend(flatten([pk_value, value], types=tuple))
                placeholders[field].append(placeholder)

        values = ', '.join(
            template.format(
                column=field.column,
                pk_column=pk_field.column,
                cases=(case_template*len(placeholders[field])).format(*placeholders[field]),
                type=_get_db_type(field, connection=connection),
            )
            for field in parameters.keys()
        )

        parameters = flatten(parameters.values(), types=list)
        parameters.extend(pks)

        n_pks = len(pks)
        del pks

        dbtable = '"{}"'.format(meta.db_table)

        in_clause = '"{pk_column}" in ({pks})'.format(
            pk_column=pk_field.column,
            pks=', '.join(itertools.repeat('%s', n_pks)),
        )

        sql = 'UPDATE {dbtable} SET {values} WHERE {in_clause}'.format(
            dbtable=dbtable,
            values=values,
            in_clause=in_clause,
        )
        del values

        # Identifier quoting in ANSI SQL is done with double quotes (").
        # Unfortunately, this quoting style is not portable to MySQL
        # unless it runs in ANSI compatibility mode, so switch to backticks.
        if 'mysql' in vendor:
            sql = sql.replace('"', '`')

        lenpks += n_pks

        connection.cursor().execute(sql, parameters)

    return lenpks
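
A hedged usage sketch for the function above (MyModel and its 'price' field are assumptions); the return value is the number of updated primary keys:

# Hypothetical usage; MyModel is not part of the snippet above.
items = list(MyModel.objects.filter(in_stock=True))
for item in items:
    item.price = item.price * 2
updated = bulk_update(items, update_fields=['price'], batch_size=500)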
Example #4
def bulk_update(objs, meta=None, update_fields=None, exclude_fields=None,
                using='default', batch_size=None, pk_field='pk'):
    assert batch_size is None or batch_size > 0

    # force to retrieve objs from the DB at the beginning,
    # to avoid multiple subsequent queries
    objs = list(objs)
    if not objs:
        return
    batch_size = batch_size or len(objs)

    if meta:
        fields = get_fields(update_fields, exclude_fields, meta)
    else:
        meta = objs[0]._meta
        if update_fields is not None:
            fields = get_fields(update_fields, exclude_fields, meta, objs[0])
        else:
            fields = None

    if fields is not None and len(fields) == 0:
        return

    if pk_field == 'pk':
        pk_field = meta.get_field(meta.pk.name)
    else:
        pk_field = meta.get_field(pk_field)

    connection = connections[using]
    query = UpdateQuery(meta.model)
    compiler = query.get_compiler(connection=connection)

    # The case clause template; db-dependent
    # Apparently, mysql's castable types are very limited and have
    # nothing to do with the column types. Still, it handles the uncast
    # types well enough... hopefully.
    # http://dev.mysql.com/doc/refman/5.5/en/cast-functions.html#function_cast
    #
    # Sqlite also gives some trouble with cast, at least for datetime,
    # but is also permissive for uncast values
    vendor = connection.vendor
    use_cast = 'mysql' not in vendor and 'sqlite' not in vendor
    if use_cast:
        template = '"{column}" = CAST(CASE "{pk_column}" {cases}ELSE "{column}" END AS {type})'
    else:
        template = '"{column}" = (CASE "{pk_column}" {cases}ELSE "{column}" END)'

    case_template = "WHEN %s THEN {} "

    lenpks = 0
    for objs_batch in grouper(objs, batch_size):

        pks = []
        parameters = defaultdict(list)
        placeholders = defaultdict(list)

        for obj in objs_batch:

            pk_value, _ = _as_sql(obj, pk_field, query, compiler, connection)
            pks.append(pk_value)

            loaded_fields = fields or get_fields(update_fields, exclude_fields, meta, obj)

            for field in loaded_fields:
                value, placeholder = _as_sql(obj, field, query, compiler, connection)
                parameters[field].extend(flatten([pk_value, value], types=tuple))
                placeholders[field].append(placeholder)

        values = ', '.join(
            template.format(
                column=field.column,
                pk_column=pk_field.column,
                cases=(case_template*len(placeholders[field])).format(*placeholders[field]),
                type=_get_db_type(field, connection=connection),
            )
            for field in parameters.keys()
        )

        parameters = flatten(parameters.values(), types=list)
        parameters.extend(pks)

        n_pks = len(pks)
        del pks

        dbtable = '"{}"'.format(meta.db_table)

        in_clause = '"{pk_column}" in ({pks})'.format(
            pk_column=pk_field.column,
            pks=', '.join(itertools.repeat('%s', n_pks)),
        )

        sql = 'UPDATE {dbtable} SET {values} WHERE {in_clause}'.format(
            dbtable=dbtable,
            values=values,
            in_clause=in_clause,
        )
        del values

        # Identifier quoting in ANSI SQL is done with double quotes (").
        # Unfortunately, this quoting style is not portable to MySQL
        # unless it runs in ANSI compatibility mode, so switch to backticks.
        if 'mysql' in vendor:
            sql = sql.replace('"', '`')

        lenpks += n_pks

        signal_kwargs = {
            'raw': False,
            'using': using,
            'update_fields': update_fields,
        }

        for obj in objs_batch:
            pre_save.send(
                sender=obj.__class__,
                instance=obj,
                **signal_kwargs
            )

        connection.cursor().execute(sql, parameters)

        for obj in objs_batch:
            post_save.send(
                sender=obj.__class__,
                instance=obj,
                created=False,
                **signal_kwargs
            )

    return lenpks
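
Since this variant emits pre_save and post_save, ordinary receivers fire around the bulk UPDATE; a minimal sketch, assuming a hypothetical MyModel:

from django.db.models.signals import post_save
from django.dispatch import receiver

# Hypothetical receiver; MyModel and the 'price' field are assumptions.
@receiver(post_save, sender=MyModel)
def log_bulk_update(sender, instance, created, update_fields, **kwargs):
    if not created:
        print('bulk-updated', instance.pk, update_fields)

bulk_update(MyModel.objects.all(), update_fields=['price'])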
Example #5
def bulk_update(objs,
                meta=None,
                update_fields=None,
                exclude_fields=None,
                using='default',
                batch_size=None,
                pk_field='pk'):
    assert batch_size is None or batch_size > 0

    # force to retrieve objs from the DB at the beginning,
    # to avoid multiple subsequent queries
    objs = list(objs)
    if not objs:
        return
    batch_size = batch_size or len(objs)

    if meta:
        fields = get_fields(update_fields, exclude_fields, meta)
    else:
        meta = objs[0]._meta
        if update_fields is not None:
            fields = get_fields(update_fields, exclude_fields, meta, objs[0])
        else:
            fields = None

    if fields is not None and len(fields) == 0:
        return

    if pk_field == 'pk':
        pk_field = meta.get_field(meta.pk.name)
    else:
        pk_field = meta.get_field(pk_field)

    connection = connections[using]
    query = UpdateQuery(meta.model)
    compiler = query.get_compiler(connection=connection)

    template = '"{column}" = CAST(CASE "{pk_column}" {cases}ELSE "{column}" END AS {type})'

    case_template = "WHEN %s THEN {} "

    lenpks = 0
    for objs_batch in grouper(objs, batch_size):

        pks = []
        parameters = defaultdict(list)
        placeholders = defaultdict(list)

        for obj in objs_batch:

            pk_value, _ = _as_sql(obj, pk_field, query, compiler, connection)
            pks.append(pk_value)

            loaded_fields = fields or get_fields(update_fields, exclude_fields,
                                                 meta, obj)

            for field in loaded_fields:
                value, placeholder = _as_sql(obj, field, query, compiler,
                                             connection)
                parameters[field].extend(
                    flatten([pk_value, value], types=tuple))
                placeholders[field].append(placeholder)

        values = ', '.join(
            template.format(
                column=field.column,
                pk_column=pk_field.column,
                cases=(case_template *
                       len(placeholders[field])).format(*placeholders[field]),
                type=_get_db_type(field, connection=connection),
            ) for field in parameters.keys())

        parameters = flatten(parameters.values(), types=list)
        parameters.extend(pks)

        n_pks = len(pks)
        del pks

        dbtable = '"{}"'.format(meta.db_table)

        in_clause = '"{pk_column}" in ({pks})'.format(
            pk_column=pk_field.column,
            pks=', '.join(itertools.repeat('%s', n_pks)),
        )

        sql = 'UPDATE {dbtable} SET {values} WHERE {in_clause}'.format(
            dbtable=dbtable,
            values=values,
            in_clause=in_clause,
        )
        del values

        lenpks += n_pks

        connection.cursor().execute(sql, parameters)

    return lenpks
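
A call sketch for this CAST-only variant (MyModel and the 'replica' alias are hypothetical; it assumes a backend such as PostgreSQL where the double-quoted CAST template is valid):

# Hypothetical usage; the model, the alias and the 'score' field are assumptions.
rows = list(MyModel.objects.using('replica').all())
for row in rows:
    row.score += 1
bulk_update(rows, using='replica', update_fields=['score'])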