def update_or_create(model, **kwargs):
    """
    Fetch or create an instance of ``model``, applying ``defaults`` as
    attribute updates when the row already existed.

    Equivalent to ``Model.objects.update_or_create`` introduced in Django 1.7.

    Args:
        model: Django model class to look up / create.
        **kwargs: Lookup fields; the special key ``defaults`` is a dict of
            field values written onto the object when it already exists.

    Returns:
        None. Errors are logged, not re-raised (best-effort semantics).
    """
    defaults = kwargs.pop("defaults", {})
    logger.debug("Get or create: %s, %s" % (kwargs, defaults))
    obj = None
    created = False
    try:
        obj, created = model.objects.get_or_create(defaults=defaults, **kwargs)
        if not created and defaults:
            # Existing row: get_or_create does not apply defaults, so push
            # them onto the instance manually and persist.
            for key, value in defaults.items():  # py3: iteritems() removed
                setattr(obj, key, value)
            obj.save()
    except Exception as e:  # py3-compatible (was py2-only "except Exception, e")
        from django.db.models import sql
        # Best-effort reconstruction of the failing INSERT for diagnostics.
        sql_query = sql.InsertQuery(obj) if obj else ""
        logger.exception(e)
        if sql_query:
            # Reuse the query built above rather than constructing it twice.
            logger.error("Error executing SQL: %s" % (sql_query,))
def _replace_codings(self, new_codings):
    """
    Replace all codings (and their coding values) of this coded article
    with ``new_codings``.

    Returns a 2-tuple ``(new_coding_objects, created_codingvalues)`` where
    the second element is the result of ``CodingValue.objects.bulk_create``.

    NOTE: Python 2 code (``itertools.izip``); ``new_codings`` is presumably
    an iterable of dicts with a "values" key — TODO confirm against callers.
    """
    # Updating tactic: delete all existing codings and codingvalues, then insert
    # the new ones. This prevents calculating a delta, and confronting the
    # database with (potentially) many update queries.
    CodingValue.objects.filter(coding__coded_article=self).delete()
    Coding.objects.filter(coded_article=self).delete()

    new_coding_objects = map(partial(_to_coding, self), new_codings)

    # Saving each coding is pretty inefficient, but Django doesn't allow retrieving
    # id's when using bulk_create. See Django ticket #19527.
    if connection.vendor == "postgresql":
        # Postgres supports RETURNING, so build one INSERT and get the new
        # primary keys back in a single round-trip via a raw query.
        query = sql.InsertQuery(Coding)
        # fields[1:] skips the auto pk field, which the database assigns.
        query.insert_values(Coding._meta.fields[1:], new_coding_objects)
        raw_sql, params = query.sql_with_params()[0]
        new_coding_objects = Coding.objects.raw(
            "%s %s" % (raw_sql, "RETURNING coding_id"), params)
    else:
        # Do naive O(n) approach
        for coding in new_coding_objects:
            coding.save()

    # Pair each input coding dict with its saved Coding object to build the
    # flat iterable of CodingValue instances to bulk-insert.
    coding_values = itertools.chain.from_iterable(
        _to_codingvalues(co, c["values"])
        for c, co in itertools.izip(new_codings, new_coding_objects))

    return (new_coding_objects, CodingValue.objects.bulk_create(coding_values))
def insert_query(model, values, return_id=False, raw_values=False):
    """
    Insert a single new record for *model*.

    Thin interface over ``sql.InsertQuery``; this is how ``Model.save()``
    is implemented internally. Not part of the public API.
    """
    insert = sql.InsertQuery(model, connection)
    insert.insert_values(values, raw_values)
    return insert.execute_sql(return_id)
def test_insert_uuid_field(self):
    """A UUID field must be bound as a 32-character hex string on INSERT."""
    import uuid
    from django.db.models import sql
    from testapp.models import TestModel

    instance = TestModel(uuid=uuid.uuid4())
    insert = sql.InsertQuery(instance)
    insert.insert_values(instance._meta.local_fields, [instance])
    statements = insert.get_compiler('default').as_sql()

    # uuid is the last field of TestModel, so its bound parameter is the
    # final entry of the first statement's parameter tuple.
    bound_value = statements[0][1][-1]

    # the Python value for insertion must be a string whose length is 32
    self.assertEqual(type(bound_value), str)
    self.assertEqual(len(bound_value), 32)
def _insert(self, objs, fields, **kwargs):
    """ Replaces standard insert procedure for bulk_create_returning """
    # Fall back to stock Django behaviour unless the model opted in.
    if not getattr(self.model, '_insert_returning', False):
        return QuerySet._insert(self, objs, fields, **kwargs)

    # Returns attname, not column.
    # Before django 1.10 pk fields hasn't been returned from postgres.
    # In this case, I can't match bulk_create results and return values by primary key.
    # So I select all data from returned results
    return_fields = self._get_fields(
        ignore_deferred=(django.VERSION < (1, 10)))
    assert len(return_fields) == 1 and list(return_fields.keys())[0] == self.model, \
        "You can't fetch relative model fields with returning operation"

    self._for_write = True
    using = kwargs.get('using', None) or self.db

    # ignore_conflicts only exists on InsertQuery from Django 2.2 on.
    query_kwargs = {} if django.VERSION < (2, 2) else {
        'ignore_conflicts': kwargs.get('ignore_conflicts')
    }
    query = sql.InsertQuery(self.model, **query_kwargs)
    query.insert_values(fields, objs, raw=kwargs.get('raw'))

    # Cache the RETURNING result set on the model class so the caller
    # (bulk_create_returning) can pick it up after this hook returns.
    self.model._insert_returning_cache = self._execute_sql(query, return_fields, using=using)

    if django.VERSION < (3, ):
        # Pre-3.0 API: optionally return inserted pk(s) via return_id.
        if not kwargs.get('return_id', False):
            return None
        inserted_ids = self.model._insert_returning_cache.values_list(
            self.model._meta.pk.column, flat=True)
        if not inserted_ids:
            return None
        # Single insert returns a scalar id, multiple inserts a list —
        # mirroring Django's own _insert contract.
        return list(
            inserted_ids) if len(inserted_ids) > 1 else inserted_ids[0]
    else:
        # 3.0+ API: caller passes explicit returning_fields.
        returning_fields = kwargs.get('returning_fields', None)
        if returning_fields is None:
            return None
        columns = [f.column for f in returning_fields]
        # In django 3.0 single result is returned if single object is returned...
        flat = django.VERSION < (3, 1) and len(objs) <= 1
        return self.model._insert_returning_cache.values_list(*columns, flat=flat)
def _copy_insert(self):
    """
    Bulk-load ``self.objects`` into the model's table using PostgreSQL
    ``COPY FROM`` (tab-separated, newline-delimited) instead of INSERTs.

    NOTE: Python 2 code (``StringIO.StringIO``, ``basestring``). Presumably
    postgres-only, since ``cursor.copy_from`` is a psycopg feature — TODO
    confirm against the supported backends.
    """
    cursor = connections[router.db_for_write(self.model)].cursor()
    data = StringIO.StringIO()
    fields = self.model._meta.local_fields
    # Build an InsertQuery purely to get the compiled parameter values
    # (one flat tuple per statement) — the SQL text itself is discarded.
    query = sql.InsertQuery(self.model)
    query.insert_values(fields, self.objects, raw=False)
    compiler = query.get_compiler(using=router.db_for_write(self.model))
    compiler.return_id = False
    fields = compiler.query.fields
    qn = compiler.connection.ops.quote_name
    columns = [qn(f.column) for f in fields]
    len_cols = len(columns)
    len_objs = len(self.objects)
    # COPY text-format escaping: double backslashes, strip tabs (the field
    # delimiter), and encode NULL as \N. Non-strings pass through as-is.
    r = lambda x: x.replace('\\', '\\\\').replace('\t', '') \
        if isinstance(x, basestring) else '\\N' if x is None else x
    # One '%s' placeholder per column, tab-joined; one row per line.
    body = '\n'.join(['\t'.join(['%s'] * len_cols)] * len_objs)
    for sqls, params in compiler.as_sql():
        data.write(body % tuple(map(r, params)))
    data.seek(0)
    cursor.copy_from(data, self.model._meta.db_table, columns=columns)
def bulk_insert_ignore(self, objs):
    '''Given an iterable of any model instance, uses the SQL cursor to
    apply a bunch of INSERT IGNORE statements in one go.

    Args:
        objs: Iterable of model instances to insert (duplicates silently
            skipped by MySQL's INSERT IGNORE).

    Returns:
        The cursor used for the insert, or None when ``objs`` is empty.
    '''
    if not objs:
        return
    # Get the fields from the model
    fields = self.model._meta.local_fields
    # Make an insert query
    query = sql.InsertQuery(self.model)
    # Given the objects and the fields, prepare the query
    query.insert_values(fields, objs, raw=False)
    compiled = query.get_compiler(self.db)
    compiled.return_id = False
    # Get the raw query and the values as a tuple
    q, values = compiled.as_sql()[0]
    # Rewrite only the leading INSERT keyword; a bare replace() would also
    # mangle any later occurrence of the substring in the statement.
    q = q.replace('INSERT', 'INSERT IGNORE', 1)
    # Get the cursor and execute
    cursor = connection.cursor()
    cursor.execute(q, values)
    return cursor
def bulk_insert_returning_ids(new_objects):
    """bulk_insert() does not set ids as per Django ticket #19527. However,
    postgres does support this, so we implement this manually in this
    function."""
    objects = list(new_objects)
    if not objects:
        return None

    if connection.vendor != "postgresql":
        # Do naive O(n) approach
        for obj in objects:
            obj.save()
        return objects

    # Postgres: compile one INSERT for all rows and append RETURNING so the
    # database hands the new primary keys back in a single query.
    model = objects[0].__class__
    insert = sql.InsertQuery(model)
    insert.insert_values(model._meta.fields[1:], objects)
    raw_sql, params = insert.sql_with_params()[0]
    returning = "RETURNING {pk.db_column} AS {pk.name}".format(pk=model._meta.pk)
    return list(model.objects.raw("%s %s" % (raw_sql, returning), params))
def _insert_query(
    self,
    model,
    objs,
    fields,
    return_id=True,
    raw=False,
    using=None,
    update=None,
    replace=False,
    ignore=False,
):
    """
    Build and execute an INSERT for ``objs``, optionally as
    ``REPLACE`` / ``INSERT IGNORE`` / ``ON DUPLICATE KEY UPDATE`` (MySQL).

    Args:
        model: Model class being inserted into.
        objs: Instances to insert.
        fields: Model fields to include in the statement.
        return_id: Whether to return the inserted id(s).
        raw: Passed through to ``insert_values``.
        using: Database alias for the compiler.
        update: Optional spec for ON DUPLICATE KEY UPDATE (default None —
            was a mutable default ``[]``, a shared-state pitfall; None is
            equivalent since the value is only truth-tested).
        replace: Emit REPLACE instead of INSERT.
        ignore: Emit INSERT IGNORE (takes precedence over ``replace``).
    """
    query = sql.InsertQuery(model)
    query.insert_values(fields, objs, raw=raw)
    compiler = query.get_compiler(using=using)

    post_str = ''
    if update:
        xupdate = self._build_update(fields, update)
        if xupdate:
            update_str = ','.join([
                "`%s` = %s" % (key, value)
                for key, value in six.iteritems(xupdate)
            ])
            post_str = 'on duplicate key update %s' % (update_str)

    pre_replace_str = ''
    if replace:
        pre_replace_str = 'REPLACE'
    if ignore:
        pre_replace_str = 'INSERT IGNORE'

    return self._insert_query_execute_sql(compiler=compiler,
                                          return_id=return_id,
                                          pre_replace_str=pre_replace_str,
                                          post_str=post_str)
def bulk_insert_returning_ids(new_objects, fields=None):
    """bulk_insert() does not set ids as per Django ticket #19527. However,
    postgres does support this, so we implement this manually in this
    function.

    Args:
        new_objects: Iterable of model instances to insert.
        fields: Optional list of extra column names to include in the
            RETURNING clause alongside the primary key.

    Returns:
        List of inserted objects (raw query results on postgres), or None
        when the input is empty.
    """
    new_objects = list(new_objects)
    if not new_objects:
        return None

    if connection.vendor == "postgresql":
        model = new_objects[0].__class__
        query = sql.InsertQuery(model)
        # fields[1:] skips the auto pk, which the database assigns.
        query.insert_values(model._meta.fields[1:], new_objects)
        raw_sql, params = query.sql_with_params()[0]
        # Build the RETURNING column list in its own variable instead of the
        # original `.format(**locals())`, which was opaque and rebound the
        # `fields` parameter to a SQL fragment string.
        returning_cols = ", ".join([model._meta.pk.db_column] + (fields or []))
        new_objects = list(
            model.objects.raw(
                "%s RETURNING %s" % (raw_sql, returning_cols), params))
    else:
        # Do naive O(n) approach
        for new_obj in new_objects:
            new_obj.save()
    return new_objects
def create_event(obj, *, label, using='default'):
    """Manually create an event for an object.

    Events are automatically linked with any context being tracked
    via `pghistory.context`.

    Args:
        obj (models.Model): An instance of a model.
        label (str): The event label.
        using (str): The database alias to execute against.

    Raises:
        ValueError: If the event label has not been registered for the model.

    Returns:
        models.Model: The created event model.
    """
    # Verify that the provided event is registered to the object model
    if (obj.__class__, label) not in _registered_events:
        raise ValueError(f'"{label}" is not a registered event for model'
                         f' {obj._meta.object_name}.')

    event_model = _registered_events[(obj.__class__, label)]
    # Copy every non-pgh_* field value from the tracked object onto the
    # event row, plus the label itself.
    event_model_kwargs = {
        'pgh_label': label,
        **{
            field.attname: getattr(obj, field.attname)
            for field in event_model._meta.fields
            if not field.name.startswith('pgh_')
        },
    }
    if hasattr(event_model, 'pgh_obj'):
        event_model_kwargs['pgh_obj'] = obj

    event_obj = event_model(**event_model_kwargs)

    # The event model is inserted manually with a custom SQL compiler
    # that attaches the context using the _pgh_attach_context
    # stored procedure. Django does not allow one to use F()
    # objects to reference stored procedures, so we have to
    # inject it with a custom SQL compiler here.
    query = sql.InsertQuery(event_model)
    # Exclude auto fields: the database assigns them.
    query.insert_values(
        [
            field for field in event_model._meta.fields
            if not isinstance(field, models.AutoField)
        ],
        [event_obj],
    )

    # NOTE(review): executes against 'default' regardless of the `using`
    # argument — looks like a latent bug; confirm before changing.
    vals = _InsertEventCompiler(query, connection, using='default').execute_sql(
        event_model._meta.fields)

    # Django <= 2.2 does not support returning fields from a bulk create,
    # which requires us to fetch fields again to populate the context
    # NOTE (@wesleykendall): We will eventually test multiple Django versions
    if isinstance(vals, int):  # pragma: no cover
        return event_model.objects.get(pgh_id=vals)
    else:
        # Django >= 3.1 returns the values as a list of one element
        if isinstance(vals, list) and len(vals) == 1:
            vals = vals[0]

        # Populate the in-memory event object with the database-assigned
        # values instead of re-fetching the row.
        for field, val in zip(event_model._meta.fields, vals):
            setattr(event_obj, field.attname, val)
        return event_obj
def insert_query_sqlx(model, values, return_id=False, raw_values=False):
    """overrides insert_query() from models.query module

    Instead of executing the statement, returns the compiled
    (sql, params) tuple for the insert.
    """
    insert = sql.InsertQuery(model, connection)
    insert.insert_values(values, raw_values)
    # return SQL tuple
    return insert.as_sql()