def execute(self):
    """Insert self.entities into the datastore and return the resulting keys.

    Two paths:
      * Caller supplied primary keys (and the model has no concrete parents):
        insert one entity at a time inside its own transaction so the
        existence check and the Put are atomic, raising IntegrityError on a
        key collision.
      * Otherwise: bulk-Put all entities, acquiring unique-constraint markers
        up front and releasing them if the Put fails.

    Returns:
        list of datastore keys (per-key path) or the result of the bulk
        datastore.Put (bulk path).

    Raises:
        IntegrityError: a supplied key already exists.
        NotSupportedError: a supplied string id starts with "__".
    """
    if self.has_pk and not has_concrete_parents(self.model):
        results = []
        # Explicit IDs were supplied, so each entity needs an existence
        # check before its Put. Doing it per-entity keeps each check/Put
        # transactional; per the original note, the ancestor query means
        # this shouldn't cost extra entity groups.
        for key, entity in zip(self.included_keys, self.entities):

            @db.transactional
            def insert_txn():
                if key is not None:
                    if utils.key_exists(key):
                        raise IntegrityError("Tried to INSERT with existing key")
                    # Datastore reserves the "__" prefix for internal kinds/ids.
                    id_or_name = key.id_or_name()
                    if isinstance(id_or_name, basestring) and id_or_name.startswith("__"):
                        raise NotSupportedError("Datastore ids cannot start with __. Id was %s" % id_or_name)

                markers = constraints.acquire(self.model, entity)
                try:
                    results.append(datastore.Put(entity))
                    caching.add_entity_to_context_cache(self.model, entity)
                except:
                    # Roll back any constraint markers we created, then re-raise.
                    constraints.release_markers(markers)
                    raise

            insert_txn()

        return results

    # Bulk path: acquire all markers first, then Put everything at once.
    markers = constraints.acquire_bulk(self.model, self.entities)
    try:
        results = datastore.Put(self.entities)
        for entity in self.entities:
            caching.add_entity_to_context_cache(self.model, entity)
    except:
        # Flatten the per-entity marker lists and release them before re-raising.
        constraints.release_markers(chain(*markers))
        raise

    for entity, marker_group in zip(self.entities, markers):
        constraints.update_instance_on_markers(entity, marker_group)

    return results
def execute(self):
    """Insert self.entities into the datastore, firing entity_post_insert
    for each successfully inserted entity, and return the resulting keys.

    Two paths:
      * Caller supplied primary keys (and the model has no concrete parents):
        insert one entity per transaction so the existence check and the Put
        are atomic; IntegrityError is raised on a key collision.
      * Otherwise: bulk-Put all entities under unique-constraint markers,
        releasing the markers if the Put fails.

    Returns:
        list of datastore keys (per-key path) or the result of the bulk
        datastore.Put (bulk path).

    Raises:
        IntegrityError: a supplied key already exists.
    """
    if self.has_pk and not has_concrete_parents(self.model):
        results = []
        # Explicit IDs were supplied: check-then-Put per entity, each inside
        # its own transaction. Per the original note the ancestor query means
        # this shouldn't cost extra entity groups.
        for key, entity in zip(self.included_keys, self.entities):

            @db.transactional
            def insert_txn():
                if key is not None:
                    if utils.key_exists(key):
                        raise IntegrityError("Tried to INSERT with existing key")

                markers = constraints.acquire(self.model, entity)
                try:
                    results.append(datastore.Put(entity))
                except:
                    # Roll back the constraint markers we created, then re-raise.
                    constraints.release_markers(markers)
                    raise

                # Signal fires inside the transaction, only after a successful Put.
                entity_post_insert.send(sender=self.model, entity=entity)

            insert_txn()

        return results

    # Bulk path: acquire all markers first, then Put everything at once.
    markers = constraints.acquire_bulk(self.model, self.entities)
    try:
        results = datastore.Put(self.entities)
    except:
        # Flatten the per-entity marker lists and release them before re-raising.
        constraints.release_markers(chain(*markers))
        raise

    for entity, marker_group in zip(self.entities, markers):
        constraints.update_instance_on_markers(entity, marker_group)
        entity_post_insert.send(sender=self.model, entity=entity)

    return results
def execute(self):
    """Insert self.entities into the datastore and return the resulting keys.

    Two paths:
      * Caller supplied primary keys (and the model has no concrete parents):
        insert one entity per transaction so the existence check and the Put
        are atomic, then reserve the used ID with App Engine.
      * Otherwise: bulk-Put, with or without unique-constraint markers
        depending on constraints.constraint_checks_enabled(self.model).

    Returns:
        list of datastore keys (per-key path) or the result of the bulk
        datastore.Put (bulk path).

    Raises:
        IntegrityError: a supplied key already exists.
        NotSupportedError: a supplied string id starts with "__".
    """
    if self.has_pk and not has_concrete_parents(self.model):
        results = []
        # Explicit IDs were supplied: check-then-Put per entity, each inside
        # its own transaction. Per the original note the ancestor query means
        # this shouldn't cost extra entity groups.
        # Caching is only safe when we were NOT already inside an outer
        # transaction before starting our own nested one.
        was_in_transaction = datastore.IsInTransaction()

        for key, entity in zip(self.included_keys, self.entities):

            @db.transactional
            def insert_txn():
                if key is not None:
                    if utils.key_exists(key):
                        raise IntegrityError("Tried to INSERT with existing key")
                    # Datastore reserves the "__" prefix for internal kinds/ids.
                    id_or_name = key.id_or_name()
                    if isinstance(id_or_name, basestring) and id_or_name.startswith("__"):
                        raise NotSupportedError("Datastore ids cannot start with __. Id was %s" % id_or_name)

                if not constraints.constraint_checks_enabled(self.model):
                    # Fast path: no unique constraints to maintain, just insert.
                    results.append(datastore.Put(entity))
                else:
                    markers = constraints.acquire(self.model, entity)
                    try:
                        results.append(datastore.Put(entity))
                        if not was_in_transaction:
                            # Safe to cache: this nested transaction is the only one.
                            caching.add_entity_to_cache(
                                self.model, entity, caching.CachingSituation.DATASTORE_GET_PUT
                            )
                    except:
                        # Roll back the constraint markers we created, then re-raise.
                        constraints.release_markers(markers)
                        raise

                # Tell App Engine this ID is in use so the allocator skips it.
                # FIXME (from original): copy ancestor across to the template key.
                # NOTE(review): placement inside the transaction and the lack of a
                # `key is not None` guard here are reconstructed from mangled
                # source — confirm against upstream history.
                reserve_id(key.kind(), key.id_or_name())

            insert_txn()

        return results

    if not constraints.constraint_checks_enabled(self.model):
        # Fast path: bulk insert with no constraint bookkeeping.
        results = datastore.Put(self.entities)
        for entity in self.entities:
            caching.add_entity_to_cache(
                self.model, entity, caching.CachingSituation.DATASTORE_PUT
            )
        return results

    # FIXME (from original): rearrange so each entity is handled individually
    # like the keyed path above — slower inserts, but consistent on errors,
    # which matters more.
    markers = []
    try:
        markers = constraints.acquire_bulk(self.model, self.entities)
        results = datastore.Put(self.entities)
        for entity in self.entities:
            caching.add_entity_to_cache(
                self.model, entity, caching.CachingSituation.DATASTORE_PUT
            )
    except:
        # Flatten the per-entity marker lists and release them before re-raising.
        constraints.release_markers(chain(*markers))
        raise

    for entity, result_key, marker_group in zip(self.entities, results, markers):
        # NOTE(review): `__key` is name-mangled to _<EnclosingClass>__key on the
        # entity instance here — confirm this private-attribute write is intended.
        entity.__key = result_key
        constraints.update_instance_on_markers(entity, marker_group)

    return results
def execute(self):
    """Insert self.entities into the datastore and return the resulting keys.

    Two paths:
      * Caller supplied primary keys (and the model has no concrete parents):
        insert one entity per transaction so the existence check and the Put
        are atomic, then notify App Engine that any integer ID is in use via
        db.allocate_id_range.
      * Otherwise: bulk-Put, with or without unique-constraint markers
        depending on constraints.constraint_checks_enabled(self.model).

    Returns:
        list of datastore keys (per-key path) or the result of the bulk
        datastore.Put (bulk path).

    Raises:
        IntegrityError: a supplied key already exists.
        NotSupportedError: a supplied string id starts with "__".
    """
    if self.has_pk and not has_concrete_parents(self.model):
        results = []
        # Explicit IDs were supplied: check-then-Put per entity, each inside
        # its own transaction. Per the original note the ancestor query means
        # this shouldn't cost extra entity groups.
        for key, entity in zip(self.included_keys, self.entities):

            @db.transactional
            def insert_txn():
                if key is not None:
                    if utils.key_exists(key):
                        raise IntegrityError("Tried to INSERT with existing key")
                    # Datastore reserves the "__" prefix for internal kinds/ids.
                    id_or_name = key.id_or_name()
                    if isinstance(id_or_name, basestring) and id_or_name.startswith("__"):
                        raise NotSupportedError("Datastore ids cannot start with __. Id was %s" % id_or_name)

                if not constraints.constraint_checks_enabled(self.model):
                    # Fast path: no unique constraints to maintain, just insert.
                    results.append(datastore.Put(entity))
                else:
                    markers = constraints.acquire(self.model, entity)
                    try:
                        results.append(datastore.Put(entity))
                        caching.add_entity_to_context_cache(self.model, entity)
                    except:
                        # Roll back the constraint markers we created, then re-raise.
                        constraints.release_markers(markers)
                        raise

                # Tell App Engine this integer ID is in use so the allocator
                # skips it. FIXME (from original): copy ancestor across to the
                # template key.
                # NOTE(review): placement inside the transaction and the lack of
                # a `key is not None` guard here are reconstructed from mangled
                # source — confirm against upstream history.
                id_or_name = key.id_or_name()
                if isinstance(id_or_name, (int, long)):
                    try:
                        db.allocate_id_range(
                            datastore.Key.from_path(key.kind(), 1), id_or_name, id_or_name
                        )
                    except:
                        # Deliberately non-fatal: the insert already succeeded,
                        # but we need to know why the notification failed.
                        logging.exception("An error occurred when notifying app engine that an ID has been used. Please report.")

            insert_txn()

        return results

    if not constraints.constraint_checks_enabled(self.model):
        # Fast path: bulk insert with no constraint bookkeeping.
        results = datastore.Put(self.entities)
        for entity in self.entities:
            caching.add_entity_to_context_cache(self.model, entity)
        return results

    # FIXME (from original): rearrange so each entity is handled individually
    # like the keyed path above — slower inserts, but consistent on errors,
    # which matters more.
    markers = []
    try:
        markers = constraints.acquire_bulk(self.model, self.entities)
        results = datastore.Put(self.entities)
        for entity in self.entities:
            caching.add_entity_to_context_cache(self.model, entity)
    except:
        # Flatten the per-entity marker lists and release them before re-raising.
        constraints.release_markers(chain(*markers))
        raise

    for entity, marker_group in zip(self.entities, markers):
        constraints.update_instance_on_markers(entity, marker_group)

    return results