def txn():
    """Transactionally insert `ent`, enforcing key and constraint rules.

    Closure: reads `key`, `ent`, `self`, `results` and `was_in_transaction`
    from the enclosing scope and appends the Put() result to `results`.
    """
    if key is not None:
        # An explicit key was supplied: it must not already exist, and it
        # must not use the reserved "__" id prefix.
        if utils.key_exists(key):
            raise IntegrityError("Tried to INSERT with existing key")

        id_or_name = key.id_or_name()
        if isinstance(id_or_name, basestring) and id_or_name.startswith("__"):
            raise NotSupportedError("Datastore ids cannot start with __. Id was %s" % id_or_name)

    if not constraints.constraint_checks_enabled(self.model):
        # Fast path, just insert
        results.append(datastore.Put(ent))
        return

    markers = constraints.acquire(self.model, ent)
    try:
        results.append(datastore.Put(ent))
        if not was_in_transaction:
            # We can cache if we weren't in a transaction before this little nested one
            caching.add_entity_to_cache(
                self.model, ent, caching.CachingSituation.DATASTORE_GET_PUT)
    except:
        # Make sure we delete any created markers before we re-raise
        constraints.release_markers(markers)
        raise
def Run(self, limit, offset):
    """Run the wrapped unique query, serving a cached entity when possible.

    Falls through to the underlying query for keys-only/projection queries
    (a cached full entity cannot satisfy those). Otherwise tries the cache,
    revalidates the cached entity against the query, and on a miss performs
    a keys-only query followed by a consistent Get().

    Returns an iterator of entities.
    """
    opts = self._gae_query._Query__query_options
    if opts.keys_only or opts.projection:
        # Cached full entities can't satisfy keys-only/projection queries.
        return self._gae_query.Run(limit=limit, offset=offset)

    ret = caching.get_from_cache(self._identifier)
    if ret is not None and not utils.entity_matches_query(ret, self._gae_query):
        # The cached entity no longer matches the query; treat as a miss.
        ret = None

    if ret is None:
        # We do a fast keys_only query to get the result
        keys_query = Query(self._gae_query._Query__kind, keys_only=True)
        keys_query.update(self._gae_query)
        keys = keys_query.Run(limit=limit, offset=offset)

        # Do a consistent get so we don't cache stale data, and recheck the result matches the query.
        # FIX: datastore.Get() yields None for keys that no longer exist, so
        # guard with `x and ...` (as the sibling implementation does) instead
        # of passing None to entity_matches_query / returning None entries.
        ret = [
            x for x in datastore.Get(keys)
            if x and utils.entity_matches_query(x, self._gae_query)
        ]
        if len(ret) == 1:
            caching.add_entity_to_cache(
                self._model, ret[0], caching.CachingSituation.DATASTORE_GET)
        return iter(ret)

    return iter([ret])
def _update_entity(self, key):
    """Read, modify and Put() the entity at `key`.

    Evicts the cache entry first, applies the new field values from
    self.values, and maintains unique-constraint markers around the Put().
    Returns True on success, False if the entity no longer exists.
    """
    caching.remove_entity_from_cache_by_key(key)

    try:
        entity = datastore.Get(key)
    except datastore_errors.EntityNotFoundError:
        # Return false to indicate update failure
        return False

    unmodified = copy.deepcopy(entity)

    instance_kwargs = {field.attname: value for field, param, value in self.values}

    # Note: If you replace MockInstance with self.model, you'll find that some delete
    # tests fail in the test app. This is because any unspecified fields would then call
    # get_default (even though we aren't going to use them) which may run a query which
    # fails inside this transaction. Given as we are just using MockInstance so that we can
    # call django_instance_to_entity it on it with the subset of fields we pass in,
    # what we have is fine.
    instance = MockInstance(**instance_kwargs)

    # Update the entity we read above with the new values
    entity.update(django_instance_to_entity(
        self.connection,
        self.model,
        [x[0] for x in self.values],  # Pass in the fields that were updated
        True,
        instance,
    ))

    if not constraints.constraint_checks_enabled(self.model):
        # The fast path, no constraint checking
        datastore.Put(entity)
        caching.add_entity_to_cache(
            self.model, entity, caching.CachingSituation.DATASTORE_PUT)
        # Return true to indicate update success
        return True

    to_acquire, to_release = constraints.get_markers_for_update(
        self.model, unmodified, entity)

    # Acquire first, because if that fails then we don't want to alter what's already there
    constraints.acquire_identifiers(to_acquire, entity.key())
    try:
        datastore.Put(entity)
        caching.add_entity_to_cache(
            self.model, entity, caching.CachingSituation.DATASTORE_PUT)
    except:
        constraints.release_identifiers(to_acquire)
        raise
    else:
        # Now we release the ones we don't want anymore
        constraints.release_identifiers(to_release)

    # Return true to indicate update success
    return True
def _update_entity(self, key):
    """Read, revalidate, modify and Put() the entity at `key`.

    As well as applying self.values, this variant rejects entities that an
    eventually-consistent query returned but which no longer match it.
    Returns True on success, False if the entity is gone or stale.
    """
    caching.remove_entity_from_cache_by_key(key)

    try:
        entity = datastore.Get(key)
    except datastore_errors.EntityNotFoundError:
        # Return false to indicate update failure
        return False

    if (isinstance(self.select.gae_query, (Query, UniqueQuery))  # ignore QueryByKeys and NoOpQuery
            and not utils.entity_matches_query(entity, self.select.gae_query)):
        # Due to eventual consistency the query may have returned an entity
        # which no longer matches the query
        return False

    unmodified = copy.deepcopy(entity)

    instance_kwargs = {field.attname: value for field, param, value in self.values}

    # Note: If you replace MockInstance with self.model, you'll find that some delete
    # tests fail in the test app. This is because any unspecified fields would then call
    # get_default (even though we aren't going to use them) which may run a query which
    # fails inside this transaction. Given as we are just using MockInstance so that we can
    # call django_instance_to_entity it on it with the subset of fields we pass in,
    # what we have is fine.
    instance = MockInstance(**instance_kwargs)

    # Update the entity we read above with the new values
    entity.update(django_instance_to_entity(
        self.connection,
        self.model,
        [x[0] for x in self.values],  # Pass in the fields that were updated
        True,
        instance,
    ))

    if not constraints.constraint_checks_enabled(self.model):
        # The fast path, no constraint checking
        datastore.Put(entity)
        caching.add_entity_to_cache(
            self.model, entity, caching.CachingSituation.DATASTORE_PUT)
        # Return true to indicate update success
        return True

    to_acquire, to_release = constraints.get_markers_for_update(
        self.model, unmodified, entity)

    # Acquire first, because if that fails then we don't want to alter what's already there
    constraints.acquire_identifiers(to_acquire, entity.key())
    try:
        datastore.Put(entity)
        caching.add_entity_to_cache(
            self.model, entity, caching.CachingSituation.DATASTORE_PUT)
    except:
        constraints.release_identifiers(to_acquire)
        raise
    else:
        # Now we release the ones we don't want anymore
        constraints.release_identifiers(to_release)

    # Return true to indicate update success
    return True
def Run(self, limit, offset):
    """Run the wrapped query, returning a cached entity when one exists.

    Keys-only and projection queries bypass the cache entirely. On a cache
    miss the single fetched result (if any) is written back to the cache.
    Returns an iterator of entities.
    """
    opts = self._gae_query._Query__query_options
    if opts.keys_only or opts.projection:
        # Cached full entities can't satisfy keys-only/projection queries.
        return self._gae_query.Run(limit=limit, offset=offset)

    cached = caching.get_from_cache(self._identifier)
    if cached is not None:
        return iter([cached])

    fetched = [x for x in self._gae_query.Run(limit=limit, offset=offset)]
    if len(fetched) == 1:
        caching.add_entity_to_cache(
            self._model, fetched[0], caching.CachingSituation.DATASTORE_GET)
    return iter(fetched)
def Run(self, limit=None, offset=None):
    """Execute the per-key queries, via the cache for the single-key case.

    Fetches entities (from cache or a batch Get()), orders them with the
    Django ordering comparison, re-filters them against the originating
    queries and applies offset/limit. Returns an iterator of entities.
    """
    assert not self.queries[0]._Query__ancestor_pb  # FIXME: We don't handle this yet

    # FIXME: What if the query options differ?
    opts = self.queries[0]._Query__query_options

    entities = None

    # If we have a single key lookup going on, just hit the cache
    if len(self.queries_by_key) == 1:
        cache_hit = caching.get_from_cache_by_key(self.queries_by_key.keys()[0])
        if cache_hit is not None:
            entities = [cache_hit]

    # If there was nothing in the cache, or we had more than one key, then use Get()
    if entities is None:
        entities = datastore.Get(self.queries_by_key.keys())
        for entity in entities:
            # Get() returns None for keys that don't exist
            if entity is None:
                continue
            caching.add_entity_to_cache(
                self.model, entity, caching.CachingSituation.DATASTORE_GET)
        entities = sorted(
            (x for x in entities if x is not None),
            cmp=partial(utils.django_ordering_comparison, self.ordering),
        )

    entities = [
        _convert_entity_based_on_query_options(x, opts)
        for x in entities
        if any([utils.entity_matches_query(x, qry) for qry in self.queries_by_key[x.key()]])
    ]

    if offset:
        entities = entities[offset:]
    if limit is not None:
        entities = entities[:limit]

    return iter(entities)
def Run(self, limit, offset):
    """Run the unique query, preferring a cached entity that still matches.

    Keys-only/projection queries skip the cache. A cache miss is served by
    a keys-only query plus a consistent Get(), filtering out vanished
    entities and any that no longer match. Returns an iterator of entities.
    """
    opts = self._gae_query._Query__query_options
    if opts.keys_only or opts.projection:
        # Cached full entities can't satisfy keys-only/projection queries.
        return self._gae_query.Run(limit=limit, offset=offset)

    cached = caching.get_from_cache(self._identifier)
    if cached is not None and utils.entity_matches_query(cached, self._gae_query):
        return iter([cached])

    # We do a fast keys_only query to get the result
    key_query = Query(self._gae_query._Query__kind, keys_only=True)
    key_query.update(self._gae_query)
    matched_keys = key_query.Run(limit=limit, offset=offset)

    # Do a consistent get so we don't cache stale data, and recheck the
    # result matches the query (Get() yields None for vanished keys).
    entities = [
        x for x in datastore.Get(matched_keys)
        if x and utils.entity_matches_query(x, self._gae_query)
    ]
    if len(entities) == 1:
        caching.add_entity_to_cache(
            self._model, entities[0], caching.CachingSituation.DATASTORE_GET)
    return iter(entities)
def txn():
    """Transactional insert of `ent` (closure over the enclosing loop).

    Validates an explicitly supplied `key`, appends the Put() key to
    `results`, and acquires/releases unique-constraint markers as needed.
    """
    if key is not None:
        if utils.key_exists(key):
            raise IntegrityError("Tried to INSERT with existing key")

        id_or_name = key.id_or_name()
        uses_reserved_prefix = (
            isinstance(id_or_name, basestring) and id_or_name.startswith("__"))
        if uses_reserved_prefix:
            raise NotSupportedError("Datastore ids cannot start with __. Id was %s" % id_or_name)

    if constraints.constraint_checks_enabled(self.model):
        markers = constraints.acquire(self.model, ent)
        try:
            results.append(datastore.Put(ent))
            if not was_in_transaction:
                # We can cache if we weren't in a transaction before this little nested one
                caching.add_entity_to_cache(
                    self.model, ent, caching.CachingSituation.DATASTORE_GET_PUT)
        except:
            # Make sure we delete any created markers before we re-raise
            constraints.release_markers(markers)
            raise
    else:
        # Fast path, just insert
        results.append(datastore.Put(ent))
def Run(self, limit=None, offset=None):
    """Run the per-key queries, consulting the cache for single-key lookups.

    Results are ordered via the Django ordering comparison, re-checked
    against their originating queries, then sliced by offset/limit.
    Returns an iterator of entities.
    """
    assert not self.queries[0]._Query__ancestor_pb  # FIXME: We don't handle this yet

    # FIXME: What if the query options differ?
    opts = self.queries[0]._Query__query_options

    results = None

    # If we have a single key lookup going on, just hit the cache
    if len(self.queries_by_key) == 1:
        only_key = self.queries_by_key.keys()[0]
        hit = caching.get_from_cache_by_key(only_key)
        if hit is not None:
            results = [hit]

    # If there was nothing in the cache, or we had more than one key, then use Get()
    if results is None:
        results = datastore.Get(self.queries_by_key.keys())
        for result in results:
            if result is not None:
                caching.add_entity_to_cache(
                    self.model, result, caching.CachingSituation.DATASTORE_GET)
        ordering_cmp = partial(utils.django_ordering_comparison, self.ordering)
        results = sorted((x for x in results if x is not None), cmp=ordering_cmp)

    results = [
        _convert_entity_based_on_query_options(x, opts)
        for x in results
        if any([utils.entity_matches_query(x, qry) for qry in self.queries_by_key[x.key()]])
    ]

    if offset:
        results = results[offset:]
    if limit is not None:
        results = results[:limit]

    return iter(results)
def iter_results(results):
    """Yield ordered, query-matching entities with offset/limit applied.

    Closure: reads `self`, `offset`, `limit`, `cache` and `opts` from the
    enclosing scope. Entities skipped by the offset still count towards
    `returned` so the limit check below is against offset + limit.
    """
    returned = 0
    ordered = sorted(results, cmp=partial(utils.django_ordering_comparison, self.ordering))
    for entity in ordered:
        if entity is None:
            continue

        matches = [utils.entity_matches_query(entity, qry)
                   for qry in self.queries_by_key[entity.key()]]
        if not any(matches):
            continue

        if offset and returned < offset:
            # Skip entities based on offset
            returned += 1
            continue

        if cache:
            caching.add_entity_to_cache(
                self.model, entity, caching.CachingSituation.DATASTORE_GET)

        yield _convert_entity_based_on_query_options(entity, opts)
        returned += 1

        # If there is a limit, we might be done!
        if limit is not None and returned == (offset or 0) + limit:
            break
def execute(self):
    """Insert self.entities, returning the list of resulting datastore keys.

    When the model has a concrete PK (and no concrete parents), each entity
    is inserted in its own transaction so the key-existence check and the
    Put() are atomic. Otherwise a bulk Put() is used.

    Raises IntegrityError for an existing key and NotSupportedError for
    ids starting with "__" (reserved by the datastore).
    """
    if self.has_pk and not has_concrete_parents(self.model):
        results = []
        # We are inserting, but we specified an ID, we need to check for existence before we Put()
        # We do it in a loop so each check/put is transactional - because it's an ancestor query it shouldn't
        # cost any entity groups

        was_in_transaction = datastore.IsInTransaction()

        for key, ent in zip(self.included_keys, self.entities):
            @db.transactional
            def txn():
                if key is not None:
                    if utils.key_exists(key):
                        raise IntegrityError("Tried to INSERT with existing key")

                    id_or_name = key.id_or_name()
                    if isinstance(id_or_name, basestring) and id_or_name.startswith("__"):
                        raise NotSupportedError("Datastore ids cannot start with __. Id was %s" % id_or_name)

                if not constraints.constraint_checks_enabled(self.model):
                    # Fast path, just insert
                    results.append(datastore.Put(ent))
                else:
                    markers = constraints.acquire(self.model, ent)
                    try:
                        results.append(datastore.Put(ent))
                        if not was_in_transaction:
                            # We can cache if we weren't in a transaction before this little nested one
                            caching.add_entity_to_cache(
                                self.model, ent, caching.CachingSituation.DATASTORE_GET_PUT)
                    except:
                        # Make sure we delete any created markers before we re-raise
                        constraints.release_markers(markers)
                        raise

            # Make sure we notify app engine that we are using this ID
            # FIX: `key` may be None here (txn() above explicitly handles that
            # case), and None.kind() would raise AttributeError - only reserve
            # when an explicit key was supplied.
            # FIXME: Copy ancestor across to the template key
            if key is not None:
                reserve_id(key.kind(), key.id_or_name())

            txn()

        return results
    else:
        if not constraints.constraint_checks_enabled(self.model):
            # Fast path, just bulk insert
            results = datastore.Put(self.entities)
            for entity in self.entities:
                caching.add_entity_to_cache(
                    self.model, entity, caching.CachingSituation.DATASTORE_PUT)
            return results
        else:
            markers = []
            try:
                # FIXME: We should rearrange this so that each entity is handled individually like above.
                # We'll lose insert performance, but gain consistency on errors which is more important
                markers = constraints.acquire_bulk(self.model, self.entities)
                results = datastore.Put(self.entities)
                for entity in self.entities:
                    caching.add_entity_to_cache(
                        self.model, entity, caching.CachingSituation.DATASTORE_PUT)
            except:
                # Flatten the per-entity marker lists and release everything
                # we managed to acquire before re-raising
                to_delete = chain(*markers)
                constraints.release_markers(to_delete)
                raise

            for ent, k, m in zip(self.entities, results, markers):
                # NOTE(review): `ent.__key` is name-mangled to
                # `_<ClassName>__key` inside this class body - confirm that is
                # the attribute name consumers actually read.
                ent.__key = k
                constraints.update_instance_on_markers(ent, m)

            return results
def _update_entity(self, key):
    """Read, revalidate, modify and Put() the entity at `key`.

    This polymodel-aware variant preserves (and de-duplicates) the
    POLYMODEL_CLASS_ATTRIBUTE list across the update. Returns True on
    success, False if the entity is gone or no longer matches the query.
    """
    caching.remove_entity_from_cache_by_key(key)

    try:
        entity = datastore.Get(key)
    except datastore_errors.EntityNotFoundError:
        # Return false to indicate update failure
        return False

    if (isinstance(self.select.gae_query, (Query, UniqueQuery))  # ignore QueryByKeys and NoOpQuery
            and not utils.entity_matches_query(entity, self.select.gae_query)):
        # Due to eventual consistency the query may have returned an entity
        # which no longer matches the query
        return False

    unmodified = copy.deepcopy(entity)

    instance_kwargs = {field.attname: value for field, param, value in self.values}

    # Note: If you replace MockInstance with self.model, you'll find that some delete
    # tests fail in the test app. This is because any unspecified fields would then call
    # get_default (even though we aren't going to use them) which may run a query which
    # fails inside this transaction. Given as we are just using MockInstance so that we can
    # call django_instance_to_entity it on it with the subset of fields we pass in,
    # what we have is fine.
    instance = MockInstance(**instance_kwargs)

    # We need to add to the class attribute, rather than replace it!
    previous_classes = entity.get(POLYMODEL_CLASS_ATTRIBUTE, [])

    # Update the entity we read above with the new values
    entity.update(django_instance_to_entity(
        self.connection,
        self.model,
        [x[0] for x in self.values],  # Pass in the fields that were updated
        True,
        instance,
    ))

    # Make sure we keep all classes in the inheritence tree!
    if previous_classes:
        if entity[POLYMODEL_CLASS_ATTRIBUTE] is not None:
            entity[POLYMODEL_CLASS_ATTRIBUTE].extend(previous_classes)
        else:
            entity[POLYMODEL_CLASS_ATTRIBUTE] = previous_classes

    # Make sure we don't add duplicates
    if POLYMODEL_CLASS_ATTRIBUTE in entity:
        entity[POLYMODEL_CLASS_ATTRIBUTE] = list(set(entity[POLYMODEL_CLASS_ATTRIBUTE]))

    if not constraints.constraint_checks_enabled(self.model):
        # The fast path, no constraint checking
        datastore.Put(entity)
        caching.add_entity_to_cache(
            self.model, entity, caching.CachingSituation.DATASTORE_PUT)
        # Return true to indicate update success
        return True

    to_acquire, to_release = constraints.get_markers_for_update(
        self.model, unmodified, entity)

    # Acquire first, because if that fails then we don't want to alter what's already there
    constraints.acquire_identifiers(to_acquire, entity.key())
    try:
        datastore.Put(entity)
        caching.add_entity_to_cache(
            self.model, entity, caching.CachingSituation.DATASTORE_PUT)
    except:
        constraints.release_identifiers(to_acquire)
        raise
    else:
        # Now we release the ones we don't want anymore
        constraints.release_identifiers(to_release)

    # Return true to indicate update success
    return True
def _update_entity(self, key):
    """Upsert the entity at `key` with self.values.

    Unlike the plain update path, a missing entity is created (reserving
    its id first). Special per-column indexes are rebuilt for every updated
    field, and unique-constraint markers are maintained around the Put().

    Returns True on success so callers see the same success signal as the
    other _update_entity implementations; constraint errors propagate.
    """
    caching.remove_entity_from_cache_by_key(key)

    try:
        result = datastore.Get(key)
    except datastore_errors.EntityNotFoundError:
        # Entity doesn't exist yet: reserve the id so the allocator won't
        # hand it out again, then start from a fresh entity.
        reserve_id(key.kind(), key.id_or_name())
        result = datastore.Entity(key.kind(), id=key.id_or_name())

    original = copy.deepcopy(result)

    instance_kwargs = {field.attname: value for field, param, value in self.values}
    instance = MockInstance(**instance_kwargs)

    for field, param, value in self.values:
        column_value = get_prepared_db_value(self.connection, instance, field, raw=True)
        result[field.column] = column_value

        # Add special indexed fields
        for index in special_indexes_for_column(self.model, field.column):
            indexer = REQUIRES_SPECIAL_INDEXES[index]
            index_values = indexer.prep_value_for_database(column_value)

            if index_values is None:
                continue

            if not hasattr(index_values, "__iter__"):
                index_values = [index_values]

            # FIX: renamed the inner loop variable (was `value`) so it no
            # longer shadows the tuple element unpacked above.
            for indexed_value in index_values:
                column = indexer.indexed_column_name(field.column, indexed_value)
                if column in result:
                    # Promote a scalar to a list on the second value
                    if not isinstance(result[column], list):
                        result[column] = [result[column], indexed_value]
                    else:
                        result[column].append(indexed_value)
                else:
                    result[column] = indexed_value

    if not constraints.constraint_checks_enabled(self.model):
        # The fast path, no constraint checking
        datastore.Put(result)
        caching.add_entity_to_cache(
            self.model, result, caching.CachingSituation.DATASTORE_PUT)
        # FIX: previously fell through returning None (falsy), unlike the
        # sibling update paths which return True on success.
        return True

    to_acquire, to_release = constraints.get_markers_for_update(
        self.model, original, result)

    # Acquire first, because if that fails then we don't want to alter what's already there
    constraints.acquire_identifiers(to_acquire, result.key())
    try:
        datastore.Put(result)
        caching.add_entity_to_cache(
            self.model, result, caching.CachingSituation.DATASTORE_PUT)
    except:
        constraints.release_identifiers(to_acquire)
        raise
    else:
        # Now we release the ones we don't want anymore
        constraints.release_identifiers(to_release)

    # FIX: return True to indicate update success (previously returned None)
    return True