def _update_entity(self, key):
    """Apply self.values to the entity stored at `key` and save it back.

    Sends entity_pre_update/entity_post_update signals around the write and
    keeps unique-constraint marker entities in sync: new markers are acquired
    *before* the Put (so a constraint violation aborts the write), and markers
    made stale by the update are released only after the Put succeeds.
    """
    result = datastore.Get(key)
    # Pristine copy so constraints can diff old vs new values below.
    original = copy.deepcopy(result)

    for field, param, value in self.values:
        result[field.column] = get_prepared_db_value(self.connection, MockInstance(field, value), field)

        # Add special indexed fields (extra columns derived from the value,
        # e.g. for iexact/contains style lookups).
        for index in special_indexes_for_column(self.model, field.column):
            indexer = REQUIRES_SPECIAL_INDEXES[index]
            result[indexer.indexed_column_name(field.column)] = indexer.prep_value_for_database(value)

    entity_pre_update.send(sender=self.model, entity=result)

    to_acquire, to_release = constraints.get_markers_for_update(self.model, original, result)

    # Acquire first, because if that fails then we don't want to alter what's already there
    constraints.acquire_identifiers(to_acquire, result.key())
    try:
        datastore.Put(result)
    except:
        # Bare except is deliberate: undo the marker acquisition on *any*
        # failure, then re-raise the original exception unchanged.
        constraints.release_identifiers(to_acquire)
        raise
    else:
        # Now we release the ones we don't want anymore
        constraints.release_identifiers(to_release)

    entity_post_update.send(sender=self.model, entity=result)
def _update_entity(self, key):
    """Apply self.values to the entity at `key`, maintaining cache and constraints.

    The context-cache entry is evicted before the read so we fetch fresh data,
    and re-populated only after a successful Put. When constraint checking is
    disabled for the model we take a fast path with no marker bookkeeping.
    """
    caching.remove_entity_from_context_cache_by_key(key)

    result = datastore.Get(key)
    # Pristine copy so constraints can diff old vs new values below.
    original = copy.deepcopy(result)

    # One MockInstance carrying every updated attname, shared by all fields.
    instance_kwargs = {field.attname:value for field, param, value in self.values}
    instance = MockInstance(**instance_kwargs)
    for field, param, value in self.values:
        result[field.column] = get_prepared_db_value(self.connection, instance, field, raw=True)

        # Add special indexed fields
        for index in special_indexes_for_column(self.model, field.column):
            indexer = REQUIRES_SPECIAL_INDEXES[index]
            result[indexer.indexed_column_name(field.column)] = indexer.prep_value_for_database(value)

    if not constraints.constraint_checks_enabled(self.model):
        # The fast path, no constraint checking
        datastore.Put(result)
        caching.add_entity_to_context_cache(self.model, result)
    else:
        to_acquire, to_release = constraints.get_markers_for_update(self.model, original, result)

        # Acquire first, because if that fails then we don't want to alter what's already there
        constraints.acquire_identifiers(to_acquire, result.key())
        try:
            datastore.Put(result)
            caching.add_entity_to_context_cache(self.model, result)
        except:
            # Bare except is deliberate: release the freshly-acquired markers
            # on *any* failure, then re-raise unchanged.
            constraints.release_identifiers(to_acquire)
            raise
        else:
            # Now we release the ones we don't want anymore
            constraints.release_identifiers(to_release)
def _update_entity(self, key):
    """Apply self.values to the entity at `key` with unconditional constraint handling.

    Evicts the context-cache entry before reading, then acquires new unique
    markers before the Put and releases the stale ones only after it succeeds.
    """
    caching.remove_entity_from_context_cache_by_key(key)

    result = datastore.Get(key)
    # Pristine copy so constraints can diff old vs new values below.
    original = copy.deepcopy(result)

    for field, param, value in self.values:
        result[field.column] = get_prepared_db_value(self.connection, MockInstance(field, value), field)

        # Add special indexed fields
        for index in special_indexes_for_column(self.model, field.column):
            indexer = REQUIRES_SPECIAL_INDEXES[index]
            result[indexer.indexed_column_name(field.column)] = indexer.prep_value_for_database(value)

    to_acquire, to_release = constraints.get_markers_for_update(self.model, original, result)

    # Acquire first, because if that fails then we don't want to alter what's already there
    constraints.acquire_identifiers(to_acquire, result.key())
    try:
        datastore.Put(result)
        caching.add_entity_to_context_cache(self.model, result)
    except:
        # Bare except is deliberate: undo the acquisition on *any* failure.
        constraints.release_identifiers(to_acquire)
        raise
    else:
        # Now we release the ones we don't want anymore
        constraints.release_identifiers(to_release)
def _update_entity(self, key):
    """Fetch the entity at `key`, overwrite the updated columns and save it back."""
    entity = datastore.Get(key)

    for field, param, value in self.values:
        column = field.column
        entity[column] = get_prepared_db_value(self.connection, MockInstance(field, value), field)

        # Mirror the value into any special index columns registered for this column
        for index_name in special_indexes_for_column(self.model, column):
            indexer = REQUIRES_SPECIAL_INDEXES[index_name]
            entity[indexer.indexed_column_name(column)] = indexer.prep_value_for_database(value)

    datastore.Put(entity)
def parse_constraint(child, connection, negated=False):
    """Translate a Django where-node constraint into a (column, op, value) triple.

    Ops with a registered special index are rewritten through their indexer
    (column, value and operator are all transformed); all other ops just have
    their values prepped for datastore lookup. `negated` is rejected for
    special-index ops because negation is not supported there (see #80).
    """
    #First, unpack the constraint
    constraint, op, annotation, value = child
    was_list = isinstance(value, (list, tuple))
    packed, value = constraint.process(op, value, connection)
    alias, column, db_type = packed

    if constraint.field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError("Text and Blob fields are not indexed by the datastore, so you can't filter on them")

    if op not in REQUIRES_SPECIAL_INDEXES:
        #Don't convert if this op requires special indexes, it will be handled there
        value = [ connection.ops.prep_lookup_value(constraint.field.model, x, constraint.field, constraint=constraint) for x in value]

        #Don't ask me why, but constraint.process on isnull wipes out the value (it returns an empty list)
        # so we have to special case this to use the annotation value instead
        if op == "isnull":
            value = [ annotation ]

            # pk__isnull=True can never match anything: a stored entity
            # always has a key.
            if constraint.field.primary_key and value[0]:
                raise EmptyResultSet()

        if not was_list:
            value = value[0]
    else:
        if negated:
            raise CouldBeSupportedError("Special indexing does not currently supported negated queries. See #80")

        if not was_list:
            value = value[0]

        add_special_index(constraint.field.model, column, op) #Add the index if we can (e.g. on dev_appserver)
        if op not in special_indexes_for_column(constraint.field.model, column):
            raise RuntimeError("There is a missing index in your djangaeidx.yaml - \n\n{0}:\n\t{1}: [{2}]".format(
                constraint.field.model, column, op)
            )

        # Rewrite column/value/op through the indexer so the query hits the
        # derived index column instead of the raw one.
        indexer = REQUIRES_SPECIAL_INDEXES[op]
        column = indexer.indexed_column_name(column)
        value = indexer.prep_value_for_query(value)
        op = indexer.prep_query_operator(op)

    return column, op, value
def parse_constraint(child, connection):
    """Translate a Django where-node constraint into a (column, op, value) triple.

    Ops with a registered special index are rewritten through their indexer
    (column, value and operator are all transformed); all other ops just have
    their values prepped for datastore lookup.
    """
    #First, unpack the constraint
    constraint, op, annotation, value = child
    was_list = isinstance(value, (list, tuple))
    packed, value = constraint.process(op, value, connection)
    alias, column, db_type = packed

    if constraint.field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError("Text and Blob fields are not indexed by the datastore, so you can't filter on them")

    if op not in REQUIRES_SPECIAL_INDEXES:
        #Don't convert if this op requires special indexes, it will be handled there
        value = [ connection.ops.prep_lookup_value(constraint.field.model, x, constraint.field, constraint=constraint) for x in value]

        #Don't ask me why, but constraint.process on isnull wipes out the value (it returns an empty list)
        # so we have to special case this to use the annotation value instead
        if op == "isnull":
            value = [ annotation ]

        if not was_list:
            value = value[0]
    else:
        if not was_list:
            value = value[0]

        add_special_index(constraint.field.model, column, op) #Add the index if we can (e.g. on dev_appserver)
        if op not in special_indexes_for_column(constraint.field.model, column):
            raise RuntimeError("There is a missing index in your djangaeidx.yaml - \n\n{0}:\n\t{1}: [{2}]".format(
                constraint.field.model, column, op)
            )

        # Rewrite column/value/op through the indexer so the query hits the
        # derived index column instead of the raw one.
        indexer = REQUIRES_SPECIAL_INDEXES[op]
        column = indexer.indexed_column_name(column)
        value = indexer.prep_value_for_query(value)
        op = indexer.prep_query_operator(op)

    return column, op, value
def parse_constraint(child, connection, negated=False):
    """Translate a where-node constraint into a (column, op, value) triple.

    Handles both the pre-1.7 tuple form of `child` and the Django 1.7+
    lookup-object form. Special-index ops are rewritten through their indexer;
    everything else has its values prepped for the datastore.
    """
    if isinstance(child, tuple):
        # First, unpack the constraint
        constraint, op, annotation, value = child
        was_list = isinstance(value, (list, tuple))
        if isinstance(value, query.Query):
            # Subquery value: execute it and use the result.
            value = value.get_compiler(connection.alias).as_sql()[0].execute()
        else:
            packed, value = constraint.process(op, value, connection)
            alias, column, db_type = packed
        # NOTE(review): on the subquery path above, `column` looks unbound
        # here — presumably subqueries never reach the code that reads it;
        # TODO confirm.
        field = constraint.field
    else:
        # Django 1.7+
        field = child.lhs.target
        column = child.lhs.target.column
        op = child.lookup_name
        value = child.rhs
        annotation = value

        was_list = isinstance(value, (list, tuple))

        if isinstance(value, query.Query):
            value = value.get_compiler(connection.alias).as_sql()[0].execute()
        elif value != []:
            value = child.lhs.output_field.get_db_prep_lookup(
                child.lookup_name, child.rhs, connection, prepared=True)

    is_pk = field and field.primary_key

    if column == "id" and op == "iexact" and is_pk and isinstance(field, AutoField):
        # When new instance is created, automatic primary key 'id' does not generate '_idx_iexact_id'.
        # As the primary key 'id' (AutoField) is integer and is always case insensitive,
        # we can deal with 'id_iexact=' query by using 'exact' rather than 'iexact'.
        op = "exact"

    if field and field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError("Text and Blob fields are not indexed by the datastore, so you can't filter on them")

    if op not in REQUIRES_SPECIAL_INDEXES:
        # Don't convert if this op requires special indexes, it will be handled there
        if field:
            value = [ connection.ops.prep_lookup_value(field.model, x, field, column=column) for x in value]

        # Don't ask me why, but on Django 1.6 constraint.process on isnull wipes out the value (it returns an empty list)
        # so we have to special case this to use the annotation value instead
        if op == "isnull":
            if annotation is not None:
                value = [ annotation ]

            # pk__isnull=True can never match: stored entities always have keys.
            if is_pk and value[0]:
                raise EmptyResultSet()

        if not was_list:
            value = value[0]
    else:
        if negated:
            raise CouldBeSupportedError("Special indexing does not currently supported negated queries. See #80")

        if not was_list:
            value = value[0]

        add_special_index(field.model, column, op)  # Add the index if we can (e.g. on dev_appserver)
        if op not in special_indexes_for_column(field.model, column):
            raise RuntimeError("There is a missing index in your djangaeidx.yaml - \n\n{0}:\n\t{1}: [{2}]".format(
                field.model, column, op)
            )

        # Note: value is prepped *before* the column name is derived here,
        # because indexed_column_name takes the value into account.
        indexer = REQUIRES_SPECIAL_INDEXES[op]
        value = indexer.prep_value_for_query(value)
        column = indexer.indexed_column_name(column, value=value)
        op = indexer.prep_query_operator(op)

    return column, op, value
def django_instance_to_entity(connection, model, fields, raw, instance):
    """Convert a Django model instance into a datastore.Entity.

    The entity kind comes from the topmost concrete parent of `model` so all
    models in a concrete-inheritance chain share one kind. A truthy primary
    key becomes the key id (ints/longs) or name (strings); every other field
    becomes an entity property, plus derived properties for any special
    indexes registered for the column.

    Raises IntegrityError if a non-nullable field prepares to None, and
    ValueError for a primary key that is neither integral nor a string.
    """
    inheritance_root = get_top_concrete_parent(model)
    db_table = get_datastore_kind(inheritance_root)

    def value_from_instance(_instance, _field):
        # Returns (prepared_value, is_primary_key_of_the_inheritance_root).
        value = get_prepared_db_value(connection, _instance, _field, raw)

        if (not _field.null and not _field.primary_key) and value is None:
            raise IntegrityError("You can't set %s (a non-nullable "
                "field) to None!" % _field.name)

        is_primary_key = False
        if _field.primary_key and _field.model == inheritance_root:
            is_primary_key = True

        return value, is_primary_key

    concrete_classes = get_concrete_parents(model)
    classes = None
    if len(concrete_classes) > 1:
        classes = [ x._meta.db_table for x in concrete_classes ]

        # Copy before extending: previously this appended parent fields onto
        # the caller's `fields` list, mutating an input argument in place.
        fields = list(fields)
        for klass in concrete_classes[1:]:  # Ignore the current model
            fields.extend(klass._meta.fields)  # Add any parent fields

    field_values = {}
    primary_key = None

    for field in fields:
        value, is_primary_key = value_from_instance(instance, field)
        if is_primary_key:
            primary_key = value
        else:
            field_values[field.column] = value

            # Add special indexed fields
            for index in special_indexes_for_column(model, field.column):
                indexer = REQUIRES_SPECIAL_INDEXES[index]
                field_values[indexer.indexed_column_name(field.column)] = indexer.prep_value_for_database(value)

    kwargs = {}
    if primary_key:
        if isinstance(primary_key, (int, long)):
            kwargs["id"] = primary_key
        elif isinstance(primary_key, basestring):
            # Datastore key names may be up to 500 characters inclusive, so
            # only warn/truncate when the limit is actually exceeded (was
            # ">= 500", which spuriously warned on a legal 500-char key).
            if len(primary_key) > 500:
                warnings.warn("Truncating primary key that is over 500 characters. "
                              "THIS IS AN ERROR IN YOUR PROGRAM.",
                              RuntimeWarning)
                primary_key = primary_key[:500]

            kwargs["name"] = primary_key
        else:
            raise ValueError("Invalid primary key value")

    entity = datastore.Entity(db_table, **kwargs)
    entity.update(field_values)

    if classes:
        entity["class"] = classes

    return entity
def _build_gae_query(self):
    """ Build and return the Datstore Query object.

    Filters in self.where that the datastore supports natively are applied
    directly; "in" and "gt_and_lt" filters are collected and expanded into a
    MultiQuery at the end; special-index ops are rewritten through their
    indexer first.
    """
    combined_filters = []

    query_kwargs = {}

    if self.keys_only:
        query_kwargs["keys_only"] = self.keys_only
    elif self.projection:
        query_kwargs["projection"] = self.projection

    query = Query(
        self.db_table,
        **query_kwargs
    )

    # Polymodel-style filter: restrict to rows tagged with this model's table.
    if has_concrete_parents(self.model) and not self.model._meta.proxy:
        query["class ="] = self.model._meta.db_table

    DJANGAE_LOG.debug("Select query: {0}, {1}".format(self.model.__name__, self.where))
    for column, op, value in self.where:
        if column == self.pk_col:
            column = "__key__"

        final_op = OPERATORS_MAP.get(op)
        if final_op is None:
            if op in REQUIRES_SPECIAL_INDEXES:
                add_special_index(self.model, column, op) #Add the index if we can (e.g. on dev_appserver)

                if op not in special_indexes_for_column(self.model, column):
                    raise RuntimeError("There is a missing index in your djangaeidx.yaml - \n\n{0}:\n\t{1}: [{2}]".format(
                        self.model, column, op)
                    )

                # Rewrite through the indexer: derived column, prepped value,
                # equality comparison against the derived column.
                indexer = REQUIRES_SPECIAL_INDEXES[op]
                column = indexer.indexed_column_name(column)
                value = indexer.prep_value_for_query(value)
                query["%s =" % column] = value
            else:
                if op == "in":
                    combined_filters.append((column, op, value))
                elif op == "gt_and_lt":
                    combined_filters.append((column, op, value))
                elif op == "isnull":
                    query["%s =" % column] = None
                elif op == "startswith":
                    #You can emulate starts with by adding the last unicode char
                    #to the value, then doing <=. Genius.
                    query["%s >=" % column] = value

                    if isinstance(value, str):
                        value = value.decode("utf-8")

                    value += u'\ufffd'

                    query["%s <=" % column] = value
                else:
                    raise NotImplementedError("Unimplemented operator {0}".format(op))
        else:
            query["%s %s" % (column, final_op)] = value

    ordering = []
    for order in self.ordering:
        if isinstance(order, int):
            # Integer ordering: 1 means ascending on the first queried field.
            direction = datastore.Query.ASCENDING if order == 1 else datastore.Query.DESCENDING
            order = self.queried_fields[0]
        else:
            direction = datastore.Query.DESCENDING if order.startswith("-") else datastore.Query.ASCENDING
            order = order.lstrip("-")

        if order == self.model._meta.pk.column:
            order = "__key__"
        ordering.append((order, direction))

    if combined_filters:
        # Expand each combined filter into one query per branch; the cross
        # product of all branches becomes a MultiQuery. Note the inner loop
        # variable deliberately rebinds `query`.
        queries = [ query ]
        for column, op, value in combined_filters:
            new_queries = []
            for query in queries:
                if op == "in":
                    for val in value:
                        new_query = datastore.Query(self.model._meta.db_table)
                        new_query.update(query)
                        new_query["%s =" % column] = val
                        new_queries.append(new_query)
                elif op == "gt_and_lt":
                    for tmp_op in ("<", ">"):
                        new_query = datastore.Query(self.model._meta.db_table)
                        new_query.update(query)
                        new_query["%s %s" % (column, tmp_op)] = value
                        new_queries.append(new_query)
            queries = new_queries

        query = datastore.MultiQuery(queries, ordering)
    elif ordering:
        query.Order(*ordering)
    return query
def django_instance_to_entity(connection, model, fields, raw, instance, check_null=True):
    """Convert a Django model instance into a datastore.Entity.

    The entity kind comes from the topmost concrete parent of `model`. A
    truthy primary key becomes the key id (ints/longs) or name (strings);
    other fields become properties, with special-index columns added
    alongside (multiple values for the same index column accumulate into a
    list). Set check_null=False to skip the non-nullable-field check.
    """
    inheritance_root = get_top_concrete_parent(model)
    db_table = get_datastore_kind(inheritance_root)

    def value_from_instance(_instance, _field):
        # Returns (prepared_value, is_primary_key_of_the_inheritance_root).
        value = get_prepared_db_value(connection, _instance, _field, raw)

        if check_null and (not _field.null and not _field.primary_key) and value is None:
            raise IntegrityError("You can't set %s (a non-nullable "
                "field) to None!" % _field.name)

        is_primary_key = False
        if _field.primary_key and _field.model == inheritance_root:
            is_primary_key = True

        return value, is_primary_key

    field_values = {}
    primary_key = None

    for field in fields:
        value, is_primary_key = value_from_instance(instance, field)
        if is_primary_key:
            primary_key = value
        else:
            field_values[field.column] = value

            # Add special indexed fields
            for index in special_indexes_for_column(model, field.column):
                indexer = REQUIRES_SPECIAL_INDEXES[index]

                values = indexer.prep_value_for_database(value)
                if values is None:
                    continue

                # Indexers may yield a single value or an iterable of them.
                if not hasattr(values, "__iter__"):
                    values = [ values ]

                for v in values:
                    column = indexer.indexed_column_name(field.column, v)
                    # Several values can map to the same index column; fold
                    # them into a list property.
                    if column in field_values:
                        if not isinstance(field_values[column], list):
                            field_values[column] = [ field_values[column], v ]
                        else:
                            field_values[column].append(v)
                    else:
                        field_values[column] = v

    kwargs = {}
    if primary_key:
        if isinstance(primary_key, (int, long)):
            kwargs["id"] = primary_key
        elif isinstance(primary_key, basestring):
            if len(primary_key) > 500:
                warnings.warn("Truncating primary key that is over 500 characters. "
                              "THIS IS AN ERROR IN YOUR PROGRAM.",
                              RuntimeWarning)
                primary_key = primary_key[:500]

            kwargs["name"] = primary_key
        else:
            raise ValueError("Invalid primary key value")

    entity = datastore.Entity(db_table, **kwargs)
    entity.update(field_values)

    # Tag multi-table rows with every concrete table they belong to.
    classes = get_concrete_db_tables(model)
    if len(classes) > 1:
        entity["class"] = classes

    return entity
def django_instance_to_entity(connection, model, fields, raw, instance, check_null=True):
    """Translate a Django model instance into an App Engine datastore Entity.

    The entity kind is that of the topmost concrete parent of `model`; a
    truthy primary-key value becomes the key id (integral) or name (string),
    every other field becomes a property, and special-index columns are
    populated alongside (repeated index columns accumulate into lists).
    Pass check_null=False to allow None in non-nullable fields.
    """
    root = get_top_concrete_parent(model)
    kind = get_datastore_kind(root)

    properties = {}
    pk_value = None

    for field in fields:
        value = get_prepared_db_value(connection, instance, field, raw)

        # Non-nullable, non-pk fields must not end up as None (unless the
        # caller disabled the check).
        if value is None and check_null and not field.null and not field.primary_key:
            raise IntegrityError("You can't set %s (a non-nullable "
                "field) to None!" % field.name)

        # The root model's pk becomes the entity key, not a property.
        if field.primary_key and field.model == root:
            pk_value = value
            continue

        properties[field.column] = value

        # Populate any special index columns registered for this field.
        for index_type in special_indexes_for_column(model, field.column):
            indexer = REQUIRES_SPECIAL_INDEXES[index_type]

            prepared = indexer.prep_value_for_database(value)
            if prepared is None:
                continue
            if not hasattr(prepared, "__iter__"):
                prepared = [prepared]

            for single in prepared:
                index_column = indexer.indexed_column_name(field.column, single)
                if index_column not in properties:
                    properties[index_column] = single
                elif isinstance(properties[index_column], list):
                    properties[index_column].append(single)
                else:
                    properties[index_column] = [properties[index_column], single]

    key_kwargs = {}
    if pk_value:
        if isinstance(pk_value, (int, long)):
            key_kwargs["id"] = pk_value
        elif isinstance(pk_value, basestring):
            if len(pk_value) > 500:
                warnings.warn(
                    "Truncating primary key that is over 500 characters. "
                    "THIS IS AN ERROR IN YOUR PROGRAM.",
                    RuntimeWarning)
                pk_value = pk_value[:500]
            key_kwargs["name"] = pk_value
        else:
            raise ValueError("Invalid primary key value")

    entity = datastore.Entity(kind, **key_kwargs)
    entity.update(properties)

    # Tag multi-table rows with every concrete table they belong to.
    tables = get_concrete_db_tables(model)
    if len(tables) > 1:
        entity["class"] = tables

    return entity
def django_instance_to_entity(connection, model, fields, raw, instance):
    """Convert a Django model instance into a datastore.Entity.

    The entity kind comes from the topmost concrete parent of `model` so all
    models in a concrete-inheritance chain share one kind. A truthy primary
    key becomes the key id (ints/longs) or name (strings); every other field
    becomes an entity property, plus derived properties for any special
    indexes registered for the column.

    Raises IntegrityError if a non-nullable field prepares to None, and
    ValueError for a primary key that is neither integral nor a string.
    """
    inheritance_root = get_top_concrete_parent(model)
    db_table = get_datastore_kind(inheritance_root)

    def value_from_instance(_instance, _field):
        # Returns (prepared_value, is_primary_key_of_the_inheritance_root).
        value = get_prepared_db_value(connection, _instance, _field, raw)

        if (not _field.null and not _field.primary_key) and value is None:
            raise IntegrityError("You can't set %s (a non-nullable "
                "field) to None!" % _field.name)

        is_primary_key = False
        if _field.primary_key and _field.model == inheritance_root:
            is_primary_key = True

        return value, is_primary_key

    concrete_classes = get_concrete_parents(model)
    classes = None
    if len(concrete_classes) > 1:
        classes = [ x._meta.db_table for x in concrete_classes ]

        # Copy before extending: previously this appended parent fields onto
        # the caller's `fields` list, mutating an input argument in place.
        fields = list(fields)
        for klass in concrete_classes[1:]:  #Ignore the current model
            fields.extend(klass._meta.fields)  #Add any parent fields

    field_values = {}
    primary_key = None

    for field in fields:
        value, is_primary_key = value_from_instance(instance, field)
        if is_primary_key:
            primary_key = value
        else:
            field_values[field.column] = value

            #Add special indexed fields
            for index in special_indexes_for_column(model, field.column):
                indexer = REQUIRES_SPECIAL_INDEXES[index]
                field_values[indexer.indexed_column_name(field.column)] = indexer.prep_value_for_database(value)

    kwargs = {}
    if primary_key:
        # Accept both int and long: Python 2 auto-promotes large ints to
        # long, and a long pk previously fell through to ValueError (the
        # sibling versions of this function already accept (int, long)).
        if isinstance(primary_key, (int, long)):
            kwargs["id"] = primary_key
        elif isinstance(primary_key, basestring):
            # Datastore key names may be up to 500 characters inclusive, so
            # only warn/truncate when the limit is actually exceeded (was
            # ">= 500", which spuriously warned on a legal 500-char key).
            if len(primary_key) > 500:
                warnings.warn("Truncating primary key"
                    " that is over 500 characters. THIS IS AN ERROR IN YOUR PROGRAM.",
                    RuntimeWarning
                )
                primary_key = primary_key[:500]

            kwargs["name"] = primary_key
        else:
            raise ValueError("Invalid primary key value")

    entity = datastore.Entity(db_table, **kwargs)
    entity.update(field_values)

    if classes:
        entity["class"] = classes

    return entity
def _update_entity(self, key):
    """Apply self.values to the entity at `key`, creating it if it vanished.

    Evicts the cache entry first, reserves and recreates the entity if it no
    longer exists, then writes the update — via a fast path when constraint
    checks are disabled, otherwise acquiring new unique markers before the
    Put and releasing stale ones only after it succeeds.
    """
    caching.remove_entity_from_cache_by_key(key)

    try:
        result = datastore.Get(key)
    except datastore_errors.EntityNotFoundError:
        # The entity was deleted under us: reserve its id so the allocator
        # won't reissue it, and rebuild it from scratch.
        reserve_id(key.kind(), key.id_or_name())
        result = datastore.Entity(key.kind(), id=key.id_or_name())

    # Pristine copy so constraints can diff old vs new values below.
    original = copy.deepcopy(result)

    # One MockInstance carrying every updated attname, shared by all fields.
    instance_kwargs = { field.attname: value for field, param, value in self.values }
    instance = MockInstance(**instance_kwargs)
    for field, param, value in self.values:
        column_value = get_prepared_db_value(self.connection, instance, field, raw=True)
        result[field.column] = column_value

        # Add special indexed fields
        for index in special_indexes_for_column(self.model, field.column):
            indexer = REQUIRES_SPECIAL_INDEXES[index]

            values = indexer.prep_value_for_database(column_value)
            if values is None:
                continue

            # Indexers may yield a single value or an iterable of them.
            if not hasattr(values, "__iter__"):
                values = [values]

            # NOTE: this inner loop rebinds `value` from the outer loop;
            # harmless because the outer loop re-unpacks it each iteration.
            for value in values:
                column = indexer.indexed_column_name(field.column, value)
                # Several values can map to the same index column; fold them
                # into a list property.
                if column in result:
                    if not isinstance(result[column], list):
                        result[column] = [result[column], value]
                    else:
                        result[column].append(value)
                else:
                    result[column] = value

    if not constraints.constraint_checks_enabled(self.model):
        # The fast path, no constraint checking
        datastore.Put(result)
        caching.add_entity_to_cache(self.model, result, caching.CachingSituation.DATASTORE_PUT)
    else:
        to_acquire, to_release = constraints.get_markers_for_update(
            self.model, original, result)

        # Acquire first, because if that fails then we don't want to alter what's already there
        constraints.acquire_identifiers(to_acquire, result.key())
        try:
            datastore.Put(result)
            caching.add_entity_to_cache(
                self.model, result, caching.CachingSituation.DATASTORE_PUT)
        except:
            # Bare except is deliberate: release the freshly-acquired markers
            # on *any* failure, then re-raise unchanged.
            constraints.release_identifiers(to_acquire)
            raise
        else:
            # Now we release the ones we don't want anymore
            constraints.release_identifiers(to_release)