Example #1
def _extract_projected_columns_from_query_17(query):
    result = []

    if query.select:
        for x in query.select:
            if x.field is None:
                model = get_model_from_db_table(x.col.col[0])
                if get_top_concrete_parent(model) != get_top_concrete_parent(query.model):
                    raise NotSupportedError("Attempted a cross-join select which is not supported on the datastore")

                column = x.col.col[1]  # This is the column we are getting
            else:
                column = x.field.column

            result.append(column)
        return result
    else:
        # If the query uses defer()/only() then we need to process the deferred fields. We have to get the
        # loaded field names for all (concrete) inherited models and then only include columns that appear in that list
        only_load = query.get_loaded_field_names()
        if only_load:
            for field, model in query.model._meta.get_concrete_fields_with_model():
                model = model or query.model
                try:
                    if field.name in only_load[model]:
                        # Add a field that has been explicitly included
                        result.append(field.column)
                except KeyError:
                    # Model wasn't explicitly listed in the only_load table
                    # Therefore, we need to load all fields from this model
                    result.append(field.column)
            return result
        else:
            return []
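
A note on the defer()/only() branch above: query.get_loaded_field_names() maps each model to the set of field names that should be loaded, and a model missing from that mapping contributes all of its fields via the KeyError fallback. The following is a minimal, self-contained sketch of that selection logic using toy data structures (not djangae's real ones), just to make the two code paths concrete:

def select_columns(fields_with_model, only_load, default_model):
    # fields_with_model: list of (field_name, column, model_name) tuples,
    # model_name being None for fields defined on default_model itself.
    # only_load: dict mapping model_name -> set of field names to load.
    result = []
    for field_name, column, model_name in fields_with_model:
        model_name = model_name or default_model
        try:
            if field_name in only_load[model_name]:
                result.append(column)  # explicitly included via only()
        except KeyError:
            # Model wasn't listed at all, so include every one of its fields
            result.append(column)
    return result

# select_columns([("id", "id", None), ("name", "name", None), ("age", "age", None)],
#                {"child": {"id", "name"}}, "child") -> ["id", "name"]
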
Example #2
    def set_leaf(self, column, operator, value, is_pk_field, negated, namespace, target_field=None):
        assert column
        assert operator
        assert isinstance(is_pk_field, bool)
        assert isinstance(negated, bool)

        if operator == "iexact" and isinstance(target_field, AutoField):
            # When a new instance is created, the automatic primary key 'id' does not generate an '_idx_iexact_id' entry.
            # As the primary key 'id' (an AutoField) is an integer and therefore always case-insensitive,
            # we can handle an 'id__iexact=' query with 'exact' rather than 'iexact'.
            operator = "exact"
            value = int(value)

        if is_pk_field:
            # If this is a primary key, we need to make sure that the value
            # we pass to the query is a datastore Key. We have to deal with IN queries here
            # because they aren't flattened until the DNF stage
            model = get_top_concrete_parent(target_field.model)
            table = model._meta.db_table

            if isinstance(value, (list, tuple)):
                value = [
                    datastore.Key.from_path(table, x, namespace=namespace)
                    for x in value if x
                ]
            else:
                if operator == "isnull" and value is True:
                    # FIXME: Strictly, this isn't correct; this could be one of several branches,
                    # but id=None filters are silly anyway. This should probably be moved to after
                    # normalization. It fixes a test in Django which does this in get_or_create for some reason
                    raise EmptyResultSet()

                if not value:
                    # Empty strings and 0 are forbidden as keys
                    # so make this an impossible filter
                    # FIXME: This is a hack! It screws with the ordering
                    # because it's an inequality. Instead we should wipe this
                    # filter out when preprocessing in the DNF (because it's impossible)
                    value = datastore.Key.from_path('', 1)
                    operator = '<'
                else:
                    value = datastore.Key.from_path(table, value, namespace=namespace)
            column = "__key__"

        # Do any special index conversions necessary to perform this lookup
        if operator in REQUIRES_SPECIAL_INDEXES:
            if is_pk_field:
                column = model._meta.pk.column
                value = unicode(value.id_or_name())

            add_special_index(target_field.model, column, operator, value)
            indexer = REQUIRES_SPECIAL_INDEXES[operator]
            index_type = indexer.prepare_index_type(operator, value)
            value = indexer.prep_value_for_query(value)
            column = indexer.indexed_column_name(column, value, index_type)
            operator = indexer.prep_query_operator(operator)

        self.column = column
        self.operator = convert_operator(operator)
        self.value = value
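
The primary-key handling above boils down to converting Django pk values into datastore Keys for the root concrete kind before the filter reaches the Datastore. A small sketch of just that conversion, mirroring the list branch above and assuming the App Engine SDK's datastore.Key.from_path as used in the example itself (the table name in the usage line is made up):

from google.appengine.api import datastore  # App Engine SDK, as in the example above

def pks_to_keys(table, values, namespace=None):
    # Mirror the list/tuple branch: drop falsy pks (0 and "" are forbidden
    # as key ids/names) and build one Key per remaining value.
    return [
        datastore.Key.from_path(table, value, namespace=namespace)
        for value in values if value
    ]

# keys = pks_to_keys("myapp_mymodel", [1, 2, 3], namespace="prod")
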
Example #3
    def get_transformed_query(self):
        self._prepare_for_transformation()

        kind = self._determine_query_kind()

        ret = Query(self.model, kind)
        ret.connection = self.connection

        # Add the root concrete table as the source table
        root_table = get_top_concrete_parent(self.model)._meta.db_table
        ret.add_source_table(root_table)

        self._apply_ordering_to_query(ret)
        self._set_projected_columns_on_query(ret)
        self._apply_extra_selects_to_query(ret)
        self._apply_distinct_columns_to_query(ret)
        self._apply_annotations_to_query(ret)

        # Extract any query offsets and limits
        ret.low_mark = self.django_query.low_mark
        ret.high_mark = self.django_query.high_mark

        output = self._generate_where_node()

        # If there are no child nodes, just wipe out the where
        if not output.children:
            output = None

        ret.where = output
        return ret
Example #4
    def __init__(self, model, kind):
        assert kind in VALID_QUERY_KINDS

        self.model = model
        self.concrete_model = get_top_concrete_parent(model)
        self.kind = kind

        self.projection_possible = True
        self.tables = []

        self.columns = None # None means all fields
        self.init_list = []

        self.distinct = False
        self.order_by = []
        self.row_data = [] # For insert/updates
        self._where = None
        self.low_mark = self.high_mark = None

        self.annotations = []
        self.per_entity_annotations = []
        self.extra_selects = []
        self.polymodel_filter_added = False

        # A list of PKs that should be excluded from the resultset
        self.excluded_pks = set()
Example #5
 def _set_db_table(self):
     """ Work out which Datastore kind we should actually be querying. This allows for poly
         models, i.e. non-abstract parent models which we support by storing all fields for
         both the parent model and its child models on the parent table.
     """
     inheritance_root = get_top_concrete_parent(self.model)
     self.db_table = inheritance_root._meta.db_table
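
Almost every example here calls get_top_concrete_parent (from djangae.db.utils) to find the root of a concrete inheritance chain, which is what makes multi-table inheritance collapse onto a single Datastore kind. Its actual implementation isn't shown on this page; the following is only a plausible minimal sketch of the idea, built on the same _meta.parents mapping the examples themselves rely on:

def top_concrete_parent(model):
    # Illustrative sketch only, not djangae's actual implementation:
    # walk up concrete (non-abstract) parents until none are left.
    for parent in model._meta.parents.keys():
        if not parent._meta.abstract:
            return top_concrete_parent(parent)
    return model  # no concrete parent: this is the inheritance root
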
Example #6
def _transform_query_18(connection, kind, query):
    from django.db.models.sql.where import EmptyWhere
    if isinstance(query.where, EmptyWhere):
        # Empty where means return nothing!
        raise EmptyResultSet()

    ret = Query(query.model, kind)
    ret.connection = connection

    # Add the root concrete table as the source table
    root_table = get_top_concrete_parent(query.model)._meta.db_table
    ret.add_source_table(root_table)

    # Extract the ordering of the query results
    for order_col in _extract_ordering_from_query_18(query):
        ret.add_order_by(order_col)

    # Extract any projected columns (values/values_list/only/defer)
    for projected_col in _extract_projected_columns_from_query_18(query):
        ret.add_projected_column(projected_col)

    # Add any extra selects
    for col, select in query.extra_select.items():
        ret.add_extra_select(col, select[0])

    if query.distinct:
        # This must happen after extracting projected cols
        ret.set_distinct(list(query.distinct_fields))

    # Process annotations!
    if query.annotation_select:
        for k, v in query.annotation_select.items():
            ret.add_annotation(k, v)

    # Extract any query offsets and limits
    ret.low_mark = query.low_mark
    ret.high_mark = query.high_mark

    output = WhereNode()
    output.connector = query.where.connector

    _walk_django_where(
        query,
        _django_18_query_walk_trunk,
        _django_18_query_walk_leaf,
        new_parent=output,
        connection=connection,
        negated=query.where.negated,
        model=query.model
    )

    # If there are no child nodes, just wipe out the where
    if not output.children:
        output = None

    ret.where = output

    return ret
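
low_mark and high_mark are Django's internal slice bounds (MyModel.objects.all()[10:20] gives low_mark=10, high_mark=20), and copying them onto the transformed query is what turns queryset slicing into an offset/limit fetch. A small illustrative helper, assuming that is how the backend ultimately consumes these marks:

def slice_to_offset_limit(low_mark, high_mark):
    # Convert Django's slice bounds into an (offset, limit) pair.
    offset = low_mark or 0
    limit = None if high_mark is None else high_mark - offset
    return offset, limit

# slice_to_offset_limit(10, 20) -> (10, 10)
# slice_to_offset_limit(0, None) -> (0, None), i.e. no limit
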
Example #7
    def set_leaf(self, column, operator, value, is_pk_field, negated, lookup_name, namespace, target_field=None):
        assert column
        assert operator
        assert isinstance(is_pk_field, bool)
        assert isinstance(negated, bool)

        if operator == "iexact" and isinstance(target_field, AutoField):
            # When a new instance is created, the automatic primary key 'id' does not generate an '_idx_iexact_id' entry.
            # As the primary key 'id' (an AutoField) is an integer and therefore always case-insensitive,
            # we can handle an 'id__iexact=' query with 'exact' rather than 'iexact'.
            operator = "exact"
            value = int(value)

        if is_pk_field:
            # If this is a primary key, we need to make sure that the value
            # we pass to the query is a datastore Key. We have to deal with IN queries here
            # because they aren't flattened until the DNF stage
            model = get_top_concrete_parent(target_field.model)
            table = model._meta.db_table

            if isinstance(value, (list, tuple)):
                value = [
                    datastore.Key.from_path(table, x, namespace=namespace)
                    for x in value if x
                ]
            else:
                if (operator == "isnull" and value is True) or not value:
                    # id=None will never return anything and
                    # Empty strings and 0 are forbidden as keys
                    self.will_never_return_results = True
                else:
                    value = datastore.Key.from_path(table, value, namespace=namespace)
            column = "__key__"

        # Do any special index conversions necessary to perform this lookup
        special_indexer = get_indexer(target_field, operator)

        if special_indexer:
            if is_pk_field:
                column = model._meta.pk.column
                value = unicode(value.id_or_name())

            add_special_index(target_field.model, column, special_indexer, operator, value)
            index_type = special_indexer.prepare_index_type(operator, value)
            value = special_indexer.prep_value_for_query(value)
            column = special_indexer.indexed_column_name(column, value, index_type)
            operator = special_indexer.prep_query_operator(operator)

        self.column = column
        self.operator = convert_operator(operator)
        self.value = value
        self.lookup_name = lookup_name
Example #8
    def __init__(self, connection, query):
        self.model = query.model
        self.select = SelectCommand(connection, query, keys_only=True)
        self.query = self.select.query
        self.namespace = connection.ops.connection.settings_dict.get("NAMESPACE")

        # It seems query.tables is populated in most cases, but I have seen cases (albeit in testing)
        # where this isn't the case (particularly when not filtering on anything). In that case
        # fall back to the model table (perhaps we should do something smarter here).
        self.table_to_delete = (
            query.tables[0] if query.tables else
            utils.get_top_concrete_parent(query.model)._meta.db_table
        )
Example #9
def unique_identifiers_from_entity(model, entity, ignore_pk=False, ignore_null_values=True):
    """
        Given an instance, this function returns a list of identifier strings that represent
        unique field/value combinations.
    """
    from djangae.db.utils import get_top_concrete_parent

    unique_combinations = _unique_combinations(model, ignore_pk)

    meta = model._meta

    identifiers = []
    for combination in unique_combinations:
        combo_identifiers = [[]]

        include_combination = True

        for field_name in combination:
            field = meta.get_field(field_name)

            if field.primary_key:
                value = entity.key().id_or_name()
            else:
                value = entity.get(field.column)  # Get the value from the entity

            # If ignore_null_values is True, then we don't include combinations where the value is None,
            # or where a multi-value field is empty (you can't store None in a list, so an empty list means no value)
            if (value is None and ignore_null_values) or (not value and isinstance(value, (list, set))):
                include_combination = False
                break

            if not isinstance(value, (list, set)):
                value = [value]

            new_combo_identifiers = []

            for existing in combo_identifiers:
                for v in value:
                    identifier = "{}:{}".format(field.column, _format_value_for_identifier(v))
                    new_combo_identifiers.append(existing + [identifier])

            combo_identifiers = new_combo_identifiers

        if include_combination:
            for ident in combo_identifiers:
                identifiers.append(get_top_concrete_parent(model)._meta.db_table + "|" + "|".join(ident))

    return identifiers
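
To make the output format concrete: each identifier is the root concrete table name joined with one "column:value" pair per field in the unique constraint, separated by "|". A hypothetical single-field example (the table and value are made up, and _format_value_for_identifier is djangae's value-escaping helper whose exact output isn't shown here):

# Hypothetical assembly of one identifier:
table = "myapp_user"                                   # get_top_concrete_parent(model)._meta.db_table
pair = "{}:{}".format("email", "<formatted value>")    # _format_value_for_identifier(value)
identifier = table + "|" + pair                        # "myapp_user|email:<formatted value>"
# A multi-column unique constraint simply joins more "column:value" pairs with "|".
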
Example #10
def _django_18_query_walk_leaf(node, negated, new_parent, connection, model):
    new_node = WhereNode()

    if not hasattr(node, "lhs"):
        raise NotSupportedError("Attempted probable subquery, these aren't supported on the datastore")

    # Leaf
    if hasattr(node.lhs, 'target'):
        # From Django 1.9 onwards, some node.lhs objects might not have a target attribute
        # because they might be wrapping date fields
        field = node.lhs.target
        operator = node.lookup_name
    else:
        field = node.lhs.lhs.target
        operator = node.lhs.lookup_name

        # This deals with things like datefield__month__gt=X. From this point on, the operator
        # will have two parts in that particular case and will probably need to
        # be handled by a special indexer
        if node.lookup_name != operator:
            operator = "{}__{}".format(operator, node.lookup_name)

    if get_top_concrete_parent(field.model) != get_top_concrete_parent(model):
        raise NotSupportedError("Cross-join where filters are not supported on the datastore")

    # Make sure we don't let people try to filter on a text field, otherwise they just won't
    # get any results!

    if field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError("You can't filter on text or blob fields on the datastore")

    if operator == "isnull" and field.model._meta.parents.values():
        raise NotSupportedError("isnull lookups on inherited relations aren't supported on the datastore")

    lhs = field.column

    try:
        if hasattr(node.rhs, "get_compiler"):
            # This is a subquery
            raise NotSupportedError("Attempted to run a subquery on the datastore")
        elif isinstance(node.rhs, ValuesListQuerySet):
            # We explicitly handle ValuesListQuerySet because of the
            # common case of pk__in=Something.objects.values_list("pk", flat=True)
            # this WILL execute another query, but that is to be expected on a
            # non-relational datastore.

            node.rhs = [ x for x in node.rhs ] # Evaluate the queryset
            rhs = node.process_rhs(None, connection) # Process the RHS as if it was a list

        elif isinstance(node.rhs, QuerySet):
            # In Django 1.9, ValuesListQuerySet doesn't exist anymore, and instead
            # values_list returns a QuerySet
            if node.rhs._iterable_class == FlatValuesListIterable:
                # if the queryset has FlatValuesListIterable as iterable class
                # then it's a flat list, and we just need to evaluate the
                # queryset converting it into a list
                node.rhs = [ x for x in node.rhs ]
            else:
                # otherwise, we try to get the PK from the queryset
                node.rhs = [ x.pk for x in node.rhs ]

            rhs = node.process_rhs(None, connection) # Process the RHS as if it was a list

        else:
            rhs = node.process_rhs(None, connection)
    except EmptyResultSet:
        if operator == 'in':
            # Deal with this later
            rhs = [ [] ]
        else:
            raise


    if operator in ('in', 'range'):
        rhs = rhs[-1]
    elif operator == 'isnull':
        rhs = node.rhs
    else:
        rhs = rhs[-1][0]

    new_node.set_leaf(
        lhs,
        operator,
        rhs,
        is_pk_field=field==model._meta.pk,
        negated=negated,
        namespace=connection.ops.connection.settings_dict.get("NAMESPACE"),
        target_field=field,
    )

    # For some reason, this test:
    # test_update_with_related_manager (get_or_create.tests.UpdateOrCreateTests)
    # ends up with duplicate nodes in the where tree. I don't know why, but this
    # weirdly causes the datastore query to return nothing. So here we don't add
    # duplicate nodes; I can't think of a case where that would change the query
    # if it's under the same parent.
    if new_node in new_parent.children:
        return

    new_parent.children.append(new_node)
Example #11
def _django_17_query_walk_leaf(node, negated, new_parent, connection, model):
    new_node = WhereNode()

    if not hasattr(node, "lhs"):
        raise NotSupportedError(
            "Attempted probable subquery, these aren't supported on the datastore"
        )

    # Leaf
    if get_top_concrete_parent(
            node.lhs.target.model) != get_top_concrete_parent(model):
        raise NotSupportedError(
            "Cross-join where filters are not supported on the datastore")

    field = node.lhs.target

    # Make sure we don't let people try to filter on a text field, otherwise they just won't
    # get any results!

    if field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError(
            "You can't filter on text or blob fields on the datastore")

    if node.lookup_name == "isnull" and field.model._meta.parents.values():
        raise NotSupportedError(
            "isnull lookups on inherited relations aren't supported on the datastore"
        )

    lhs = field.column

    try:
        if hasattr(node.rhs, "get_compiler"):
            # This is a subquery
            raise NotSupportedError(
                "Attempted to run a subquery on the datastore")
        elif isinstance(node.rhs, ValuesListQuerySet):
            # We explicitly handle ValuesListQuerySet because of the
            # common case of pk__in=Something.objects.values_list("pk", flat=True)
            # this WILL execute another query, but that is to be expected on a
            # non-relational datastore.

            node.rhs = [x for x in node.rhs]  # Evaluate the queryset
            rhs = node.process_rhs(
                None, connection)  # Process the RHS as if it was a list
        else:
            rhs = node.process_rhs(None, connection)
    except EmptyResultSet:
        if node.lookup_name == 'in':
            # Deal with this later
            rhs = [[]]
        else:
            raise

    if node.lookup_name in ('in', 'range'):
        rhs = rhs[-1]
    elif node.lookup_name == 'isnull':
        rhs = node.rhs
    else:
        rhs = rhs[-1][0]

    new_node.set_leaf(
        lhs,
        node.lookup_name,
        rhs,
        is_pk_field=field == model._meta.pk,
        negated=negated,
        namespace=connection.ops.connection.settings_dict.get("NAMESPACE"),
        target_field=node.lhs.target,
    )

    # For some reason, this test:
    # test_update_with_related_manager (get_or_create.tests.UpdateOrCreateTests)
    # ends up with duplicate nodes in the where tree. I don't know why, but this
    # weirdly causes the datastore query to return nothing. So here we don't add
    # duplicate nodes; I can't think of a case where that would change the query
    # if it's under the same parent.
    if new_node in new_parent.children:
        return

    new_parent.children.append(new_node)
Example #12
 def _generate_kind_name(self, model, column):
     return "_djangae_idx_{}_{}".format(
         get_top_concrete_parent(model)._meta.db_table,
         column
     )
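
This helper just namespaces a per-column index kind under the root concrete table. For a hypothetical model whose root table is myapp_article and a title column, the resulting kind name would be:

# Illustrative only; the table and column names are made up.
"_djangae_idx_{}_{}".format("myapp_article", "title")  # -> "_djangae_idx_myapp_article_title"
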
Example #13
    def _where_node_leaf_callback(self, node, negated, new_parent, connection, model, compiler):
        new_node = WhereNode()

        def convert_rhs_op(node):
            db_rhs = getattr(node.rhs, '_db', None)
            if db_rhs is not None and db_rhs != connection.alias:
                raise ValueError(
                    "Subqueries aren't allowed across different databases. Force "
                    "the inner query to be evaluated using `list(inner_query)`."
                )

            value = node.get_rhs_op(connection, node.rhs)
            operator = value.split()[0].lower().strip()
            if operator == 'between':
                operator = 'range'
            return operator

        if not hasattr(node, "lhs"):
            raise NotSupportedError("Attempted probable subquery, these aren't supported on the datastore")

        # Although we do nothing with the result, we need to call this as many lookups
        # perform validation etc.
        if not hasattr(node.rhs, "_as_sql"): # Don't call on querysets
            try:
                node.process_rhs(compiler, connection)
            except EmptyResultSet:
                if node.lookup_name == 'in':
                    node.rhs = []
                else:
                    raise

        # Leaf
        if hasattr(node.lhs, 'target'):
            # From Django 1.9 onwards, some node.lhs objects might not have a target attribute
            # because they might be wrapping date fields
            field = node.lhs.target
            operator = convert_rhs_op(node)
        else:
            field = node.lhs.lhs.target
            operator = convert_rhs_op(node)

            # This deals with things like datefield__month__gt=X. From this point on, the operator
            # will have two parts in that particular case and will probably need to
            # be handled by a special indexer
            if node.lookup_name != node.lhs.lookup_name:
                operator = "{}__{}".format(node.lhs.lookup_name, node.lookup_name)

        if get_top_concrete_parent(field.model) != get_top_concrete_parent(model):
            raise NotSupportedError("Cross-join where filters are not supported on the datastore")

        # Make sure we don't let people try to filter on a text field, otherwise they just won't
        # get any results!

        if field.db_type(connection) in ("bytes", "text"):
            raise NotSupportedError("You can't filter on text or blob fields on the datastore")

        if operator == "isnull" and field.model._meta.parents.values():
            raise NotSupportedError("isnull lookups on inherited relations aren't supported on the datastore")

        lhs = field.column

        if hasattr(node.rhs, "get_compiler"):
            # This is a subquery
            raise NotSupportedError("Attempted to run a subquery on the datastore")
        elif isinstance(node.rhs, ValuesListQuerySet):
            # We explicitly handle ValuesListQuerySet because of the
            # common case of pk__in=Something.objects.values_list("pk", flat=True)
            # this WILL execute another query, but that is to be expected on a
            # non-relational datastore.

            rhs = [x for x in node.rhs]  # Evaluate the queryset

        elif isinstance(node.rhs, QuerySet):
            # In Django 1.9, ValuesListQuerySet doesn't exist anymore, and instead
            # values_list returns a QuerySet
            if node.rhs._iterable_class == FlatValuesListIterable:
                # if the queryset has FlatValuesListIterable as iterable class
                # then it's a flat list, and we just need to evaluate the
                # queryset converting it into a list
                rhs = [x for x in node.rhs]
            else:
                # otherwise, we try to get the PK from the queryset
                rhs = list(node.rhs.values_list('pk', flat=True))
        else:
            rhs = node.rhs

        was_iter = hasattr(node.rhs, "__iter__")
        rhs = node.get_db_prep_lookup(rhs, connection)[-1]
        if rhs and not was_iter and hasattr(rhs, "__iter__"):
            rhs = rhs[0]

        new_node.set_leaf(
            lhs,
            operator,
            rhs,
            is_pk_field=field==model._meta.pk,
            negated=negated,
            lookup_name=node.lookup_name,
            namespace=connection.ops.connection.settings_dict.get("NAMESPACE"),
            target_field=field,
        )

        # For some reason, this test:
        # test_update_with_related_manager (get_or_create.tests.UpdateOrCreateTests)
        # ends up with duplicate nodes in the where tree. I don't know why, but this
        # weirdly causes the datastore query to return nothing. So here we don't add
        # duplicate nodes; I can't think of a case where that would change the query
        # if it's under the same parent.
        if new_node in new_parent.children:
            return

        new_parent.children.append(new_node)
Example #14
def _django_17_query_walk_leaf(node, negated, new_parent, connection, model):
    new_node = WhereNode()

    if not hasattr(node, "lhs"):
        raise NotSupportedError("Attempted probable subquery, these aren't supported on the datastore")

    # Leaf
    if get_top_concrete_parent(node.lhs.target.model) != get_top_concrete_parent(model):
        raise NotSupportedError("Cross-join where filters are not supported on the datastore")

    field = node.lhs.target

    # Make sure we don't let people try to filter on a text field, otherwise they just won't
    # get any results!

    if field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError("You can't filter on text or blob fields on the datastore")

    if node.lookup_name == "isnull" and field.model._meta.parents.values():
        raise NotSupportedError("isnull lookups on inherited relations aren't supported on the datastore")

    lhs = field.column

    try:
        if hasattr(node.rhs, "get_compiler"):
            # This is a subquery
            raise NotSupportedError("Attempted to run a subquery on the datastore")
        else:
            rhs = node.process_rhs(None, connection)
    except EmptyResultSet:
        if node.lookup_name == 'in':
            # Deal with this later
            rhs = [ [] ]
        else:
            raise

    if node.lookup_name in ('in', 'range'):
        rhs = rhs[-1]
    elif node.lookup_name == 'isnull':
        rhs = node.rhs
    else:
        rhs = rhs[-1][0]

    new_node.set_leaf(
        lhs,
        node.lookup_name,
        rhs,
        is_pk_field=field==model._meta.pk,
        negated=negated,
        target_field=node.lhs.target,
    )

    # For some reason, this test:
    # test_update_with_related_manager (get_or_create.tests.UpdateOrCreateTests)
    # ends up with duplicate nodes in the where tree. I don't know why, but this
    # weirdly causes the datastore query to return nothing. So here we don't add
    # duplicate nodes; I can't think of a case where that would change the query
    # if it's under the same parent.
    if new_node in new_parent.children:
        return

    new_parent.children.append(new_node)
Example #15
def _django_17_query_walk_leaf(node, negated, new_parent, connection, model):
    new_node = WhereNode()

    if not hasattr(node, "lhs"):
        raise NotSupportedError("Attempted probable subquery, these aren't supported on the datastore")

    # Leaf
    if get_top_concrete_parent(node.lhs.target.model) != get_top_concrete_parent(model):
        raise NotSupportedError("Cross-join where filters are not supported on the datastore")

    field = node.lhs.target

    # Make sure we don't let people try to filter on a text field, otherwise they just won't
    # get any results!

    if field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError("You can't filter on text or blob fields on the datastore")

    if node.lookup_name == "isnull" and field.model._meta.parents.values():
        raise NotSupportedError("isnull lookups on inherited relations aren't supported on the datastore")

    lhs = field.column

    try:
        if hasattr(node.rhs, "get_compiler"):
            # This is a subquery
            raise NotSupportedError("Attempted to run a subquery on the datastore")
        elif isinstance(node.rhs, ValuesListQuerySet):
            # We explicitly handle ValuesListQuerySet because of the
            # common case of pk__in=Something.objects.values_list("pk", flat=True)
            # this WILL execute another query, but that is to be expected on a
            # non-relational datastore.

            node.rhs = [ x for x in node.rhs ] # Evaluate the queryset
            rhs = node.process_rhs(None, connection) # Process the RHS as if it was a list
        else:
            rhs = node.process_rhs(None, connection)
    except EmptyResultSet:
        if node.lookup_name == 'in':
            # Deal with this later
            rhs = [ [] ]
        else:
            raise

    if node.lookup_name in ('in', 'range'):
        rhs = rhs[-1]
    elif node.lookup_name == 'isnull':
        rhs = node.rhs
    else:
        rhs = rhs[-1][0]

    new_node.set_leaf(
        lhs,
        node.lookup_name,
        rhs,
        is_pk_field=field==model._meta.pk,
        negated=negated,
        namespace=connection.ops.connection.settings_dict.get("NAMESPACE"),
        target_field=node.lhs.target,
    )

    # For some reason, this test:
    # test_update_with_related_manager (get_or_create.tests.UpdateOrCreateTests)
    # ends up with duplicate nodes in the where tree. I don't know why, but this
    # weirdly causes the datastore query to return nothing. So here we don't add
    # duplicate nodes; I can't think of a case where that would change the query
    # if it's under the same parent.
    if new_node in new_parent.children:
        return

    new_parent.children.append(new_node)
Example #16
def _transform_query_17(connection, kind, query):
    from django.db.models.sql.datastructures import Date, DateTime

    if isinstance(query.where, EmptyWhere):
        # Empty where means return nothing!
        raise EmptyResultSet()

    # Check for joins; we ignore select-related tables as they aren't actually used (the connector marks
    # select_related as unsupported in its features)
    tables = [k for k, v in query.alias_refcount.items() if v]
    inherited_tables = set(
        [x._meta.db_table for x in query.model._meta.parents])
    select_related_tables = set([y[0][0] for y in query.related_select_cols])
    tables = set(tables) - inherited_tables - select_related_tables

    if len(tables) > 1:
        raise NotSupportedError("""
            The appengine database connector does not support JOINs. The requested join map follows\n
            %s
        """ % query.join_map)

    ret = Query(query.model, kind)
    ret.connection = connection

    # Add the root concrete table as the source table
    root_table = get_top_concrete_parent(query.model)._meta.db_table
    ret.add_source_table(root_table)

    # Extract the ordering of the query results
    for order_col in _extract_ordering_from_query_17(query):
        ret.add_order_by(order_col)

    # Extract any projected columns (values/values_list/only/defer)
    for projected_col in _extract_projected_columns_from_query_17(query):
        ret.add_projected_column(projected_col)

    for potential_annotation in query.select:
        col = getattr(potential_annotation, "col", None)
        if not col:
            continue

        if isinstance(col, (Date, DateTime)):
            ret.add_annotation(col.col[-1], col)

    # Add any extra selects
    for col, select in query.extra_select.items():
        ret.add_extra_select(col, select[0])

    # This must happen after extracting projected cols
    if query.distinct:
        ret.set_distinct(list(query.distinct_fields))

    # Extract any query offsets and limits
    ret.low_mark = query.low_mark
    ret.high_mark = query.high_mark

    output = WhereNode()
    output.connector = query.where.connector

    _walk_django_where(query,
                       _django_17_query_walk_trunk,
                       _django_17_query_walk_leaf,
                       new_parent=output,
                       connection=connection,
                       model=query.model)

    # If there are no child nodes, just wipe out the where
    if not output.children:
        output = None

    ret.where = output

    return ret
Example #17
 def _generate_kind_name(self, model, column):
     return "_djangae_idx_{}_{}".format(
         get_top_concrete_parent(model)._meta.db_table, column)
Example #18
    def set_leaf(
            self, column, operator, value, is_pk_field, negated, lookup_name, namespace,
            target_field=None):

        assert column
        assert operator
        assert isinstance(is_pk_field, bool)
        assert isinstance(negated, bool)

        if operator == "iexact" and isinstance(target_field, AutoField):
            # When a new instance is created, the automatic primary key 'id'
            # does not generate an '_idx_iexact_id' entry.
            # As the primary key 'id' (an AutoField) is an integer and therefore always case-insensitive,
            # we can handle an 'id__iexact=' query with 'exact' rather than 'iexact'.
            operator = "exact"
            value = int(value)

        if is_pk_field:
            # If this is a primary key, we need to make sure that the value
            # we pass to the query is a datastore Key. We have to deal with IN queries here
            # because they aren't flattened until the DNF stage
            model = get_top_concrete_parent(target_field.model)
            table = model._meta.db_table

            if isinstance(value, (list, tuple)):
                value = [
                    rpc.Key.from_path(table, x, namespace=namespace)
                    for x in value if x
                ]
            else:
                # Django 1.11 has operators as symbols, earlier versions use "exact" etc.
                if (operator == "isnull" and value is True) or (operator in ("exact", "lt", "lte", "<", "<=", "=") and not value):
                    # id=None will never return anything and
                    # Empty strings and 0 are forbidden as keys
                    self.will_never_return_results = True
                elif operator in ("gt", "gte", ">", ">=") and not value:
                    # If the value is 0 or "", then we need to manipulate the value and operator here to
                    # get the right result (given that both are invalid keys), so we rewrite the filter as
                    # >= 1 for integers or >= "\0" for strings
                    if isinstance(value, six.integer_types):
                        value = 1
                    else:
                        value = "\0"

                    value = rpc.Key.from_path(table, value, namespace=namespace)
                    operator = "gte"
                else:
                    value = rpc.Key.from_path(table, value, namespace=namespace)
            column = "__key__"

        # Do any special index conversions necessary to perform this lookup
        special_indexer = get_indexer(target_field, operator)

        if special_indexer:
            if is_pk_field:
                column = model._meta.pk.column
                value = unicode(value.id_or_name())

            add_special_index(target_field.model, column, special_indexer, operator, value)
            index_type = special_indexer.prepare_index_type(operator, value)
            value = special_indexer.prep_value_for_query(
                value,
                model=target_field.model,
                column=column,
                connection=connections[self.using]
            )
            column = special_indexer.indexed_column_name(column, value, index_type)
            operator = special_indexer.prep_query_operator(operator)

        self.column = column
        self.operator = convert_operator(operator)
        self.value = value
        self.lookup_name = lookup_name
Example #19
    def _where_node_leaf_callback(self, node, negated, new_parent, connection, model, compiler):
        new_node = WhereNode(new_parent.using)

        def convert_rhs_op(node):
            db_rhs = getattr(node.rhs, '_db', None)
            if db_rhs is not None and db_rhs != connection.alias:
                raise ValueError(
                    "Subqueries aren't allowed across different databases. Force "
                    "the inner query to be evaluated using `list(inner_query)`."
                )

            value = node.get_rhs_op(connection, node.rhs)
            operator = value.split()[0].lower().strip()
            if operator == 'between':
                operator = 'range'
            return operator

        if not hasattr(node, "lhs"):
            raise NotSupportedError("Attempted probable subquery, these aren't supported on the Datastore")

        # Don't call on querysets
        if not hasattr(node.rhs, "_as_sql") and not isinstance(node.rhs, DjangoQuery):
            try:
                # Although we do nothing with the result, we need to call this as many lookups
                # perform validation etc.
                node.process_rhs(compiler, connection)
            except EmptyResultSet:
                if node.lookup_name == 'in':
                    node.rhs = []
                else:
                    raise

        # Leaf
        if hasattr(node.lhs, 'target'):
            # From Django 1.9 onwards, some node.lhs objects might not have a target attribute
            # because they might be wrapping date fields
            field = node.lhs.target
            operator = convert_rhs_op(node)
        elif isinstance(node.lhs, Aggregate):
            raise NotSupportedError("Aggregate filters are not supported on the Datastore")
        else:
            field = node.lhs.lhs.target
            operator = convert_rhs_op(node)

            # This deals with things like datefield__month__gt=X. From this point on, the operator
            # will have two parts in that particular case and will probably need to
            # be handled by a special indexer
            if node.lookup_name != node.lhs.lookup_name:
                operator = "{}__{}".format(node.lhs.lookup_name, node.lookup_name)

        if get_top_concrete_parent(field.model) != get_top_concrete_parent(model):
            raise NotSupportedError("Cross-join where filters are not supported on the Datastore")

        # Make sure we don't let people try to filter on a text field, otherwise they just won't
        # get any results!

        lookup_supports_text = getattr(node, "lookup_supports_text", False)

        if field.db_type(connection) in ("bytes", "text") and not lookup_supports_text:
            raise NotSupportedError("You can't filter on text or blob fields on the Datastore")

        if operator == "isnull" and field.model._meta.parents.values():
            raise NotSupportedError("isnull lookups on inherited relations aren't supported on the Datastore")

        lhs = field.column

        if hasattr(node.rhs, "get_compiler"):
            if len(node.rhs.select) == 1:
                # In Django >= 1.11 this is a values list type query, which we explicitly handle
                # because of the common case of pk__in=Something.objects.values_list("pk", flat=True)
                qs = QuerySet(query=node.rhs, using=self.connection.alias)

                # We make the query for the values, but wrap in a list to trick the
                # was_iter code below. This whole set of if/elif statements needs rethinking!
                rhs = [list(qs.values_list("pk", flat=True))]
            else:
                # This is a subquery
                raise NotSupportedError("Attempted to run a subquery on the Datastore")
        elif isinstance(node.rhs, ValuesListQuerySet):
            # We explicitly handle ValuesListQuerySet because of the
            # common case of pk__in=Something.objects.values_list("pk", flat=True)
            # this WILL execute another query, but that is to be expected on a
            # non-relational database.

            rhs = [x for x in node.rhs]  # Evaluate the queryset

        elif isinstance(node.rhs, QuerySet):
            # In Django 1.9, ValuesListQuerySet doesn't exist anymore, and instead
            # values_list returns a QuerySet
            if node.rhs._iterable_class == FlatValuesListIterable:
                # if the queryset has FlatValuesListIterable as iterable class
                # then it's a flat list, and we just need to evaluate the
                # queryset converting it into a list
                rhs = [x for x in node.rhs]
            else:
                # otherwise, we try to get the PK from the queryset
                rhs = list(node.rhs.values_list('pk', flat=True))
        else:
            rhs = node.rhs

        was_iter = hasattr(node.rhs, "__iter__")
        rhs = node.get_db_prep_lookup(rhs, connection)[-1]
        if rhs and not was_iter and hasattr(rhs, "__iter__"):
            rhs = rhs[0]

        new_node.set_leaf(
            lhs,
            operator,
            rhs,
            is_pk_field=field==model._meta.pk,
            negated=negated,
            lookup_name=node.lookup_name,
            namespace=connection.ops.connection.settings_dict.get("NAMESPACE"),
            target_field=field,
        )

        # For some reason, this test:
        # test_update_with_related_manager (get_or_create.tests.UpdateOrCreateTests)
        # ends up with duplicate nodes in the where tree. I don't know why, but this
        # weirdly causes the Datastore query to return nothing. So here we don't add
        # duplicate nodes; I can't think of a case where that would change the query
        # if it's under the same parent.
        if new_node in new_parent.children:
            return

        new_parent.children.append(new_node)
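
The QuerySet branches above exist mainly for the common pk__in pattern. On this backend the inner queryset is evaluated eagerly and its primary keys are passed in as a plain list, so a filter like the one below issues two Datastore queries rather than a JOIN or subquery (Author and Book are hypothetical models used only for illustration):

# Hypothetical usage; Author and Book are made-up models.
# active_ids is evaluated into a list of pks before the outer query runs.
active_ids = Author.objects.filter(active=True).values_list("pk", flat=True)
books = Book.objects.filter(author__pk__in=active_ids)
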
Example #20
def _django_18_query_walk_leaf(node, negated, new_parent, connection, model):
    new_node = WhereNode()

    if not hasattr(node, "lhs"):
        raise NotSupportedError(
            "Attempted probable subquery, these aren't supported on the datastore"
        )

    # Leaf
    if hasattr(node.lhs, 'target'):
        # From Django 1.9 onwards, some node.lhs objects might not have a target attribute
        # because they might be wrapping date fields
        field = node.lhs.target
        operator = node.lookup_name
    else:
        field = node.lhs.lhs.target
        operator = node.lhs.lookup_name

        # This deals with things like datefield__month__gt=X. From this point on, the operator
        # will have two parts in that particular case and will probably need to
        # be handled by a special indexer
        if node.lookup_name != operator:
            operator = "{}__{}".format(operator, node.lookup_name)

    if get_top_concrete_parent(field.model) != get_top_concrete_parent(model):
        raise NotSupportedError(
            "Cross-join where filters are not supported on the datastore")

    # Make sure we don't let people try to filter on a text field, otherwise they just won't
    # get any results!

    if field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError(
            "You can't filter on text or blob fields on the datastore")

    if operator == "isnull" and field.model._meta.parents.values():
        raise NotSupportedError(
            "isnull lookups on inherited relations aren't supported on the datastore"
        )

    lhs = field.column

    try:
        if hasattr(node.rhs, "get_compiler"):
            # This is a subquery
            raise NotSupportedError(
                "Attempted to run a subquery on the datastore")
        elif isinstance(node.rhs, ValuesListQuerySet):
            # We explicitly handle ValuesListQuerySet because of the
            # common case of pk__in=Something.objects.values_list("pk", flat=True)
            # this WILL execute another query, but that is to be expected on a
            # non-relational datastore.

            node.rhs = [x for x in node.rhs]  # Evaluate the queryset
            rhs = node.process_rhs(
                None, connection)  # Process the RHS as if it was a list

        elif isinstance(node.rhs, QuerySet):
            # In Django 1.9, ValuesListQuerySet doesn't exist anymore, and instead
            # values_list returns a QuerySet
            if node.rhs._iterable_class == FlatValuesListIterable:
                # if the queryset has FlatValuesListIterable as iterable class
                # then it's a flat list, and we just need to evaluate the
                # queryset converting it into a list
                node.rhs = [x for x in node.rhs]
            else:
                # otherwise, we try to get the PK from the queryset
                node.rhs = [x.pk for x in node.rhs]

            rhs = node.process_rhs(
                None, connection)  # Process the RHS as if it was a list

        else:
            rhs = node.process_rhs(None, connection)
    except EmptyResultSet:
        if operator == 'in':
            # Deal with this later
            rhs = [[]]
        else:
            raise

    if operator in ('in', 'range'):
        rhs = rhs[-1]
    elif operator == 'isnull':
        rhs = node.rhs
    else:
        rhs = rhs[-1][0]

    new_node.set_leaf(
        lhs,
        operator,
        rhs,
        is_pk_field=field == model._meta.pk,
        negated=negated,
        namespace=connection.ops.connection.settings_dict.get("NAMESPACE"),
        target_field=field,
    )

    # For some reason, this test:
    # test_update_with_related_manager (get_or_create.tests.UpdateOrCreateTests)
    # ends up with duplicate nodes in the where tree. I don't know why, but this
    # weirdly causes the datastore query to return nothing. So here we don't add
    # duplicate nodes; I can't think of a case where that would change the query
    # if it's under the same parent.
    if new_node in new_parent.children:
        return

    new_parent.children.append(new_node)
Example #21
def _transform_query_19(connection, kind, query):
    from django.db.models.sql.where import NothingNode, WhereNode as DjangoWhereNode

    # It could either be a NothingNode, or a WhereNode(AND NothingNode)
    if (isinstance(query.where, NothingNode)
            or (isinstance(query.where, DjangoWhereNode)
                and len(query.where.children) == 1
                and isinstance(query.where.children[0], NothingNode))):
        # Empty where means return nothing!
        raise EmptyResultSet()

    ret = Query(query.model, kind)
    ret.connection = connection

    # Add the root concrete table as the source table
    root_table = get_top_concrete_parent(query.model)._meta.db_table
    ret.add_source_table(root_table)

    # Extract the ordering of the query results
    for order_col in _extract_ordering_from_query_18(query):
        ret.add_order_by(order_col)

    # Extract any projected columns (values/values_list/only/defer)
    for projected_col in _extract_projected_columns_from_query_18(query):
        ret.add_projected_column(projected_col)

    # Add any extra selects
    for col, select in query.extra_select.items():
        ret.add_extra_select(col, select[0])

    if query.distinct:
        # This must happen after extracting projected cols
        ret.set_distinct(list(query.distinct_fields))

    # Process annotations!
    if query.annotation_select:
        for k, v in query.annotation_select.items():
            ret.add_annotation(k, v)

    # Extract any query offsets and limits
    ret.low_mark = query.low_mark
    ret.high_mark = query.high_mark

    output = WhereNode()
    output.connector = query.where.connector

    _walk_django_where(query,
                       _django_18_query_walk_trunk,
                       _django_18_query_walk_leaf,
                       new_parent=output,
                       connection=connection,
                       negated=query.where.negated,
                       model=query.model)

    # If there are no child nodes, just wipe out the where
    if not output.children:
        output = None

    ret.where = output

    return ret
Example #22
    def set_leaf(self,
                 column,
                 operator,
                 value,
                 is_pk_field,
                 negated,
                 namespace,
                 target_field=None):
        assert column
        assert operator
        assert isinstance(is_pk_field, bool)
        assert isinstance(negated, bool)

        if operator == "iexact" and isinstance(target_field, AutoField):
            # When a new instance is created, the automatic primary key 'id' does not generate an '_idx_iexact_id' entry.
            # As the primary key 'id' (an AutoField) is an integer and therefore always case-insensitive,
            # we can handle an 'id__iexact=' query with 'exact' rather than 'iexact'.
            operator = "exact"
            value = int(value)

        if is_pk_field:
            # If this is a primary key, we need to make sure that the value
            # we pass to the query is a datastore Key. We have to deal with IN queries here
            # because they aren't flattened until the DNF stage
            model = get_top_concrete_parent(target_field.model)
            table = model._meta.db_table

            if isinstance(value, (list, tuple)):
                value = [
                    datastore.Key.from_path(table, x, namespace=namespace)
                    for x in value if x
                ]
            else:
                if operator == "isnull" and value is True:
                    # FIXME: Strictly, this isn't correct; this could be one of several branches,
                    # but id=None filters are silly anyway. This should probably be moved to after
                    # normalization. It fixes a test in Django which does this in get_or_create for some reason
                    raise EmptyResultSet()

                if not value:
                    # Empty strings and 0 are forbidden as keys
                    # so make this an impossible filter
                    # FIXME: This is a hack! It screws with the ordering
                    # because it's an inequality. Instead we should wipe this
                    # filter out when preprocessing in the DNF (because it's impossible)
                    value = datastore.Key.from_path('', 1)
                    operator = '<'
                else:
                    value = datastore.Key.from_path(table,
                                                    value,
                                                    namespace=namespace)
            column = "__key__"

        # Do any special index conversions necessary to perform this lookup
        primary_operation = operator.split("__")[0]
        special_indexer = REQUIRES_SPECIAL_INDEXES.get(primary_operation)

        if special_indexer:
            if is_pk_field:
                column = model._meta.pk.column
                value = unicode(value.id_or_name())

            add_special_index(target_field.model, column, primary_operation,
                              value)
            index_type = special_indexer.prepare_index_type(operator, value)
            value = special_indexer.prep_value_for_query(value)
            column = special_indexer.indexed_column_name(
                column, value, index_type)
            operator = special_indexer.prep_query_operator(operator)

        self.column = column
        self.operator = convert_operator(operator)
        self.value = value
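
The special-index hooks used above (prepare_index_type, prep_value_for_query, indexed_column_name, prep_query_operator) form a small protocol: derive a value the Datastore can match exactly, store it in a synthetic "_idx_..." column, and rewrite the operator accordingly. Below is a rough, illustrative indexer for iexact following that protocol; it is not djangae's real implementation, and only reuses the "_idx_iexact_<column>" naming that the comments above mention:

class IExactIndexerSketch(object):
    # Illustrative only: case-insensitive equality via a lowercased shadow column.

    def prepare_index_type(self, operator, value):
        return "iexact"

    def prep_value_for_query(self, value):
        return value.lower()  # compare against the lowercased copy stored at save time

    def indexed_column_name(self, column, value, index_type):
        return "_idx_{}_{}".format(index_type, column)  # e.g. "_idx_iexact_name"

    def prep_query_operator(self, operator):
        return "exact"  # the shadow column only needs a plain equality
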
Example #23
def _transform_query_17(connection, kind, query):
    from django.db.models.sql.datastructures import Date, DateTime

    if isinstance(query.where, EmptyWhere):
        # Empty where means return nothing!
        raise EmptyResultSet()

    # Check for joins; we ignore select-related tables as they aren't actually used (the connector marks
    # select_related as unsupported in its features)
    tables = [ k for k, v in query.alias_refcount.items() if v ]
    inherited_tables = set([x._meta.db_table for x in query.model._meta.parents ])
    select_related_tables = set([y[0][0] for y in query.related_select_cols ])
    tables = set(tables) - inherited_tables - select_related_tables

    if len(tables) > 1:
        raise NotSupportedError("""
            The appengine database connector does not support JOINs. The requested join map follows\n
            %s
        """ % query.join_map)

    ret = Query(query.model, kind)
    ret.connection = connection

    # Add the root concrete table as the source table
    root_table = get_top_concrete_parent(query.model)._meta.db_table
    ret.add_source_table(root_table)

    # Extract the ordering of the query results
    for order_col in _extract_ordering_from_query_17(query):
        ret.add_order_by(order_col)

    # Extract any projected columns (values/values_list/only/defer)
    for projected_col in _extract_projected_columns_from_query_17(query):
        ret.add_projected_column(projected_col)

    for potential_annotation in query.select:
        col = getattr(potential_annotation, "col", None)
        if not col:
            continue

        if isinstance(col, (Date, DateTime)):
            ret.add_annotation(col.col[-1], col)

    # Add any extra selects
    for col, select in query.extra_select.items():
        ret.add_extra_select(col, select[0])

    # This must happen after extracting projected cols
    if query.distinct:
        ret.set_distinct(list(query.distinct_fields))

    # Extract any query offsets and limits
    ret.low_mark = query.low_mark
    ret.high_mark = query.high_mark

    output = WhereNode()
    output.connector = query.where.connector

    _walk_django_where(
        query,
        _django_17_query_walk_trunk,
        _django_17_query_walk_leaf,
        new_parent=output,
        connection=connection,
        model=query.model
    )

    # If there are no child nodes, just wipe out the where
    if not output.children:
        output = None

    ret.where = output

    return ret
def _django_17_query_walk_leaf(node, negated, new_parent, connection, model):
    new_node = WhereNode()

    if not hasattr(node, "lhs"):
        raise NotSupportedError("Attempted probable subquery, these aren't supported on the datastore")

    # Leaf
    if get_top_concrete_parent(node.lhs.target.model) != get_top_concrete_parent(model):
        raise NotSupportedError("Cross-join where filters are not supported on the datastore")

    field = node.lhs.target

    # Make sure we don't let people try to filter on a text field, otherwise they just won't
    # get any results!

    if field.db_type(connection) in ("bytes", "text"):
        raise NotSupportedError("You can't filter on text or blob fields on the datastore")

    if node.lookup_name == "isnull" and field.model._meta.parents.values():
        raise NotSupportedError("isnull lookups on inherited relations aren't supported on the datastore")

    lhs = field.column

    try:
        if hasattr(node.rhs, "get_compiler"):
            # This is a subquery
            raise NotSupportedError("Attempted to run a subquery on the datastore")
        else:
            rhs = node.process_rhs(None, connection)
    except EmptyResultSet:
        if node.lookup_name == 'in':
            # Deal with this later
            rhs = [ [] ]
        else:
            raise

    if node.lookup_name in ('in', 'range'):
        rhs = rhs[-1]
    elif node.lookup_name == 'isnull':
        rhs = node.rhs
    else:
        rhs = rhs[-1][0]

    new_node.set_leaf(
        lhs,
        node.lookup_name,
        rhs,
        is_pk_field=field==model._meta.pk,
        negated=negated,
        target_field=node.lhs.target,
    )

    # For some reason, this test:
    # test_update_with_related_manager (get_or_create.tests.UpdateOrCreateTests)
    # ends up with duplicate nodes in the where tree. I don't know why, but this
    # weirdly causes the datastore query to return nothing. So here we don't add
    # duplicate nodes; I can't think of a case where that would change the query
    # if it's under the same parent.
    if new_node in new_parent.children:
        return

    new_parent.children.append(new_node)