Example #1
    def get(self, *q_objs, **query):
        """Retrieve the the matching object raising
        :class:`~mongoengine.queryset.MultipleObjectsReturned` or
        `DocumentName.MultipleObjectsReturned` exception if multiple results
        and :class:`~mongoengine.queryset.DoesNotExist` or
        `DocumentName.DoesNotExist` if no results are found.
        """

        queryset = self.clone()
        queryset = queryset.order_by().limit(2)
        queryset = queryset.filter(*q_objs, **query)

        future = get_future(self)

        def _get_cb(done_future):
            docs = done_future.result()
            if len(docs) < 1:
                msg = ("%s matching query does not exist." %
                       queryset._document._class_name)
                future.set_exception(queryset._document.DoesNotExist(msg))

            elif len(docs) > 1:
                msg = 'More than 1 item returned'
                future.set_exception(
                    queryset._document.MultipleObjectsReturned(msg))
            else:
                future.set_result(docs[0])

        list_future = queryset.to_list(length=2)
        list_future.add_done_callback(_get_cb)  # pragma: no cover
        return future
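The future returned by ``get`` is resolved inside the ``to_list`` callback, so callers never touch the callback chain directly. A minimal usage sketch, assuming an asyncio event loop, that the returned future is awaitable (as the test in Example #3 suggests), and a hypothetical ``Article`` document class defined elsewhere:

    import asyncio

    async def fetch_article(slug):
        # Article is a hypothetical Document subclass with a unique `slug`
        # field; the futures returned by the queryset methods above are
        # assumed to be awaitable on an asyncio loop.
        try:
            article = await Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            article = None
        except Article.MultipleObjectsReturned:
            raise ValueError('slug %r is not unique' % slug)
        return article

    loop = asyncio.get_event_loop()
    article = loop.run_until_complete(fetch_article('welcome'))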
Example #2
    def aggregate_sum(self, field):
        """Sum over the values of the specified field.

        :param field: the field to sum over; use dot-notation to refer to
            embedded document fields

        This method is more performant than the regular `sum`, because it uses
        the aggregation framework instead of map-reduce.
        """
        cursor = self._document._get_collection().aggregate([{
            '$match':
            self._query
        }, {
            '$group': {
                '_id': 'sum',
                'total': {
                    '$sum': '$' + field
                }
            }
        }])

        fn_future = cursor.fetch_next
        future = get_future(self)

        def sum_cb(fn_future):
            if fn_future.result():
                doc = cursor.next_object()
                r = doc['total']
            else:
                r = 0

            future.set_result(r)

        fn_future.add_done_callback(sum_cb)
        return future
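A usage sketch for ``aggregate_sum``, again assuming an awaitable future and a hypothetical ``Order`` document; note that an empty match simply resolves to 0:

    async def revenue():
        # Order is a hypothetical Document with an `amount` field and an
        # embedded `customer` document; dot-notation reaches embedded fields.
        paid = await Order.objects.filter(status='paid').aggregate_sum('amount')
        credit = await Order.objects.aggregate_sum('customer.credit')
        return paid, credit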
Example #3
    def test_get_future(self):
        class TestClass:
            @classmethod
            def _get_db(cls):
                db = Mock()
                db._framework = asyncio_framework
                return db

        future = metaprogramming.get_future(TestClass())
        self.assertIsInstance(future, Future)
Example #4
    def sum(self, field):
        """Sum over the values of the specified field.

        :param field: the field to sum over; use dot-notation to refer to
            embedded document fields
        """
        map_func = """
            function() {
                var path = '{{~%(field)s}}'.split('.'),
                field = this;

                for (p in path) {
                    if (typeof field != 'undefined')
                       field = field[path[p]];
                    else
                       break;
                }

                if (field && field.constructor == Array) {
                    field.forEach(function(item) {
                        emit(1, item||0);
                    });
                } else if (typeof field != 'undefined') {
                    emit(1, field||0);
                }
            }
        """ % dict(field=field)

        reduce_func = Code("""
            function(key, values) {
                var sum = 0;
                for (var i in values) {
                    sum += values[i];
                }
                return sum;
            }
        """)

        mr_future = self.inline_map_reduce(map_func, reduce_func)
        future = get_future(self)

        def sum_cb(mr_future):
            results = mr_future.result()

            for result in results:
                r = result.value
                break
            else:
                r = 0

            future.set_result(r)

        mr_future.add_done_callback(sum_cb)
        return future
Example #5
    def modify(self, query={}, **update):
        """Perform an atomic update of the document in the database and reload
        the document object using updated version.

        Returns True if the document has been updated or False if the document
        in the database doesn't match the query.

        .. note:: All unsaved changes that have been made to the document are
            rejected if the method returns True.

        :param query: the update will be performed only if the document in the
            database matches the query
        :param update: Django-style update keyword arguments
        """

        if self.pk is None:
            raise InvalidDocumentError(
                "The document does not have a primary key.")

        id_field = self._meta["id_field"]
        query = query.copy() if isinstance(query,
                                           dict) else query.to_query(self)

        if id_field not in query:
            query[id_field] = self.pk
        elif query[id_field] != self.pk:
            msg = "Invalid document modify query: "
            msg += "it must modify only this document."
            raise InvalidQueryError(msg)

        updated_future = self._qs(**query).modify(new=True, **update)
        ret_future = get_future(self)

        def updated_cb(updated_future):
            try:
                updated = updated_future.result()
                if updated is None:
                    ret_future.set_result(False)
                    return

                for field in self._fields_ordered:
                    setattr(self, field, self._reload(field, updated[field]))

                self._changed_fields = updated._changed_fields
                self._created = False
                ret_future.set_result(True)
                return
            except Exception as e:
                ret_future.set_exception(e)

        updated_future.add_done_callback(updated_cb)
        return ret_future
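A sketch of an optimistic-concurrency style update with ``modify``, assuming an awaitable future and a hypothetical ``Article`` document with a ``version`` field:

    async def publish(article):
        # The update only applies if the stored document still has the same
        # version; on success the in-memory object is reloaded from the
        # returned document and the future resolves to True.
        updated = await article.modify(
            query={'version': article.version},
            set__status='published',
            inc__version=1)
        if not updated:
            raise RuntimeError('article was changed by someone else')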
Example #6
    def inline_map_reduce(self, map_f, reduce_f, **mr_kwargs):
        """Perform a map/reduce query using the current query spec
        and ordering. While ``map_reduce`` respects ``QuerySet`` chaining,
        it must be the last call made, as it does not return a malleable
        ``QuerySet``.

        :param map_f: map function, as :class:`~bson.code.Code` or string
        :param reduce_f: reduce function, as
                         :class:`~bson.code.Code` or string

        :param mr_kwargs: arguments for MongoDB's mapReduce command; see
           https://docs.mongodb.com/manual/reference/command/mapReduce/
           for more information

        Returns a generator of MapReduceDocument with the map/reduce results.

        .. note::

           This method only works with inline map/reduce. If you want to
           send the output to a collection use
           :meth:`~SanicMongo.queryset.Queryset.map_reduce`.
        """

        queryset = self.clone()

        if mr_kwargs.get('out') and mr_kwargs.get('out') != 'inline':
            msg = 'inline_map_reduce only supports inline output. '
            msg += 'To send the result to a collection use map_reduce'
            raise OperationError(msg)

        map_f = self._get_code(map_f)
        reduce_f = self._get_code(reduce_f)

        mr_kwargs.update({'query': queryset._query})

        if mr_kwargs.get('finalize'):
            mr_kwargs['finalize'] = self._get_code(mr_kwargs['finalize'])

        mr_future = queryset._collection.inline_map_reduce(
            map_f, reduce_f, **mr_kwargs)
        future = get_future(self)

        def inline_mr_cb(result_future):
            result = result_future.result()
            gen = (MapReduceDocument(queryset._document, queryset._collection,
                                     doc['_id'], doc['value'])
                   for doc in result)
            future.set_result(gen)

        mr_future.add_done_callback(inline_mr_cb)
        return future
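A usage sketch for ``inline_map_reduce``, assuming awaitable futures and a hypothetical ``Page`` document; the result is a generator of ``MapReduceDocument`` objects whose ``key``/``value`` pairs carry the reduced output:

    async def words_per_author():
        # Hypothetical map/reduce pair: emit one word count per author.
        map_f = "function () { emit(this.author, this.word_count); }"
        reduce_f = """
            function (key, values) {
                return Array.sum(values);
            }"""
        results = await Page.objects.filter(published=True).inline_map_reduce(
            map_f, reduce_f)
        return {doc.key: doc.value for doc in results}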
Example #7
    def insert(self, doc_or_docs, load_bulk=True, write_concern=None):
        """bulk insert documents

        :param doc_or_docs: a document or list of documents to be inserted
        :param load_bulk (optional): If True returns the list of document
            instances
        :param write_concern: Extra keyword arguments are passed down to
                :meth:`~pymongo.collection.Collection.insert`
                which will be used as options for the resultant
                ``getLastError`` command.  For example,
                ``insert(..., {w: 2, fsync: True})`` will wait until at least
                two servers have recorded the write and will force an fsync on
                each server being written to.

        By default this returns document instances; set ``load_bulk`` to False
        to return just ``ObjectIds``.
        """

        super_insert = BaseQuerySet.insert
        async_in_bulk = self.in_bulk
        # this sync method is not really sync; it uses motor sockets and
        # greenlet events, but it looks sync, so...
        sync_in_bulk = functools.partial(self.in_bulk.__wrapped__, self)
        insert_future = get_future(self)

        with MonkeyPatcher() as patcher:
            # here we swap the method with the async api for one with a
            # sync api so we don't need to rewrite the mongoengine
            # method.
            patcher.patch_item(self, 'in_bulk', sync_in_bulk, undo=False)
            future = asynchronize(super_insert)(self,
                                                doc_or_docs,
                                                load_bulk=load_bulk,
                                                write_concern=write_concern)

            def cb(future):
                try:
                    result = future.result()
                    insert_future.set_result(result)
                except Exception as e:
                    insert_future.set_exception(e)
                finally:
                    patcher.patch_item(self,
                                       'in_bulk',
                                       async_in_bulk,
                                       undo=False)

            future.add_done_callback(cb)

        return insert_future
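A bulk-insert sketch, assuming an awaitable future and a hypothetical ``LogEntry`` document; with ``load_bulk=False`` only the new ``ObjectId`` values come back:

    async def import_entries(raw_entries):
        docs = [LogEntry(**raw) for raw in raw_entries]
        # Resolves to LogEntry instances by default, or to bare ObjectIds
        # when load_bulk is False.
        ids = await LogEntry.objects.insert(docs, load_bulk=False)
        return ids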
Example #8
    def replace(self, file_obj, **kwargs):
        del_future = self.delete()

        ret_future = get_future(self)

        def del_cb(del_future):
            put_future = self.put(file_obj, **kwargs)

            def put_cb(put_future):
                result = put_future.result()
                ret_future.set_result(result)

            put_future.add_done_callback(put_cb)

        del_future.add_done_callback(del_cb)

        return ret_future
Example #9
    def upsert_one(self, write_concern=None, **update):
        """Overwrite or add the first document matched by the query.

        :param write_concern: Extra keyword arguments are passed down which
            will be used as options for the resultant
            ``getLastError`` command.  For example,
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        :param update: Django-style update keyword arguments

        :returns: the new or overwritten document

        """

        update_future = self.update(multi=False,
                                    upsert=True,
                                    write_concern=write_concern,
                                    full_result=True,
                                    **update)

        upsert_future = get_future(self)

        def update_cb(update_future):
            try:
                result = update_future.result()
                if result['updatedExisting']:
                    document_future = self.first()
                else:
                    document_future = self._document.objects.with_id(
                        result['upserted'])

                def doc_cb(document_future):
                    try:
                        result = document_future.result()
                        upsert_future.set_result(result)
                    except Exception as e:
                        upsert_future.set_exception(e)

                document_future.add_done_callback(doc_cb)
            except Exception as e:
                upsert_future.set_exception(e)

        update_future.add_done_callback(update_cb)
        return upsert_future
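An upsert sketch, assuming an awaitable future and a hypothetical ``Counter`` document; the resolved value is the new or overwritten document itself:

    async def bump(name):
        # Creates the counter if it does not exist yet, otherwise increments
        # the matched one; the returned document reflects the final state.
        counter = await Counter.objects.filter(name=name).upsert_one(
            set__name=name, inc__value=1)
        return counter.value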
Example #10
    def first(self):
        """Retrieve the first object matching the query.
        """
        queryset = self.clone()
        first_future = queryset[0]
        future = get_future(self)

        def first_cb(first_future):
            try:
                result = first_future.result()
                future.set_result(result)
            except IndexError:
                result = None
                future.set_result(result)
            except Exception as e:
                future.set_exception(e)

        first_future.add_done_callback(first_cb)
        return future
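Unlike ``get``, ``first`` swallows the empty case; a sketch assuming an awaitable future and a hypothetical ``Article`` document:

    async def latest_article():
        # Resolves to None instead of raising when nothing matches.
        return await Article.objects.order_by('-created_at').first()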
Example #11
    def aggregate_average(self, field):
        """Average over the values of the specified field.

        :param field: the field to average over; use dot-notation to refer to
            embedded document fields

        This method is more performant than the regular `average`, because it
        uses the aggregation framework instead of map-reduce.
        """
        cursor = self._document._get_collection().aggregate([{
            '$match':
            self._query
        }, {
            '$group': {
                '_id': 'avg',
                'total': {
                    '$avg': '$' + field
                }
            }
        }])

        fn_future = cursor.fetch_next
        future = get_future(self)

        def fetch_next_cb(fn_future):
            result = fn_future.result()
            if result:
                doc = cursor.next_object()
                avg = doc['total']
            else:
                avg = 0

            future.set_result(avg)

        fn_future.add_done_callback(fetch_next_cb)
        return future
Example #12
    def to_list(self, length=100):
        """Returns a list of the current documents in the queryset.

        :param length: maximum number of documents to return for this call."""

        list_future = get_future(self)

        def _to_list_cb(future):
            # Transforms mongo's raw documents into
            # SanicMongo documents
            docs_list = future.result()
            final_list = [
                self._document._from_son(
                    d,
                    _auto_dereference=self._auto_dereference,
                    only_fields=self.only_fields) for d in docs_list
            ]

            list_future.set_result(final_list)

        cursor = self._cursor
        future = cursor.to_list(length)
        future.add_done_callback(_to_list_cb)
        return list_future
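A listing sketch for ``to_list``, assuming an awaitable future and a hypothetical ``Article`` document; ``length`` caps how many documents a single call materializes:

    async def front_page():
        # At most 10 Article instances, already converted from raw BSON by
        # _from_son in the callback above.
        articles = await Article.objects.filter(published=True).to_list(length=10)
        return [a.title for a in articles]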
Example #13
    def _check_delete_rules(self, doc, queryset, cascade_refs, write_concern):
        """Checks the delete rules for documents being deleted in a queryset.
        Raises an exception if any document has a DENY rule."""

        delete_rules = doc._meta.get('delete_rules') or {}
        # Check for DENY rules before actually deleting/nullifying any other
        # references
        for rule_entry in delete_rules:
            document_cls, field_name = rule_entry
            if document_cls._meta.get('abstract'):
                continue
            rule = doc._meta['delete_rules'][rule_entry]
            if rule == DENY and document_cls.objects(**{
                    field_name + '__in': self
            }).count() > 0:
                msg = ("Could not delete document (%s.%s refers to it)" %
                       (document_cls.__name__, field_name))
                raise OperationError(msg)

        ret_future = get_future(self)

        # We need to set a result for the future if there are no rules,
        # otherwise the callbacks will never be called.
        if not delete_rules:
            ret_future.set_result(None)

        for rule_entry in delete_rules:
            document_cls, field_name = rule_entry
            if document_cls._meta.get('abstract'):
                continue
            rule = doc._meta['delete_rules'][rule_entry]
            if rule == CASCADE:
                cascade_refs = set() if cascade_refs is None else cascade_refs
                for ref in queryset:
                    cascade_refs.add(ref.id)
                ref_q = document_cls.objects(**{
                    field_name + '__in': self,
                    'id__nin': cascade_refs
                })

                ref_q_count_future = ref_q.count()

                def count_cb(count_future):
                    try:
                        count = count_future.result()
                        if count > 0:
                            del_future = ref_q.delete(
                                write_concern=write_concern,
                                cascade_refs=cascade_refs)

                            def del_cb(del_future):
                                try:
                                    r = del_future.result()
                                    ret_future.set_result(r)
                                except Exception as e:
                                    ret_future.set_exception(e)

                            del_future.add_done_callback(del_cb)
                    except Exception as e:
                        ret_future.set_exception(e)

                ref_q_count_future.add_done_callback(count_cb)

            elif rule in (NULLIFY, PULL):
                if rule == NULLIFY:
                    updatekw = {'unset__%s' % field_name: 1}
                else:
                    updatekw = {'pull_all__%s' % field_name: self}

                update_future = document_cls.objects(**{
                    field_name + '__in': self
                }).update(write_concern=write_concern, **updatekw)

                def update_cb(update_future):
                    try:
                        result = update_future.result()
                        ret_future.set_result(result)
                    except Exception as e:
                        ret_future.set_exception(e)

                update_future.add_done_callback(update_cb)

        return ret_future
Example #14
    def average(self, field):
        """Average over the values of the specified field.

        :param field: the field to average over; use dot-notation to refer to
            embedded document fields
        """
        map_func = """
            function() {
                var path = '{{~%(field)s}}'.split('.'),
                field = this;

                for (p in path) {
                    if (typeof field != 'undefined')
                       field = field[path[p]];
                    else
                       break;
                }

                if (field && field.constructor == Array) {
                    field.forEach(function(item) {
                        emit(1, {t: item||0, c: 1});
                    });
                } else if (typeof field != 'undefined') {
                    emit(1, {t: field||0, c: 1});
                }
            }
        """ % dict(field=field)

        reduce_func = Code("""
            function(key, values) {
                var out = {t: 0, c: 0};
                for (var i in values) {
                    var value = values[i];
                    out.t += value.t;
                    out.c += value.c;
                }
                return out;
            }
        """)

        finalize_func = Code("""
            function(key, value) {
                return value.t / value.c;
            }
        """)

        future = get_future(self)
        mr_future = self.inline_map_reduce(map_func,
                                           reduce_func,
                                           finalize=finalize_func)

        def average_cb(mr_future):
            results = mr_future.result()
            for result in results:
                average = result.value
                break
            else:
                average = 0

            future.set_result(average)

        mr_future.add_done_callback(average_cb)
        return future
Example #15
    def item_frequencies(self, field, normalize=False):
        """Returns a dictionary of all items present in a field across
        the whole queried set of documents, and their corresponding frequency.
        This is useful for generating tag clouds, or searching documents.

        .. note::

            Can only do direct simple mappings and cannot map across
            :class:`~mongoengine.fields.ReferenceField` or
            :class:`~mongoengine.fields.GenericReferenceField`; for more
            complex counting a manual map-reduce call is required.

        If the field is a :class:`~mongoengine.fields.ListField`,
        the items within each list will be counted individually.

        :param field: the field to use
        :param normalize: normalize the results so they add to 1.0
        """

        map_func = """
            function() {
                var path = '{{~%(field)s}}'.split('.');
                var field = this;

                for (p in path) {
                    if (typeof field != 'undefined')
                       field = field[path[p]];
                    else
                       break;
                }
                if (field && field.constructor == Array) {
                    field.forEach(function(item) {
                        emit(item, 1);
                    });
                } else if (typeof field != 'undefined') {
                    emit(field, 1);
                } else {
                    emit(null, 1);
                }
            }
        """ % dict(field=field)
        reduce_func = """
            function(key, values) {
                var total = 0;
                var valuesSize = values.length;
                for (var i=0; i < valuesSize; i++) {
                    total += parseInt(values[i], 10);
                }
                return total;
            }
        """
        mr_future = self.inline_map_reduce(map_func, reduce_func)
        future = get_future(self)

        def item_frequencies_cb(mr_future):
            values = mr_future.result()
            frequencies = {}
            for f in values:
                key = f.key
                if isinstance(key, float):
                    if int(key) == key:
                        key = int(key)
                frequencies[key] = int(f.value)

            if normalize:
                count = sum(frequencies.values())
                frequencies = dict([(k, float(v) / count)
                                    for k, v in list(frequencies.items())])

            future.set_result(frequencies)

        mr_future.add_done_callback(item_frequencies_cb)
        return future
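A tag-cloud style sketch for ``item_frequencies``, assuming an awaitable future and a hypothetical ``Article`` document with a ``tags`` ListField; with ``normalize=True`` the frequencies sum to 1.0:

    async def tag_cloud():
        # Each item of the `tags` list is counted individually.
        freqs = await Article.objects.item_frequencies('tags', normalize=True)
        # e.g. {'python': 0.4, 'mongodb': 0.35, 'async': 0.25}
        return freqs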
Example #16
    def delete(self,
               write_concern=None,
               _from_doc_delete=False,
               cascade_refs=None):
        """Deletes the documents matched by the query.

        :param write_concern: Extra keyword arguments are passed down which
            will be used as options for the resultant
            ``getLastError`` command.  For example,
            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
            wait until at least two servers have recorded the write and
            will force an fsync on the primary server.
        :param _from_doc_delete: True when called from a document delete; in
          that case the delete signals have already been triggered, so they
          are not fired again.

        :returns: the number of deleted documents
        """

        queryset = self.clone()
        doc = queryset._document

        if write_concern is None:
            write_concern = {}

        # Handle deletes where skips or limits have been applied or
        # there is an untriggered delete signal
        has_delete_signal = signals.signals_available and (
            signals.pre_delete.has_receivers_for(self._document)
            or signals.post_delete.has_receivers_for(self._document))

        call_document_delete = (queryset._skip or queryset._limit
                                or has_delete_signal) and not _from_doc_delete

        if call_document_delete:
            async_method = asynchronize(self._document_delete)
            return async_method(queryset, write_concern)

        dr_future = self._check_delete_rules(doc, queryset, cascade_refs,
                                             write_concern)

        ret_future = get_future(self)

        def dr_cb(dr_future):
            """callback for _check_delete_rules future"""
            try:
                dr_future.result()

                remove_future = queryset._collection.remove(
                    queryset._query, **write_concern)

                def r_cb(remove_future):
                    """Callback for _collection.remove"""

                    try:
                        result = remove_future.result()
                        if result:
                            ret_future.set_result(result.get('n'))
                    except Exception as e:
                        ret_future.set_exception(e)

                remove_future.add_done_callback(r_cb)

            except Exception as e:
                ret_future.set_exception(e)

        dr_future.add_done_callback(dr_cb)
        return ret_future
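A deletion sketch, assuming an awaitable future and a hypothetical ``Article`` document; the resolved value is the number of documents removed, and delete rules such as CASCADE or DENY on referencing documents are honoured first:

    async def purge_drafts():
        # write_concern is forwarded to the underlying remove call.
        deleted = await Article.objects.filter(status='draft').delete(
            write_concern={'w': 1})
        return deleted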