Example #1
def _first_batch(sock_info, namespace, query, limit, slave_ok,
                 codec_options, read_preference):
    """Simple query helper for retrieving a first (and possibly only) batch."""
    query = _Query(0, namespace, 0, limit, query, None, codec_options,
                   read_preference)
    request_id, msg, max_doc_size = query.get_message(slave_ok, sock_info.is_mongos)
    sock_info.send_message(msg, max_doc_size)
    response = sock_info.receive_message(1, request_id)
    return _unpack_response(response, None, codec_options)
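The helper hands back the unpacked reply document. Judging from the keys the async cursor at the end of this section reads off _unpack_response (data, cursor_id, number_returned), a call site might look roughly like the sketch below; sock_info, the namespace, the filter, codec_options, and read_preference are placeholders here, not code from any of the examples.

# Hypothetical call-site sketch; every name besides _first_batch is a placeholder.
reply = _first_batch(sock_info, 'test.things', {'x': 1}, 1, True,
                     codec_options, read_preference)
docs = reply['data']            # decoded documents in the first (and only) batch
cursor_id = reply['cursor_id']  # 0 when the server has nothing left to return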
Example #2
def _first_batch(sock_info, namespace, query, ntoreturn, slave_ok,
                 codec_options, read_preference):
    """Simple query helper for retrieving a first (and possibly only) batch."""
    query = _Query(0, namespace, 0, ntoreturn, query, None, codec_options,
                   read_preference, 0, ntoreturn)
    request_id, msg, max_doc_size = query.get_message(slave_ok,
                                                      sock_info.is_mongos)
    sock_info.send_message(msg, max_doc_size)
    response = sock_info.receive_message(1, request_id)
    return _unpack_response(response, None, codec_options)
Example #3
    def _refresh(self):
        """Refreshes the cursor with more data from Mongo.

        Returns the length of self.__data after refresh. Will exit early if
        self.__data is already non-empty. Raises OperationFailure when the
        cursor cannot be refreshed due to an error on the query.
        """
        if len(self.__data) or self.__killed:
            return len(self.__data)

        if self.__id is None:  # Query
            ntoreturn = self.__batch_size
            if self.__limit:
                if self.__batch_size:
                    ntoreturn = min(self.__limit, self.__batch_size)
                else:
                    ntoreturn = self.__limit
            self.__send_message(_Query(self.__query_flags,
                                       self.__collection.database.name,
                                       self.__collection.name,
                                       self.__skip,
                                       ntoreturn,
                                       self.__query_spec(),
                                       self.__projection,
                                       self.__codec_options,
                                       self.__read_preference,
                                       self.__limit,
                                       self.__batch_size,
                                       self.__read_concern))
            if not self.__id:
                self.__killed = True
        elif self.__id:  # Get More
            if self.__limit:
                limit = self.__limit - self.__retrieved
                if self.__batch_size:
                    limit = min(limit, self.__batch_size)
            else:
                limit = self.__batch_size

            # Exhaust cursors don't send getMore messages.
            if self.__exhaust:
                self.__send_message(None)
            else:
                self.__send_message(_GetMore(self.__collection.database.name,
                                             self.__collection.name,
                                             limit,
                                             self.__id,
                                             self.__codec_options,
                                             self.__max_await_time_ms))

        else:  # Cursor id is zero nothing else to return
            self.__killed = True

        return len(self.__data)
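The _refresh method above returns len(self.__data) so the cursor's iteration machinery can treat the result as a truthiness check. A driver loop in that spirit (a sketch of the usual pattern, assuming self.__data is a deque; not code taken from any of the examples) might be:

    def next(self):
        """Advance the cursor, refreshing from the server once the local batch is empty."""
        if len(self.__data) or self._refresh():
            return self.__data.popleft()   # _refresh() queued more documents into __data
        raise StopIteration                # cursor exhausted: id is 0 or the limit was reached

    __next__ = next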
Example #4
def _first_batch(sock_info, namespace, query, ntoreturn, slave_ok,
                 codec_options, read_preference, cmd, listeners):
    """Simple query helper for retrieving a first (and possibly only) batch."""
    query = _Query(0, namespace, 0, ntoreturn, query, None, codec_options,
                   read_preference, 0, ntoreturn)

    name = next(iter(cmd))
    duration = None
    publish = listeners.enabled_for_commands
    if publish:
        start = datetime.datetime.now()

    request_id, msg, max_doc_size = query.get_message(slave_ok,
                                                      sock_info.is_mongos)

    if publish:
        encoding_duration = datetime.datetime.now() - start
        listeners.publish_command_start(cmd,
                                        namespace.split('.', 1)[0], request_id,
                                        sock_info.address)
        start = datetime.datetime.now()

    sock_info.send_message(msg, max_doc_size)
    response = sock_info.receive_message(1, request_id)
    try:
        result = _unpack_response(response, None, codec_options)
    except Exception as exc:
        if publish:
            duration = (datetime.datetime.now() - start) + encoding_duration
            if isinstance(exc, (NotMasterError, OperationFailure)):
                failure = exc.details
            else:
                failure = _convert_exception(exc)
            listeners.publish_command_failure(duration, failure, name,
                                              request_id, sock_info.address)
        raise
    if publish:
        duration = (datetime.datetime.now() - start) + encoding_duration
        listeners.publish_command_success(duration, result, name, request_id,
                                          sock_info.address)

    return result
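This instrumented variant relies on only a small surface of the listeners object: the enabled_for_commands flag and the three publish_command_* calls visible above. A do-nothing stand-in inferred purely from those call sites (real drivers ship their own event-listener machinery) could look like this:

class _NoOpListeners:
    """Minimal stand-in exposing exactly what _first_batch touches above."""

    enabled_for_commands = False  # when False, all timing and publishing is skipped

    def publish_command_start(self, cmd, database_name, request_id, address):
        pass

    def publish_command_success(self, duration, reply, name, request_id, address):
        pass

    def publish_command_failure(self, duration, failure, name, request_id, address):
        pass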
Example #5
    async def _refresh(self) -> int:
        """Refreshes the cursor with more data from the server.

        Returns the length of self.__data after refresh. Exits early if
        self.__data is already non-empty or the cursor has been killed.
        """
        if len(self.__data) or self.__killed:
            return len(self.__data)

        if self.__connection is None:
            self.__connection = await self.__collection.database.client.get_connection()

        is_query = False
        if self.__id is None:
            is_query = True
            data = await self.__connection.perform_operation(
                _Query(self.__query_flags,
                       self.__collection.database.name,
                       self.__collection.name,
                       self.__skip,
                       self.__query_spec(),
                       self.__projection,
                       self.__codec_options,
                       self.__read_preference,
                       self.__limit,
                       self.__batch_size,
                       self.__read_concern,
                       self.__collation)
            )
        elif self.__id:
            if self.__limit:
                limit = self.__limit - self.__retrieved
                if self.__batch_size:
                    limit = min(limit, self.__batch_size)
            else:
                limit = self.__batch_size

            try:
                data = await self.__connection.perform_operation(
                    _GetMore(self.__collection.database.name,
                             self.__collection.name,
                             limit,
                             self.__id,
                             self.__codec_options,
                             self.__max_await_time_ms)
                )
            except EOFError:
                self.__killed = True
                raise
        else:  # Cursor id is zero: nothing more to fetch.
            self.__killed = True
            data = None
            self.__data = deque()  # keep __data a deque so len() below stays valid

        if data:
            doc = helpers._unpack_response(response=data,
                                           cursor_id=self.__id,
                                           codec_options=self.__codec_options)

            helpers._check_command_response(doc['data'][0])

            if not self.__explain:
                cursor = doc['data'][0]['cursor']
                self.__id = cursor['id']

                if is_query:
                    documents = cursor['firstBatch']
                else:
                    documents = cursor['nextBatch']
                self.__data = deque(documents)

                self.__retrieved += len(documents)
            else:
                self.__id = doc['cursor_id']
                self.__data = deque(doc['data'])
                self.__retrieved += doc['number_returned']

        if self.__id == 0:
            self.__killed = True

        if self.__limit and self.__id and self.__limit <= self.__retrieved:
            await self.close()

        return len(self.__data)
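As in the synchronous case, the return value doubles as a "got more data" signal, so an async iteration hook built on the method above (an assumed sketch, not code from the examples) might read:

    async def __anext__(self) -> dict:
        # Serve from the local batch first; only go back to the server when it is empty.
        if len(self.__data) or await self._refresh():
            return self.__data.popleft()
        raise StopAsyncIteration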