Example #1
        def second_lookup():
            with LoggingContext("12") as context_12:
                context_12.request = "12"
                self.http_client.post_json.reset_mock()
                self.http_client.post_json.return_value = defer.Deferred()

                res_deferreds_2 = kr.verify_json_objects_for_server([
                    ("server10", json1, 0, "test")
                ])
                res_deferreds_2[0].addBoth(self.check_context, None)
                yield make_deferred_yieldable(res_deferreds_2[0])

                # let verify_json_objects_for_server finish its work before we kill the
                # logcontext
                yield self.clock.sleep(0)
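For context, a minimal sketch of the `check_context` helper the test registers via `addBoth` (the helper is not part of the snippet above, so its exact shape is an assumption): it asserts that the logcontext active when the callback fires is the expected one, and passes the result through unchanged.

    def check_context(self, val, expected):
        # assert that the current logcontext's request attribute matches what
        # the test expects; return val so addBoth passes the result through
        self.assertEqual(getattr(current_context(), "request", None), expected)
        return val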
Example #2
        def get_file(destination, path, output_stream, args=None, max_size=None):
            """
            Returns tuple[int,dict,str,int] of file length, response headers,
            absolute URI, and response code.
            """

            def write_to(r):
                data, response = r
                output_stream.write(data)
                return response

            d = Deferred()
            d.addCallback(write_to)
            self.fetches.append((d, destination, path, args))
            return make_deferred_yieldable(d)
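A hedged usage sketch for the stub above: later in the test, one of the queued fetches can be completed by firing the stored Deferred with a `(data, response)` pair, where `response` matches the documented `(length, headers, uri, code)` tuple. The literal values below are illustrative only.

        # later in the test: complete the oldest outstanding fetch
        d, destination, path, args = self.fetches.pop(0)
        data = b"file contents"
        headers = {b"Content-Type": [b"text/plain"]}
        # write_to() writes `data` to output_stream and resolves the wrapped
        # deferred with the (length, headers, uri, code) tuple
        d.callback((data, (len(data), headers, "http://example.com/file", 200)))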
Example #3
    def test_send_large_txns_ephemeral(self):
        d = defer.Deferred()
        self.txn_ctrl.send = Mock(
            side_effect=lambda x, y, z: make_deferred_yieldable(d)
        )
        # Expect the event to be sent immediately.
        service = Mock(id=4, name="service")
        first_chunk = [Mock(name="event%i" % (i + 1)) for i in range(100)]
        second_chunk = [Mock(name="event%i" % (i + 101)) for i in range(50)]
        event_list = first_chunk + second_chunk
        self.queuer.enqueue_ephemeral(service, event_list)
        self.txn_ctrl.send.assert_called_once_with(service, [], first_chunk)
        d.callback(service)
        self.txn_ctrl.send.assert_called_with(service, [], second_chunk)
        self.assertEquals(2, self.txn_ctrl.send.call_count)
Example #4
    def on_REPLICATE(self, cmd):
        stream_name = cmd.stream_name
        token = cmd.token

        if stream_name == "ALL":
            # Subscribe to all streams we're publishing to.
            deferreds = [
                run_in_background(self.subscribe_to_stream, stream, token)
                for stream in iterkeys(self.streamer.streams_by_name)
            ]

            return make_deferred_yieldable(
                defer.gatherResults(deferreds, consumeErrors=True))
        else:
            return self.subscribe_to_stream(stream_name, token)
Example #5
    def request(self, method, uri, data=None, headers=None):
        """
        Args:
            method (str): HTTP method to use.
            uri (str): URI to query.
            data (bytes): Data to send in the request body, if applicable.
            headers (t.w.http_headers.Headers): Request headers.
        """
        # A small wrapper around self.agent.request() so we can easily attach
        # counters to it
        outgoing_requests_counter.labels(method).inc()

        # log request but strip `access_token` (AS requests for example include this)
        logger.info("Sending request %s %s", method, redact_uri(uri))

        try:
            body_producer = None
            if data is not None:
                body_producer = QuieterFileBodyProducer(BytesIO(data))

            request_deferred = treq.request(method,
                                            uri,
                                            agent=self.agent,
                                            data=body_producer,
                                            headers=headers,
                                            **self._extra_treq_args)
            request_deferred = timeout_deferred(
                request_deferred,
                60,
                self.hs.get_reactor(),
                cancelled_to_request_timed_out_error,
            )
            response = yield make_deferred_yieldable(request_deferred)

            incoming_responses_counter.labels(method, response.code).inc()
            logger.info("Received response to %s %s: %s", method,
                        redact_uri(uri), response.code)
            return response
        except Exception as e:
            incoming_responses_counter.labels(method, "ERR").inc()
            logger.info(
                "Error sending request to  %s %s: %s %s",
                method,
                redact_uri(uri),
                type(e).__name__,
                e.args[0],
            )
            raise
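A minimal caller sketch for the `request` helper above, assuming `readBody` is imported from `twisted.web.client`: `readBody` returns a plain Twisted Deferred that does not follow the synapse logcontext rules, so it is wrapped before being yielded.

    @defer.inlineCallbacks
    def get_raw(self, uri):
        # illustrative caller; reuses the wrapped request() above
        response = yield self.request("GET", uri)
        body = yield make_deferred_yieldable(readBody(response))
        return body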
Example #6
    def wrap(self, key, callback, *args, **kwargs):
        """Wrap together a *get* and *set* call, taking care of logcontexts

        First looks up the key in the cache, and if it is present makes it
        follow the synapse logcontext rules and returns it.

        Otherwise, makes a call to *callback(*args, **kwargs)*, which should
        follow the synapse logcontext rules, and adds the result to the cache.

        Example usage:

            @defer.inlineCallbacks
            def handle_request(request):
                # etc
                return result

            result = yield response_cache.wrap(
                key,
                handle_request,
                request,
            )

        Args:
            key (hashable): key to get/set in the cache

            callback (callable): function to call if the key is not found in
                the cache

            *args: positional parameters to pass to the callback, if it is used

            **kwargs: named parameters to pass to the callback, if it is used

        Returns:
            twisted.internet.defer.Deferred: yieldable result
        """
        result = self.get(key)
        if not result:
            logger.info("[%s]: no cached result for [%s], calculating new one",
                        self._name, key)
            d = run_in_background(callback, *args, **kwargs)
            result = self.set(key, d)
        elif not isinstance(result, defer.Deferred) or result.called:
            logger.info("[%s]: using completed cached result for [%s]",
                        self._name, key)
        else:
            logger.info("[%s]: using incomplete cached result for [%s]",
                        self._name, key)
        return make_deferred_yieldable(result)
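To illustrate the deduplication this enables, here is a hedged sketch of a handler that funnels concurrent requests for the same key through a single computation; `self.response_cache` and `self._do_expensive_lookup` are assumed names.

    @defer.inlineCallbacks
    def on_get_thing(self, thing_id):
        # concurrent callers with the same thing_id share one lookup; later
        # callers get the in-flight or cached result instead of a new call
        result = yield self.response_cache.wrap(
            thing_id,
            self._do_expensive_lookup,
            thing_id,
        )
        return result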
Example #7
    def _enqueue_events(self, events, allow_rejected=False):
        """Fetches events from the database using the _event_fetch_list. This
        allows batch and bulk fetching of events - it allows us to fetch events
        without having to create a new transaction for each request for events.
        """
        if not events:
            defer.returnValue({})

        events_d = defer.Deferred()
        with self._event_fetch_lock:
            self._event_fetch_list.append((events, events_d))

            self._event_fetch_lock.notify()

            if self._event_fetch_ongoing < EVENT_QUEUE_THREADS:
                self._event_fetch_ongoing += 1
                should_start = True
            else:
                should_start = False

        if should_start:
            run_as_background_process("fetch_events", self.runWithConnection,
                                      self._do_fetch)

        logger.debug("Loading %d events", len(events))
        with PreserveLoggingContext():
            rows = yield events_d
        logger.debug("Loaded %d events (%d rows)", len(events), len(rows))

        if not allow_rejected:
            rows[:] = [r for r in rows if r["rejected_reason"] is None]

        res = yield make_deferred_yieldable(
            defer.gatherResults(
                [
                    run_in_background(
                        self._get_event_from_row,
                        row["internal_metadata"],
                        row["json"],
                        row["redactions"],
                        rejected_reason=row["rejected_reason"],
                        format_version=row["format_version"],
                    ) for row in rows
                ],
                consumeErrors=True,
            ))

        defer.returnValue({e.event.event_id: e for e in res if e})
Example #8
    def test_send_large_txns_ephemeral(self):
        d = defer.Deferred()
        self.txn_ctrl.send = Mock(return_value=make_deferred_yieldable(d))
        # Expect the event to be sent immediately.
        service = Mock(id=4, name="service")
        first_chunk = [Mock(name="event%i" % (i + 1)) for i in range(100)]
        second_chunk = [Mock(name="event%i" % (i + 101)) for i in range(50)]
        event_list = first_chunk + second_chunk
        self.scheduler.enqueue_for_appservice(service, ephemeral=event_list)
        self.txn_ctrl.send.assert_called_once_with(
            service, [], first_chunk, [], None, None, DeviceListUpdates()
        )
        d.callback(service)
        self.txn_ctrl.send.assert_called_with(
            service, [], second_chunk, [], None, None, DeviceListUpdates()
        )
        self.assertEqual(2, self.txn_ctrl.send.call_count)
Example #9
    def send_email(self, email_address, subject, template_vars):
        """Send an email with the given information and template text"""
        try:
            from_string = self.hs.config.email_notif_from % {
                "app": self.app_name
            }
        except TypeError:
            from_string = self.hs.config.email_notif_from

        raw_from = email.utils.parseaddr(from_string)[1]
        raw_to = email.utils.parseaddr(email_address)[1]

        if raw_to == "":
            raise RuntimeError("Invalid 'to' address")

        html_text = self.template_html.render(**template_vars)
        html_part = MIMEText(html_text, "html", "utf8")

        plain_text = self.template_text.render(**template_vars)
        text_part = MIMEText(plain_text, "plain", "utf8")

        multipart_msg = MIMEMultipart("alternative")
        multipart_msg["Subject"] = subject
        multipart_msg["From"] = from_string
        multipart_msg["To"] = email_address
        multipart_msg["Date"] = email.utils.formatdate()
        multipart_msg["Message-ID"] = email.utils.make_msgid()
        multipart_msg.attach(text_part)
        multipart_msg.attach(html_part)

        logger.info("Sending email to %s" % email_address)

        yield make_deferred_yieldable(
            self.sendmail(
                self.hs.config.email_smtp_host,
                raw_from,
                raw_to,
                multipart_msg.as_string().encode("utf8"),
                reactor=self.hs.get_reactor(),
                port=self.hs.config.email_smtp_port,
                requireAuthentication=self.hs.config.email_smtp_user is not None,
                username=self.hs.config.email_smtp_user,
                password=self.hs.config.email_smtp_pass,
                requireTransportSecurity=self.hs.config.require_transport_security,
            ))
Example #10
    def get_prev_state_ids(self, store):
        """Gets the prev state IDs

        Returns:
            Deferred[dict[(str, str), str]|None]: Returns None if state_group
                is None, which happens when the associated event is an outlier.
                Maps a (type, state_key) to the event ID of the state event matching
                this tuple.
        """

        if not self._fetching_state_deferred:
            self._fetching_state_deferred = run_in_background(
                self._fill_out_state, store)

        yield make_deferred_yieldable(self._fetching_state_deferred)

        defer.returnValue(self._prev_state_ids)
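The method above is an instance of a reusable pattern: start the fetch exactly once with `run_in_background`, remember the in-flight Deferred, and have every caller wait on it via `make_deferred_yieldable`. A stripped-down sketch, with `self._load` and `self._value` as assumed names:

    @defer.inlineCallbacks
    def get_value(self):
        if not self._fetch_deferred:
            # start the work once, detached from the caller's logcontext
            self._fetch_deferred = run_in_background(self._load)

        # every caller (including the first) waits on the same deferred
        yield make_deferred_yieldable(self._fetch_deferred)
        return self._value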
Example #11
    def store_file(self, path, file_info):
        """See StorageProvider.store_file"""

        parent_logcontext = current_context()

        def _store_file():
            with LoggingContext(parent_context=parent_logcontext):
                self._get_s3_client().upload_file(
                    Filename=os.path.join(self.cache_directory, path),
                    Bucket=self.bucket,
                    Key=path,
                    ExtraArgs={"StorageClass": self.storage_class},
                )

        return make_deferred_yieldable(
            threads.deferToThreadPool(reactor, self._s3_pool, _store_file)
        )
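The same shape works for any blocking call that needs to run on a thread pool: capture the caller's logcontext, re-enter it inside the worker thread, and wrap the resulting Deferred. A generic sketch with `blocking_call` and `my_threadpool` as assumed names:

    def run_blocking(self):
        parent_logcontext = current_context()

        def _run():
            # re-enter the caller's logcontext so log lines and metrics from
            # the worker thread are attributed to the right request
            with LoggingContext(parent_context=parent_logcontext):
                return blocking_call()

        return make_deferred_yieldable(
            threads.deferToThreadPool(reactor, my_threadpool, _run)
        )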
Example #12
    def put_json(self, uri, json_body, args={}, headers=None):
        """ Puts some json to the given URI.

        Args:
            uri (str): The URI to request, not including query parameters
            json_body (dict): The JSON to put in the HTTP body,
            args (dict): A dictionary used to create query strings; defaults to
                an empty dict.
                **Note**: The value of each key is assumed to be an iterable
                and *not* a string.
            headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
               header name to a list of values for that header
        Returns:
            Deferred: Succeeds when we get *any* 2xx HTTP response, with the
            HTTP body as JSON.
        Raises:
            HttpResponseException: On a non-2xx HTTP response.

            ValueError: if the response was not JSON
        """
        if len(args):
            query_bytes = urllib.parse.urlencode(args, True)
            uri = "%s?%s" % (uri, query_bytes)

        json_str = encode_canonical_json(json_body)

        actual_headers = {
            b"Content-Type": [b"application/json"],
            b"User-Agent": [self.user_agent],
            b"Accept": [b"application/json"],
        }
        if headers:
            actual_headers.update(headers)

        response = yield self.request("PUT",
                                      uri,
                                      headers=Headers(actual_headers),
                                      data=json_str)

        body = yield make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
            return json.loads(body)
        else:
            raise HttpResponseException(response.code, response.phrase, body)
Example #13
        def wrapped(*args, **kwargs):
            # If we're passed a cache_context then we'll want to call its invalidate()
            # whenever we are invalidated
            invalidate_callback = kwargs.pop("on_invalidate", None)

            cache_key = get_cache_key(args, kwargs)

            # Add our own `cache_context` to argument list if the wrapped function
            # has asked for one
            if self.add_cache_context:
                kwargs["cache_context"] = _CacheContext(cache, cache_key)

            try:
                cached_result_d = cache.get(cache_key,
                                            callback=invalidate_callback)

                if isinstance(cached_result_d, ObservableDeferred):
                    observer = cached_result_d.observe()
                else:
                    observer = cached_result_d

            except KeyError:
                ret = defer.maybeDeferred(preserve_fn(self.function_to_call),
                                          obj, *args, **kwargs)

                def onErr(f):
                    cache.invalidate(cache_key)
                    return f

                ret.addErrback(onErr)

                # If our cache_key is a string on py2, try to convert to ascii
                # to save a bit of space in large caches. Py3 does this
                # internally automatically.
                if six.PY2 and isinstance(cache_key, string_types):
                    cache_key = to_ascii(cache_key)

                result_d = ObservableDeferred(ret, consumeErrors=True)
                cache.set(cache_key, result_d, callback=invalidate_callback)
                observer = result_d.observe()

            if isinstance(observer, defer.Deferred):
                return make_deferred_yieldable(observer)
            else:
                return observer
Example #14
    def persist_event(self,
                      event: FrozenEvent,
                      context: EventContext,
                      backfilled: bool = False):
        """
        Returns:
            Deferred[Tuple[int, int]]: the stream ordering of ``event``,
            and the stream ordering of the latest persisted event
        """
        deferred = self._event_persist_queue.add_to_queue(
            event.room_id, [(event, context)], backfilled=backfilled)

        self._maybe_start_persisting(event.room_id)

        yield make_deferred_yieldable(deferred)

        max_persisted_id = yield self.main_store.get_current_events_token()
        return (event.internal_metadata.stream_ordering, max_persisted_id)
Example #15
    def test_send_single_event_with_queue(self):
        d = defer.Deferred()
        self.txn_ctrl.send = Mock(side_effect=lambda x, y: make_deferred_yieldable(d))
        service = Mock(id=4)
        event = Mock(event_id="first")
        event2 = Mock(event_id="second")
        event3 = Mock(event_id="third")
        # Send an event and don't resolve it just yet.
        self.queuer.enqueue(service, event)
        # Send more events: expect send() to NOT be called multiple times.
        self.queuer.enqueue(service, event2)
        self.queuer.enqueue(service, event3)
        self.txn_ctrl.send.assert_called_with(service, [event])
        self.assertEquals(1, self.txn_ctrl.send.call_count)
        # Resolve the send event: expect the queued events to be sent
        d.callback(service)
        self.txn_ctrl.send.assert_called_with(service, [event2, event3])
        self.assertEquals(2, self.txn_ctrl.send.call_count)
Example #16
def yieldable_gather_results(func, iter, *args, **kwargs):
    """Executes the function with each argument concurrently.

    Args:
        func (func): Function to execute that returns a Deferred
        iter (iter): An iterable that yields items that get passed as the first
            argument to the function
        *args: Arguments to be passed to each call to func

    Returns:
        Deferred[list]: Resolved when all functions have been invoked, or errors if
        one of the function calls fails.
    """
    return make_deferred_yieldable(
        defer.gatherResults(
            [run_in_background(func, item, *args, **kwargs) for item in iter],
            consumeErrors=True,
        )).addErrback(unwrapFirstError)
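A short usage sketch, assuming `fetch_profile` returns a Deferred and follows the synapse logcontext rules as `run_in_background` requires:

    @defer.inlineCallbacks
    def fetch_all_profiles(self, user_ids):
        # runs one fetch_profile call per user id, concurrently, and waits
        # for them all while preserving the caller's logcontext
        profiles = yield yieldable_gather_results(self.fetch_profile, user_ids)
        return profiles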
Example #17
    def update_state(self, state_group, prev_state_ids, current_state_ids,
                     prev_group, delta_ids):
        """Replace the state in the context
        """

        # We need to make sure we wait for any ongoing fetching of state
        # to complete so that the updated state doesn't get clobbered
        if self._fetching_state_deferred:
            yield make_deferred_yieldable(self._fetching_state_deferred)

        self.state_group = state_group
        self._prev_state_ids = prev_state_ids
        self.prev_group = prev_group
        self._current_state_ids = current_state_ids
        self.delta_ids = delta_ids

        # We need to ensure that we've marked the state as having been fetched
        self._fetching_state_deferred = defer.succeed(None)
Example #18
def _handle_json_response(reactor, timeout_sec, request, response):
    """
    Reads the JSON body of a response, with a timeout

    Args:
        reactor (IReactor): twisted reactor, for the timeout
        timeout_sec (float): number of seconds to wait for response to complete
        request (MatrixFederationRequest): the request that triggered the response
        response (IResponse): response to the request

    Returns:
        dict: parsed JSON response
    """
    try:
        check_content_type_is_json(response.headers)

        d = treq.json_content(response)
        d = timeout_deferred(d, timeout=timeout_sec, reactor=reactor)

        body = yield make_deferred_yieldable(d)
    except TimeoutError as e:
        logger.warning(
            "{%s} [%s] Timed out reading response",
            request.txn_id,
            request.destination,
        )
        raise RequestSendFailed(e, can_retry=True) from e
    except Exception as e:
        logger.warning(
            "{%s} [%s] Error reading response: %s",
            request.txn_id,
            request.destination,
            e,
        )
        raise
    logger.info(
        "{%s} [%s] Completed: %d %s",
        request.txn_id,
        request.destination,
        response.code,
        response.phrase.decode("ascii", errors="replace"),
    )
    return body
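A hedged caller sketch, assuming `_handle_json_response` is decorated with `@defer.inlineCallbacks` (the decorator is not shown in the snippet), that `self.reactor` is available, and that the response was obtained elsewhere via a hypothetical `self._send_http_request` helper:

    @defer.inlineCallbacks
    def do_request(self, request):
        response = yield self._send_http_request(request)  # hypothetical helper
        # parse the JSON body, giving up after 60 seconds
        body = yield _handle_json_response(self.reactor, 60, request, response)
        return body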
Example #19
    def post_urlencoded_get_raw(self, url, args={}):
        query_bytes = urllib.parse.urlencode(encode_urlencode_args(args), True)

        response = yield self.request(
            "POST",
            url,
            data=query_bytes,
            headers=Headers({
                b"Content-Type": [b"application/x-www-form-urlencoded"],
                b"User-Agent": [self.user_agent],
            }),
        )

        try:
            body = yield make_deferred_yieldable(readBody(response))
            return body
        except PartialDownloadError as e:
            # twisted dislikes google's response, no content length.
            return e.response
Example #20
    def test_make_deferred_yieldable(self):
        # a function which returns an incomplete deferred, but doesn't follow
        # the synapse rules.
        def blocking_function():
            d = defer.Deferred()
            reactor.callLater(0, d.callback, None)
            return d

        sentinel_context = current_context()

        with LoggingContext("one"):
            d1 = make_deferred_yieldable(blocking_function())
            # make sure that the context was reset by make_deferred_yieldable
            self.assertIs(current_context(), sentinel_context)

            yield d1

            # now it should be restored
            self._check_test_key("one")
Example #21
    def query_3pe(self, kind, protocol, fields):
        services = yield self._get_services_for_3pn(protocol)

        results = yield make_deferred_yieldable(
            defer.DeferredList(
                [
                    run_in_background(self.appservice_api.query_3pe, service,
                                      kind, protocol, fields)
                    for service in services
                ],
                consumeErrors=True,
            ))

        ret = []
        for (success, result) in results:
            if success:
                ret.extend(result)

        return ret
Example #22
    def backfill(self, dest, room_id, limit, extremities):
        """Requests some more historic PDUs for the given context from the
        given destination server.

        Args:
            dest (str): The remote home server to ask.
            room_id (str): The room_id to backfill.
            limit (int): The maximum number of PDUs to return.
            extremities (list): List of PDU ids and origins of the first PDUs
                we have seen from the context

        Returns:
            Deferred: Results in the received PDUs.
        """
        logger.debug("backfill extrem=%s", extremities)

        # If there are no extremities then we've (probably) reached the start.
        if not extremities:
            return

        transaction_data = yield self.transport_layer.backfill(
            dest, room_id, extremities, limit
        )

        logger.debug("backfill transaction_data=%s", repr(transaction_data))

        room_version = yield self.store.get_room_version(room_id)
        format_ver = room_version_to_event_format(room_version)

        pdus = [
            event_from_pdu_json(p, format_ver, outlier=False)
            for p in transaction_data["pdus"]
        ]

        # FIXME: We should handle signature failures more gracefully.
        pdus[:] = yield make_deferred_yieldable(
            defer.gatherResults(
                self._check_sigs_and_hashes(room_version, pdus), consumeErrors=True
            ).addErrback(unwrapFirstError)
        )

        return pdus
Example #23
    def post_urlencoded_get_json(self, uri, args={}, headers=None):
        """
        Args:
            uri (str):
            args (dict[str, str|List[str]]): query params
            headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
               header name to a list of values for that header

        Returns:
            Deferred[object]: parsed json

        Raises:
            HttpResponseException: On a non-2xx HTTP response.

            ValueError: if the response was not JSON
        """

        # TODO: Do we ever want to log message contents?
        logger.debug("post_urlencoded_get_json args: %s", args)

        query_bytes = urllib.parse.urlencode(encode_urlencode_args(args),
                                             True).encode("utf8")

        actual_headers = {
            b"Content-Type": [b"application/x-www-form-urlencoded"],
            b"User-Agent": [self.user_agent],
            b"Accept": [b"application/json"],
        }
        if headers:
            actual_headers.update(headers)

        response = yield self.request("POST",
                                      uri,
                                      headers=Headers(actual_headers),
                                      data=query_bytes)

        body = yield make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
            return json.loads(body)
        else:
            raise HttpResponseException(response.code, response.phrase, body)
Example #24
        def get_file(
            destination: str,
            path: str,
            output_stream: BinaryIO,
            args: Optional[Dict[str, Union[str, List[str]]]] = None,
            max_size: Optional[int] = None,
        ) -> Deferred:
            """
            Returns tuple[int,dict,str,int] of file length, response headers,
            absolute URI, and response code.
            """
            def write_to(r):
                data, response = r
                output_stream.write(data)
                return response

            d = Deferred()
            d.addCallback(write_to)
            self.fetches.append((d, destination, path, args))
            return make_deferred_yieldable(d)
Example #25
def respond_with_file(request,
                      media_type,
                      file_path,
                      file_size=None,
                      upload_name=None):
    logger.debug("Responding with %r", file_path)

    if os.path.isfile(file_path):
        if file_size is None:
            stat = os.stat(file_path)
            file_size = stat.st_size

        add_file_headers(request, media_type, file_size, upload_name)

        with open(file_path, "rb") as f:
            yield make_deferred_yieldable(FileSender().beginFileTransfer(
                f, request))

        finish_request(request)
    else:
        respond_404(request)
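A minimal sketch of how such a responder is typically invoked from an async render method; the path and media type here are illustrative, and the surrounding resource class is assumed:

    @defer.inlineCallbacks
    def _async_render_GET(self, request):
        # stream a local file back to the client, then finish the request
        yield respond_with_file(
            request,
            "image/png",
            "/media/local_content/abc123",  # hypothetical local path
            upload_name="avatar.png",
        )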
Example #26
    def persist_event(self, event, context, backfilled=False):
        """

        Args:
            event (EventBase):
            context (EventContext):
            backfilled (bool):

        Returns:
            Deferred: resolves to (int, int): the stream ordering of ``event``,
            and the stream ordering of the latest persisted event
        """
        deferred = self._event_persist_queue.add_to_queue(
            event.room_id, [(event, context)], backfilled=backfilled)

        self._maybe_start_persisting(event.room_id)

        yield make_deferred_yieldable(deferred)

        max_persisted_id = yield self.main_store.get_current_events_token()
        return (event.internal_metadata.stream_ordering, max_persisted_id)
Example #27
        def _wrapped(*args, **kwargs):
            # If we're passed a cache_context then we'll want to call its invalidate()
            # whenever we are invalidated
            invalidate_callback = kwargs.pop("on_invalidate", None)

            cache_key = get_cache_key(args, kwargs)

            try:
                ret = cache.get(cache_key, callback=invalidate_callback)
            except KeyError:
                # Add our own `cache_context` to argument list if the wrapped function
                # has asked for one
                if self.add_cache_context:
                    kwargs["cache_context"] = _CacheContext.get_instance(
                        cache, cache_key)

                ret = defer.maybeDeferred(preserve_fn(self.orig), obj, *args,
                                          **kwargs)
                ret = cache.set(cache_key, ret, callback=invalidate_callback)

            return make_deferred_yieldable(ret)
Example #28
    def read(self, key):
        new_defer = defer.Deferred()

        curr_readers = self.key_to_current_readers.setdefault(key, set())
        curr_writer = self.key_to_current_writer.get(key, None)

        curr_readers.add(new_defer)

        # We wait for the latest writer to finish writing. We can safely ignore
        # any existing readers... as they're readers.
        yield make_deferred_yieldable(curr_writer)

        @contextmanager
        def _ctx_manager():
            try:
                yield
            finally:
                new_defer.callback(None)
                self.key_to_current_readers.get(key, set()).discard(new_defer)

        defer.returnValue(_ctx_manager())
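The Deferred resolves to a context manager, so callers typically use it like the sketch below (a hedged example; `self.lock` and `self._load_value` are assumed names). Exiting the `with` block fires `new_defer`, which lets any waiting writer proceed.

    @defer.inlineCallbacks
    def read_value(self, key):
        with (yield self.lock.read(key)):
            # the read lock is held for the duration of this block
            value = yield self._load_value(key)
        return value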
Example #29
    def post_json_get_json(self, uri, post_json, headers=None):
        """

        Args:
            uri (str):
            post_json (object):
            headers (dict[str|bytes, List[str|bytes]]|None): If not None, a map from
               header name to a list of values for that header

        Returns:
            Deferred[object]: parsed json

        Raises:
            HttpResponseException: On a non-2xx HTTP response.

            ValueError: if the response was not JSON
        """
        json_str = encode_canonical_json(post_json)

        logger.debug("HTTP POST %s -> %s", json_str, uri)

        actual_headers = {
            b"Content-Type": [b"application/json"],
            b"User-Agent": [self.user_agent],
            b"Accept": [b"application/json"],
        }
        if headers:
            actual_headers.update(headers)

        response = yield self.request("POST",
                                      uri,
                                      headers=Headers(actual_headers),
                                      data=json_str)

        body = yield make_deferred_yieldable(readBody(response))

        if 200 <= response.code < 300:
            return json.loads(body)
        else:
            raise HttpResponseException(response.code, response.phrase, body)
Example #30
    def test_send_single_event_with_queue(self):
        d = defer.Deferred()
        self.txn_ctrl.send = Mock(return_value=make_deferred_yieldable(d))
        service = Mock(id=4)
        event = Mock(event_id="first")
        event2 = Mock(event_id="second")
        event3 = Mock(event_id="third")
        # Send an event and don't resolve it just yet.
        self.scheduler.enqueue_for_appservice(service, events=[event])
        # Send more events: expect send() to NOT be called multiple times.
        # (call enqueue_for_appservice multiple times deliberately)
        self.scheduler.enqueue_for_appservice(service, events=[event2])
        self.scheduler.enqueue_for_appservice(service, events=[event3])
        self.txn_ctrl.send.assert_called_with(
            service, [event], [], [], None, None, DeviceListUpdates()
        )
        self.assertEqual(1, self.txn_ctrl.send.call_count)
        # Resolve the send event: expect the queued events to be sent
        d.callback(service)
        self.txn_ctrl.send.assert_called_with(
            service, [event2, event3], [], [], None, None, DeviceListUpdates()
        )
        self.assertEqual(2, self.txn_ctrl.send.call_count)
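Taken together, the examples boil down to one rule: wrap any Deferred that does not follow the synapse logcontext rules before yielding it. A closing sketch of that rule in isolation, with `some_external_call` standing in for any non-logcontext-aware Twisted API:

    @defer.inlineCallbacks
    def wait_for_external_result(self):
        d = some_external_call()  # returns a plain Twisted Deferred
        # the current logcontext is reset while we wait and restored before
        # execution resumes after the yield
        result = yield make_deferred_yieldable(d)
        return result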