Example #1
 def _strip_prefix(self, key):
     """Takes bytes, emits string."""
     key = ensure_bytes(key)
     for prefix in self.task_keyprefix, self.group_keyprefix:
         if key.startswith(prefix):
             return bytes_to_str(key[len(prefix):])
     return bytes_to_str(key)
Example #2
    def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
        raw_payload = b64decode(ensure_bytes(payload))
        first_sep = raw_payload.find(sep)

        signer = raw_payload[:first_sep]
        signer_cert = self._cert_store[signer]

        # the public key size is in bits; shift right by 3 (divide by 8)
        # to get the signature length in bytes:
        #   2048-bit RSA key -> 256-byte signature
        #   4096-bit RSA key -> 512-byte signature
        sig_len = signer_cert.get_pubkey().key_size >> 3
        sep_len = len(sep)
        signature_start_position = first_sep + sep_len
        signature_end_position = signature_start_position + sig_len
        signature = raw_payload[
            signature_start_position:signature_end_position
        ]

        v = raw_payload[signature_end_position + sep_len:].split(sep)

        return {
            'signer': signer,
            'signature': signature,
            'content_type': bytes_to_str(v[0]),
            'content_encoding': bytes_to_str(v[1]),
            'body': bytes_to_str(v[2]),
        }
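The byte layout this unpacker expects can be reproduced with a small packing sketch. This is a hedged illustration derived only from the parsing above; the hypothetical _toy_pack below is not celery's actual SecureSerializer._pack.

    from base64 import b64encode

    def _toy_pack(signer, signature, content_type, content_encoding, body,
                  sep=b'\x00\x01'):
        # Mirror of the parse above: the five bytes fields are joined by
        # ``sep`` and base64-encoded.  The signature length is implied by the
        # signer's key size (a 2048-bit RSA key gives a 256-byte signature),
        # and the body must not itself contain ``sep`` for the final split
        # above to recover content_type/content_encoding/body intact.
        return b64encode(sep.join(
            [signer, signature, content_type, content_encoding, body]))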
Example #3
    def get_many(self, task_ids, timeout=None, interval=0.5):
        ids = set(task_ids)
        cached_ids = set()
        for task_id in ids:
            try:
                cached = self._cache[task_id]
            except KeyError:
                pass
            else:
                if cached['status'] in states.READY_STATES:
                    yield bytes_to_str(task_id), cached
                    cached_ids.add(task_id)

        ids.difference_update(cached_ids)
        iterations = 0
        while ids:
            keys = list(ids)
            r = self._mget_to_results(self.mget([self.get_key_for_task(k)
                                                 for k in keys]), keys)
            self._cache.update(r)
            ids.difference_update(set(map(bytes_to_str, r)))
            for key, value in items(r):
                yield bytes_to_str(key), value
            if timeout and iterations * interval >= timeout:
                raise TimeoutError('Operation timed out ({0})'.format(timeout))
            time.sleep(interval)  # don't busy loop.
            iterations += 1
Example #4
    def get_many(self, task_ids, timeout=None, interval=0.5, no_ack=True,
                 on_message=None, on_interval=None,
                 READY_STATES=states.READY_STATES):
        interval = 0.5 if interval is None else interval
        ids = task_ids if isinstance(task_ids, set) else set(task_ids)
        cached_ids = set()
        cache = self._cache
        for task_id in ids:
            try:
                cached = cache[task_id]
            except KeyError:
                pass
            else:
                if cached['status'] in READY_STATES:
                    yield bytes_to_str(task_id), cached
                    cached_ids.add(task_id)

        ids.difference_update(cached_ids)
        iterations = 0
        while ids:
            keys = list(ids)
            r = self._mget_to_results(self.mget([self.get_key_for_task(k)
                                                 for k in keys]), keys)
            cache.update(r)
            ids.difference_update({bytes_to_str(v) for v in r})
            for key, value in items(r):
                if on_message is not None:
                    on_message(value)
                yield bytes_to_str(key), value
            if timeout and iterations * interval >= timeout:
                raise TimeoutError('Operation timed out ({0})'.format(timeout))
            if on_interval:
                on_interval()
            time.sleep(interval)  # don't busy loop.
            iterations += 1
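Because get_many is a generator that yields (task_id, meta) pairs as results become ready, a typical consumption pattern might look like the hypothetical sketch below (``backend`` stands for any result backend exposing the method above):

    def collect_results(backend, task_ids):
        # Hypothetical usage sketch, not celery API documentation: drain the
        # generator and keep the 'result' field of each ready task's meta dict.
        results = {}
        for task_id, meta in backend.get_many(task_ids, timeout=10.0,
                                              interval=0.5):
            results[task_id] = meta['result']
        return results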
Example #5
 def _handle_message(self, client, r):
     if bytes_to_str(r[0]) == "unsubscribe" and r[2] == 0:
         client.subscribed = False
     elif bytes_to_str(r[0]) == "pmessage":
         return {"type": r[0], "pattern": r[1], "channel": r[2], "data": r[3]}
     else:
         return {"type": r[0], "pattern": None, "channel": r[1], "data": r[2]}
Example #6
    def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
        raw_payload = b64decode(ensure_bytes(payload))
        first_sep = raw_payload.find(sep)

        signer = raw_payload[:first_sep]
        signer_cert = self._cert_store[signer]

        sig_len = signer_cert._cert.get_pubkey().bits() >> 3
        signature = raw_payload[
            first_sep + len(sep):first_sep + len(sep) + sig_len
        ]
        end_of_sig = first_sep + len(sep) + sig_len + len(sep)

        v = raw_payload[end_of_sig:].split(sep)

        values = [bytes_to_str(signer), bytes_to_str(signature),
                  bytes_to_str(v[0]), bytes_to_str(v[1]), bytes_to_str(v[2])]

        return {
            'signer': values[0],
            'signature': values[1],
            'content_type': values[2],
            'content_encoding': values[3],
            'body': values[4],
        }
Example #7
 def _message_to_python(self, message, queue_name, queue):
     try:
         body = base64.b64decode(message['Body'].encode())
     except TypeError:
         body = message['Body'].encode()
     payload = loads(bytes_to_str(body))
     if queue_name in self._noack_queues:
         queue = self._new_queue(queue_name)
         self.asynsqs.delete_message(queue, message['ReceiptHandle'])
     else:
         try:
             properties = payload['properties']
             delivery_info = payload['properties']['delivery_info']
         except KeyError:
             # json message not sent by kombu?
             delivery_info = {}
             properties = {'delivery_info': delivery_info}
             payload.update({
                 'body': bytes_to_str(body),
                 'properties': properties,
             })
         # set delivery tag to SQS receipt handle
         delivery_info.update({
             'sqs_message': message, 'sqs_queue': queue,
         })
         properties['delivery_tag'] = message['ReceiptHandle']
     return payload
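For reference, a hedged sketch of the minimal SQS message shape the converter above relies on (the values are made up; real SQS messages carry additional metadata):

    import base64
    import json

    # Hypothetical message dict: 'Body' is base64-encoded JSON carrying the
    # 'properties'/'delivery_info' structure, and 'ReceiptHandle' becomes the
    # delivery tag used to ack/delete the message later.
    fake_sqs_message = {
        'Body': base64.b64encode(json.dumps({
            'body': '',
            'properties': {'delivery_info': {}},
        }).encode()).decode(),
        'ReceiptHandle': 'example-receipt-handle',
    }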
Example #8
 def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
     values = b64decode(ensure_bytes(payload)).split(sep)
     return {'signer': bytes_to_str(values[0]),
             'signature': ensure_bytes(values[1]),
             'content_type': bytes_to_str(values[2]),
             'content_encoding': bytes_to_str(values[3]),
             'body': ensure_bytes(values[4])}
Example #9
 def _strip_prefix(self, key):
     """Take bytes: emit string."""
     key = self.key_t(key)
     for prefix in self.task_keyprefix, self.group_keyprefix:
         if key.startswith(prefix):
             return bytes_to_str(key[len(prefix):])
     return bytes_to_str(key)
Example #10
 def _handle_message(self, client, r):
     if bytes_to_str(r[0]) == 'unsubscribe' and r[2] == 0:
         client.subscribed = False
     elif bytes_to_str(r[0]) == 'pmessage':
         return {'type':    r[0], 'pattern': r[1],
                 'channel': r[2], 'data':    r[3]}
     else:
         return {'type':    r[0], 'pattern': None,
                 'channel': r[1], 'data':    r[2]}
Example #11
 def _index(self, id, body, **kwargs):
     body = {bytes_to_str(k): v for k, v in items(body)}
     return self.server.index(
         id=bytes_to_str(id),
         index=self.index,
         doc_type=self.doc_type,
         body=body,
         **kwargs
     )
Example #12
 def _receive(self):
     c = self.subclient
     response = None
     try:
         response = c.parse_response()
     except self.connection_errors:
         self._in_listen = False
     if response is not None:
         payload = self._handle_message(c, response)
         if bytes_to_str(payload['type']) == 'message':
             return (
                 loads(bytes_to_str(payload['data'])),
                 self._fanout_to_queue[bytes_to_str(payload['channel'])],
             )
     raise Empty()
Example #13
    def _handle_message(self, client, r):
        if bytes_to_str(r[0]) == 'unsubscribe' and r[2] == 0:
            client.subscribed = False
            return

        if bytes_to_str(r[0]) == 'pmessage':
            type, pattern, channel, data = r[0], r[1], r[2], r[3]
        else:
            type, pattern, channel, data = r[0], None, r[1], r[2]
        return {
            'type': type,
            'pattern': pattern,
            'channel': channel,
            'data': data,
        }
Example #14
    def deserialize(self, data):
        data = self._ensure_bytes(data)
        header, end = self.parse_header(data)
        # Skip whitespace
        length = len(data)
        while end < length and data[end] in self.whitespace:
            end += 1
        body = data[end:]

        signer, signature, content_type, content_encoding = (
            header['signer'], header['signature'],
            header['content_type'], header['content_encoding']
        )
        signature = self.key_codec.decode(signature)
        if content_encoding != self._content_encoding:
            raise ValueError("Invalid inner content encoding ({!r} != {!r})"
                             .format(content_encoding, self._content_encoding))

        try:
            verify_key = self._verify_keys[signer]
        except KeyError:
            raise ValueError("Unknown signer {!r}".format(signer)) from None
        verify_key.verify(body, signature)
        return loads(bytes_to_str(body), content_type, content_encoding,
                     force=True)
Example #15
    def set(self, key, value):
        """Set a key in Consul.

        Before creating the key, a session with a TTL is created in Consul.

        The key created afterwards references that session's ID.

        If the session expires, Consul removes the key, so results
        can auto-expire from the K/V store.
        """
        session_name = bytes_to_str(key)

        key = self._key_to_consul_key(key)

        logger.debug('Trying to create Consul session %s with TTL %d',
                     session_name, self.expires)
        session_id = self.client.session.create(name=session_name,
                                                behavior='delete',
                                                ttl=self.expires)
        logger.debug('Created Consul session %s', session_id)

        logger.debug('Writing key %s to Consul', key)
        return self.client.kv.put(key=key,
                                  value=value,
                                  acquire=session_id)
Example #16
 def get_table(self, exchange):
     key = self.keyprefix_queue % exchange
     with self.conn_or_acquire() as client:
         values = client.smembers(key)
         if not values:
             raise InconsistencyError(NO_ROUTE_ERROR.format(exchange, key))
         return [tuple(bytes_to_str(val).split(self.sep)) for val in values]
Example #17
 def _get(self, queue):
     with self.conn_or_acquire() as client:
         for pri in PRIORITY_STEPS:
             item = client.rpop(self._q_for_pri(queue, pri))
             if item:
                 return loads(bytes_to_str(item))
         raise Empty()
Example #18
 def _restore(self, message, leftmost=False):
     tag = message.delivery_tag
     with self.conn_or_acquire() as client:
         P, _ = client.pipeline().hget(self.unacked_key, tag).hdel(self.unacked_key, tag).execute()
         if P:
             M, EX, RK = loads(bytes_to_str(P))  # json is unicode
             self._do_restore_message(M, EX, RK, client, leftmost)
Example #19
def _saferepr(o, maxlen=None, maxlevels=3, seen=None):
    stack = deque([iter([o])])
    for token, it in reprstream(stack, seen=seen, maxlevels=maxlevels):
        if maxlen is not None and maxlen <= 0:
            yield ", ..."
            # move rest back to stack, so that we can include
            # dangling parens.
            stack.append(it)
            break
        if isinstance(token, _literal):
            val = token.value
        elif isinstance(token, _key):
            val = saferepr(token.value, maxlen, maxlevels)
        elif isinstance(token, _quoted):
            val = token.value
            if IS_PY3 and isinstance(val, bytes):  # pragma: no cover
                val = "b'%s'" % (bytes_to_str(truncate_bytes(val, maxlen)),)
            else:
                val = "'%s'" % (truncate(val, maxlen),)
        else:
            val = truncate(token, maxlen)
        yield val
        if maxlen is not None:
            maxlen -= len(val)
    for rest1 in stack:
        # maxlen exceeded, process any dangling parens.
        for rest2 in rest1:
            if isinstance(rest2, _literal) and not rest2.truncate:
                yield rest2.value
Example #20
 def _store_result(self, task_id, result, state,
                   traceback=None, request=None, **kwargs):
     meta = {'status': state, 'result': result, 'traceback': traceback,
             'children': self.current_task_children(request),
             'task_id': bytes_to_str(task_id)}
     self.set(self.get_key_for_task(task_id), self.encode(meta))
     return result
Example #21
 def restore_by_tag(self, tag, client=None, leftmost=False):
     with self.channel.conn_or_acquire(client) as client:
         p, _, _ = self._remove_from_indices(
             tag, client.pipeline().hget(self.unacked_key, tag)).execute()
         if p:
             M, EX, RK = loads(bytes_to_str(p))  # json is unicode
             self.channel._do_restore_message(M, EX, RK, client, leftmost)
Example #22
 def _mget_to_results(self, values, keys):
     if hasattr(values, "items"):
         # client returns dict so mapping preserved.
         return {self._strip_prefix(k): v for k, v in self._filter_ready(items(values))}
     else:
         # client returns list so need to recreate mapping.
         return {bytes_to_str(keys[i]): v for i, v in self._filter_ready(enumerate(values))}
Example #23
File: SQS.py Project: celery/kombu
 def _message_to_python(self, message, queue_name, queue):
     payload = loads(bytes_to_str(message.get_body()))
     if queue_name in self._noack_queues:
         queue.delete_message(message)
     else:
         try:
             properties = payload["properties"]
             delivery_info = payload["properties"]["delivery_info"]
         except KeyError:
             # json message not sent by kombu?
             delivery_info = {}
             properties = {"delivery_info": delivery_info}
             payload.update({"body": bytes_to_str(message.get_body()), "properties": properties})
     # set delivery tag to SQS receipt handle
     delivery_info.update({"sqs_message": message, "sqs_queue": queue})
     properties["delivery_tag"] = message.receipt_handle
     return payload
Example #24
def decompress(body, content_type):
    """Decompress compressed text.

    :param body: Previously compressed text to uncompress.
    :param content_type: mime-type of compression method used.

    """
    return bytes_to_str(get_decoder(content_type)(body))
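A hedged round-trip sketch, assuming the companion kombu.compression.compress() helper (which returns the compressed body together with its content type) and that 'zlib' is a registered compression alias:

from kombu.compression import compress

# compress() is assumed to return (compressed_body, content_type); feeding
# both back into the decompress() above should give back the original text.
compressed, content_type = compress(b'my very large payload', 'zlib')
assert decompress(compressed, content_type) == 'my very large payload'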
Example #25
    def _get(self, queue):
        result = self._query(queue, limit=1)
        if not result:
            raise Empty()

        item = result.rows[0].value
        self.client.delete(item)
        return loads(bytes_to_str(item['payload']))
Example #26
 def deserialize(self, data):
     """Deserialize data structure from string."""
     assert self._cert_store is not None
     with reraise_errors("Unable to deserialize: {0!r}", (Exception,)):
         payload = self._unpack(data)
         signature, signer, body = (payload["signature"], payload["signer"], payload["body"])
         self._cert_store[signer].verify(body, signature, self._digest)
     return loads(bytes_to_str(body), payload["content_type"], payload["content_encoding"], force=True)
Example #27
    def _get(self, queue):
        queue = self._get_queue(queue)
        msg = queue.get()

        if msg is None:
            raise Empty()

        return loads(bytes_to_str(msg))
Example #28
 def _message_to_python(self, message, queue_name, queue):
     payload = loads(bytes_to_str(message.get_body()))
     if queue_name in self._noack_queues:
         queue.delete_message(message)
     else:
         payload['properties']['delivery_info'].update({
             'sqs_message': message, 'sqs_queue': queue,
         })
     return payload
Example #29
 def _brpop_read(self, **options):
     try:
         try:
             dest__item = self.client.parse_response(self.client.connection, "BRPOP", **options)
         except self.connection_errors:
             # if there's a ConnectionError, disconnect so the next
             # iteration will reconnect automatically.
             self.client.connection.disconnect()
             raise Empty()
         if dest__item:
             dest, item = dest__item
             dest = bytes_to_str(dest).rsplit(self.sep, 1)[0]
             self._rotate_cycle(dest)
             return loads(bytes_to_str(item)), dest
         else:
             raise Empty()
     finally:
         self._in_poll = False
Example #30
 def _mget_to_results(self, values, keys):
     if hasattr(values, "items"):
         # client returns dict so mapping preserved.
         return dict((self._strip_prefix(k), self.decode(v)) for k, v in values.items() if v is not None)
     else:
         # client returns list so need to recreate mapping.
         return dict(
             (bytes_to_str(keys[i]), self.decode(value)) for i, value in enumerate(values) if value is not None
         )
Example #31
 def encode(self, s):
     return bytes_to_str(base64.b64encode(str_to_bytes(s)))
Example #32
 def _key_to_consul_key(self, key):
     key = bytes_to_str(key)
     return key if self.path is None else f'{self.path}/{key}'
Example #33
    def test_backend_index_conflicting_document_removed_not_throwing(self, base_datetime_mock, es_datetime_mock):
        expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None)
        es_datetime_mock.utcnow.return_value = expected_dt

        expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None)
        base_datetime_mock.utcnow.return_value = expected_done_dt

        self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry
        try:
            x = ElasticsearchBackend(app=self.app)

            task_id = str(sentinel.task_id)
            encoded_task_id = bytes_to_str(x.get_key_for_task(task_id))
            result = str(sentinel.result)

            sleep_mock = Mock()
            x._sleep = sleep_mock
            x._server = Mock()
            x._server.index.side_effect = [
                exceptions.ConflictError(409, "concurrent update", {}),
                {'result': 'created'}
            ]

            x._server.get.side_effect = [
                {
                    'found': True,
                    '_source': {'result': _RESULT_RETRY},
                    '_seq_no': 2,
                    '_primary_term': 1,
                },
                {'_index': 'celery', '_type': '_doc', '_id': 'toto', 'found': False},
            ]

            result_meta = x._get_result_meta(result, states.SUCCESS, None, None)
            result_meta['task_id'] = bytes_to_str(task_id)

            expected_result = x.encode(result_meta)

            x.store_result(task_id, result, states.SUCCESS)
            x._server.index.assert_has_calls([
                call(
                    id=encoded_task_id,
                    index=x.index,
                    doc_type=x.doc_type,
                    body={
                        'result': expected_result,
                        '@timestamp': expected_dt.isoformat()[:-3] + 'Z'
                    },
                    params={'op_type': 'create'}
                ),
                call(
                    id=encoded_task_id,
                    index=x.index,
                    doc_type=x.doc_type,
                    body={
                        'result': expected_result,
                        '@timestamp': expected_dt.isoformat()[:-3] + 'Z'
                    },
                    params={'op_type': 'create'}
                ),
            ])
            x._server.update.assert_not_called()
            sleep_mock.assert_not_called()
        finally:
            self.app.conf.result_backend_always_retry = prev
Example #34
def encode(value, compress_object=False, pickle_protocol=DEFAULT_PROTOCOL):
    return bytes_to_str(
        b64encode(
            maybe_compress(pickle.dumps(value, pickle_protocol),
                           compress_object)))
Example #35
 def label(self, step):
     return step and "{0}{1}".format(
         self._get_prefix(step),
         bytes_to_str(
             (step.label or _label(step)).encode("utf-8", "ignore")),
     )
Example #36
 def _key_to_consul_key(self, key):
     key = bytes_to_str(key)
     return key if self.path is None else '{0}/{1}'.format(self.path, key)
Example #37
 def delete(self, key):
     key = bytes_to_str(key)
     s3_object = self._get_s3_object(key)
     s3_object.delete()
Example #38
 def add_cert(self, cert):
     cert_id = bytes_to_str(cert.get_id())
     if cert_id in self._certs:
         raise SecurityError(f'Duplicate certificate: {cert_id!r}')
     self._certs[cert_id] = cert
Example #39
 def get_issuer(self):
     """Return issuer (CA) as a string."""
     return ' '.join(
         bytes_to_str(x[1])
         for x in self._cert.get_issuer().get_components())
Example #40
def b64encode(s):
    return bytes_to_str(base64encode(str_to_bytes(s)))
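A minimal decoding counterpart, sketched as an assumption to mirror the helper above (celery's actual b64decode helper may differ):

from base64 import b64decode as base64decode


def b64decode(s):
    # Accept str or bytes and return the raw decoded bytes
    # (hypothetical mirror of the b64encode helper above).
    if isinstance(s, str):
        s = s.encode('utf-8')
    return base64decode(s)

# round trip: b64decode(b64encode('hello')) == b'hello'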
Example #41
 def __getitem__(self, id):
     """Get certificate by id."""
     try:
         return self._certs[bytes_to_str(id)]
     except KeyError:
         raise SecurityError('Unknown certificate: {0!r}'.format(id))
Example #42
 def add_cert(self, cert):
     cert_id = bytes_to_str(cert.get_id())
     if cert_id in self._certs:
         raise SecurityError('Duplicate certificate: {0!r}'.format(cert_id))
     self._certs[cert_id] = cert
Example #43
 def test_lots_of_sign(self):
     for i in range(1000):
         rdata = bytes_to_str(base64.urlsafe_b64encode(os.urandom(265)))
         s = self._get_s(KEY1, CERT1, [CERT1])
         assert s.deserialize(s.serialize(rdata)) == rdata
Example #44
 def set(self, key, value):
     key = bytes_to_str(key)
     s3_object = self._get_s3_object(key)
     s3_object.put(Body=value)
Example #45
 def label(self, step):
     return step and '{}{}'.format(
         self._get_prefix(step),
         bytes_to_str(
             (step.label or _label(step)).encode('utf-8', 'ignore')),
     )
Example #46
 def _get(self, queue):
     m = self.Queue.objects.fetch(queue)
     if m:
         return loads(bytes_to_str(m))
     raise Empty()
Example #47
 def on_header(self, headers, line):
     try:
         self._header_parser.send((bytes_to_str(line), headers))
     except StopIteration:
         self._header_parser = header_parser()
Example #48
    def test_backend_concurrent_update(self, base_datetime_mock,
                                       es_datetime_mock):
        expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None)
        es_datetime_mock.utcnow.return_value = expected_dt

        expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321,
                                             None)
        base_datetime_mock.utcnow.return_value = expected_done_dt

        self.app.conf.result_backend_always_retry, prev = True, self.app.conf.result_backend_always_retry
        try:
            x = ElasticsearchBackend(app=self.app)

            task_id = str(sentinel.task_id)
            encoded_task_id = bytes_to_str(x.get_key_for_task(task_id))
            result = str(sentinel.result)

            sleep_mock = Mock()
            x._sleep = sleep_mock
            x._server = Mock()
            x._server.index.side_effect = exceptions.ConflictError(
                409, "concurrent update", {})

            x._server.get.side_effect = [
                {
                    'found': True,
                    '_source': {
                        'result':
                        """{"status":"RETRY","result":{"exc_type":"Exception","exc_message":["failed"],"exc_module":"builtins"}}"""
                    },
                    '_seq_no': 2,
                    '_primary_term': 1,
                },
                {
                    'found': True,
                    '_source': {
                        'result':
                        """{"status":"RETRY","result":{"exc_type":"Exception","exc_message":["failed"],"exc_module":"builtins"}}"""
                    },
                    '_seq_no': 2,
                    '_primary_term': 1,
                },
                {
                    'found': True,
                    '_source': {
                        'result':
                        """{"status":"FAILURE","result":{"exc_type":"Exception","exc_message":["failed"],"exc_module":"builtins"}}"""
                    },
                    '_seq_no': 3,
                    '_primary_term': 1,
                },
                {
                    'found': True,
                    '_source': {
                        'result':
                        """{"status":"FAILURE","result":{"exc_type":"Exception","exc_message":["failed"],"exc_module":"builtins"}}"""
                    },
                    '_seq_no': 3,
                    '_primary_term': 1,
                },
            ]

            x._server.update.side_effect = [{
                'result': 'noop'
            }, {
                'result': 'updated'
            }]
            result_meta = x._get_result_meta(result, states.SUCCESS, None,
                                             None)
            result_meta['task_id'] = bytes_to_str(task_id)

            expected_result = x.encode(result_meta)

            x.store_result(task_id, result, states.SUCCESS)
            x._server.index.assert_has_calls([
                call(id=encoded_task_id,
                     index=x.index,
                     doc_type=x.doc_type,
                     body={
                         'result': expected_result,
                         '@timestamp': expected_dt.isoformat()[:-3] + 'Z'
                     },
                     params={'op_type': 'create'}),
                call(id=encoded_task_id,
                     index=x.index,
                     doc_type=x.doc_type,
                     body={
                         'result': expected_result,
                         '@timestamp': expected_dt.isoformat()[:-3] + 'Z'
                     },
                     params={'op_type': 'create'}),
            ])
            x._server.update.assert_has_calls([
                call(id=encoded_task_id,
                     index=x.index,
                     doc_type=x.doc_type,
                     body={
                         'doc': {
                             'result': expected_result,
                             '@timestamp': expected_dt.isoformat()[:-3] + 'Z'
                         }
                     },
                     params={
                         'if_seq_no': 2,
                         'if_primary_term': 1
                     }),
                call(id=encoded_task_id,
                     index=x.index,
                     doc_type=x.doc_type,
                     body={
                         'doc': {
                             'result': expected_result,
                             '@timestamp': expected_dt.isoformat()[:-3] + 'Z'
                         }
                     },
                     params={
                         'if_seq_no': 3,
                         'if_primary_term': 1
                     }),
            ])

            assert sleep_mock.call_count == 1
        finally:
            self.app.conf.result_backend_always_retry = prev
Example #49
    def test_backend_index_corrupted_conflicting_document(self, base_datetime_mock, es_datetime_mock):
        expected_dt = datetime.datetime(2020, 6, 1, 18, 43, 24, 123456, None)
        es_datetime_mock.utcnow.return_value = expected_dt

        expected_done_dt = datetime.datetime(2020, 6, 1, 18, 45, 34, 654321, None)
        base_datetime_mock.utcnow.return_value = expected_done_dt

        x = ElasticsearchBackend(app=self.app)

        task_id = str(sentinel.task_id)
        encoded_task_id = bytes_to_str(x.get_key_for_task(task_id))
        result = str(sentinel.result)

        sleep_mock = Mock()
        x._sleep = sleep_mock
        x._server = Mock()
        x._server.index.side_effect = [
            exceptions.ConflictError(409, "concurrent update", {})
        ]

        x._server.update.side_effect = [
            {'result': 'updated'}
        ]

        x._server.get.return_value = {
            'found': True,
            '_source': {},
            '_seq_no': 2,
            '_primary_term': 1,
        }

        result_meta = x._get_result_meta(result, states.SUCCESS, None, None)
        result_meta['task_id'] = bytes_to_str(task_id)

        expected_result = x.encode(result_meta)

        x.store_result(task_id, result, states.SUCCESS)
        x._server.index.assert_called_once_with(
            id=encoded_task_id,
            index=x.index,
            doc_type=x.doc_type,
            body={
                'result': expected_result,
                '@timestamp': expected_dt.isoformat()[:-3] + 'Z'
            },
            params={'op_type': 'create'}
        )
        x._server.update.assert_called_once_with(
            id=encoded_task_id,
            index=x.index,
            doc_type=x.doc_type,
            body={
                'doc': {
                    'result': expected_result,
                    '@timestamp': expected_dt.isoformat()[:-3] + 'Z'
                }
            },
            params={'if_primary_term': 1, 'if_seq_no': 2}
        )
        sleep_mock.assert_not_called()
Example #50
 def get_serial_number(self):
     """Return the serial number in the certificate."""
     return bytes_to_str(self._cert.get_serial_number())
Example #51
 def P(s):
     print(bytes_to_str(s), file=fh)
Example #52
 def get(self, key):
     key = bytes_to_str(key)
     try:
         return self.connection.get(key)['value']
     except pycouchdb.exceptions.NotFound:
         return None
Example #53
    def _setup_request(self, curl, request, buffer, headers, _pycurl=pycurl):
        setopt = curl.setopt
        setopt(_pycurl.URL, bytes_to_str(request.url))

        # see tornado curl client
        request.headers.setdefault('Expect', '')
        request.headers.setdefault('Pragma', '')

        setopt(
            _pycurl.HTTPHEADER,
            ['{}: {}'.format(*h) for h in request.headers.items()],
        )

        setopt(
            _pycurl.HEADERFUNCTION,
            partial(request.on_header or self.on_header, request.headers),
        )
        setopt(
            _pycurl.WRITEFUNCTION,
            request.on_stream or buffer.write,
        )
        setopt(
            _pycurl.FOLLOWLOCATION,
            request.follow_redirects,
        )
        setopt(
            _pycurl.USERAGENT,
            bytes_to_str(request.user_agent or DEFAULT_USER_AGENT),
        )
        if request.network_interface:
            setopt(_pycurl.INTERFACE, request.network_interface)
        setopt(
            _pycurl.ENCODING,
            'gzip,deflate' if request.use_gzip else 'none',
        )
        if request.proxy_host:
            if not request.proxy_port:
                raise ValueError('Request with proxy_host but no proxy_port')
            setopt(_pycurl.PROXY, request.proxy_host)
            setopt(_pycurl.PROXYPORT, request.proxy_port)
            if request.proxy_username:
                setopt(
                    _pycurl.PROXYUSERPWD,
                    '{}:{}'.format(request.proxy_username,
                                   request.proxy_password or ''))
        else:
            setopt(_pycurl.PROXY, '')
            curl.unsetopt(_pycurl.PROXYUSERPWD)

        setopt(_pycurl.SSL_VERIFYPEER, 1 if request.validate_cert else 0)
        setopt(_pycurl.SSL_VERIFYHOST, 2 if request.validate_cert else 0)
        if request.ca_certs is not None:
            setopt(_pycurl.CAINFO, request.ca_certs)

        setopt(_pycurl.IPRESOLVE, pycurl.IPRESOLVE_WHATEVER)

        for meth in METH_TO_CURL.values():
            setopt(meth, False)
        try:
            meth = METH_TO_CURL[request.method]
        except KeyError:
            curl.setopt(_pycurl.CUSTOMREQUEST, request.method)
        else:
            curl.unsetopt(_pycurl.CUSTOMREQUEST)
            setopt(meth, True)

        if request.method in ('POST', 'PUT'):
            body = request.body.encode('utf-8') if request.body else bytes()
            reqbuffer = BytesIO(body)
            setopt(_pycurl.READFUNCTION, reqbuffer.read)
            if request.method == 'POST':

                def ioctl(cmd):
                    if cmd == _pycurl.IOCMD_RESTARTREAD:
                        reqbuffer.seek(0)

                setopt(_pycurl.IOCTLFUNCTION, ioctl)
                setopt(_pycurl.POSTFIELDSIZE, len(body))
            else:
                setopt(_pycurl.INFILESIZE, len(body))
        elif request.method == 'GET':
            assert not request.body

        if request.auth_username is not None:
            auth_mode = {
                'basic': _pycurl.HTTPAUTH_BASIC,
                'digest': _pycurl.HTTPAUTH_DIGEST
            }[request.auth_mode or 'basic']
            setopt(_pycurl.HTTPAUTH, auth_mode)
            userpwd = '{}:{}'.format(
                request.auth_username,
                request.auth_password or '',
            )
            setopt(_pycurl.USERPWD, userpwd)
        else:
            curl.unsetopt(_pycurl.USERPWD)

        if request.client_cert is not None:
            setopt(_pycurl.SSLCERT, request.client_cert)
        if request.client_key is not None:
            setopt(_pycurl.SSLKEY, request.client_key)

        if request.on_prepare is not None:
            request.on_prepare(curl)