Example #1
def migrate_task(producer,
                 body_,
                 message,
                 remove_props=[
                     "application_headers", "content_type", "content_encoding"
                 ]):
    body = ensure_bytes(message.body)  # use raw message body.
    info, headers, props = (message.delivery_info, message.headers,
                            message.properties)
    ctype, enc = message.content_type, message.content_encoding
    # remove compression header, as this will be inserted again
    # when the message is recompressed.
    compression = headers.pop("compression", None)

    for key in remove_props:
        props.pop(key, None)

    producer.publish(ensure_bytes(body),
                     exchange=info["exchange"],
                     routing_key=info["routing_key"],
                     compression=compression,
                     headers=headers,
                     content_type=ctype,
                     content_encoding=enc,
                     **props)
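
Every example on this page funnels bodies, keys and properties through ensure_bytes before publishing or signing. A quick behaviour sketch, assuming the kombu.utils.encoding implementation that Celery imports:

from kombu.utils.encoding import ensure_bytes

assert ensure_bytes(b'raw') == b'raw'    # bytes pass through untouched
assert ensure_bytes('text') == b'text'   # text is encoded (utf-8 by default)
assert ensure_bytes(None) is None        # None passes through (see Example #2)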
Example #2
 def __init__(self, key, password=None):
     with reraise_errors('Invalid private key: {0!r}',
                         errors=(ValueError, )):
         self._key = serialization.load_pem_private_key(
             ensure_bytes(key),
             password=ensure_bytes(password),
             backend=default_backend())
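
A hedged round-trip sketch of the cryptography-package API this constructor wraps: generate a throwaway RSA key, serialize it to PEM, and load it back the way load_pem_private_key is used above (recent cryptography releases no longer require an explicit backend):

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)
# password=None because the PEM above is unencrypted; pass bytes otherwise.
loaded = serialization.load_pem_private_key(pem, password=None)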
Example #3
 def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
     values = b64decode(ensure_bytes(payload)).split(sep)
     return {'signer': bytes_to_str(values[0]),
             'signature': ensure_bytes(values[1]),
             'content_type': bytes_to_str(values[2]),
             'content_encoding': bytes_to_str(values[3]),
             'body': ensure_bytes(values[4])}
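
The payload split here is produced by a matching _pack (see Example #13): five fields joined with a two-byte separator and base64-encoded. A standalone sketch of that wire layout (not Celery's actual classes, just the format these examples share):

from base64 import b64decode, b64encode

SEP = b'\x00\x01'

def pack(signer, signature, content_type, content_encoding, body):
    return b64encode(SEP.join([signer, signature, content_type,
                               content_encoding, body]))

def unpack(payload):
    signer, signature, ctype, cenc, body = b64decode(payload).split(SEP)
    return {'signer': signer, 'signature': signature,
            'content_type': ctype.decode(), 'content_encoding': cenc.decode(),
            'body': body}

print(unpack(pack(b'cert-id', b'sig', b'application/json', b'utf-8', b'{}')))

Note that a binary signature can itself contain the separator, which is why Examples #9, #10 and #14 slice the signature out by length instead of relying on a naive split.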
Example #4
def republish(producer,
              message,
              exchange=None,
              routing_key=None,
              remove_props=None):
    """Republish message."""
    if not remove_props:
        remove_props = [
            "application_headers",
            "content_type",
            "content_encoding",
            "headers",
        ]
    body = ensure_bytes(message.body)  # use raw message body.
    info, headers, props = (message.delivery_info, message.headers,
                            message.properties)
    exchange = info["exchange"] if exchange is None else exchange
    routing_key = info["routing_key"] if routing_key is None else routing_key
    ctype, enc = message.content_type, message.content_encoding
    # remove compression header, as this will be inserted again
    # when the message is recompressed.
    compression = headers.pop("compression", None)

    for key in remove_props:
        props.pop(key, None)

    producer.publish(ensure_bytes(body),
                     exchange=exchange,
                     routing_key=routing_key,
                     compression=compression,
                     headers=headers,
                     content_type=ctype,
                     content_encoding=enc,
                     **props)
Example #5
def republish(producer,
              message,
              exchange=None,
              routing_key=None,
              remove_props=[
                  'application_headers', 'content_type', 'content_encoding',
                  'headers'
              ]):
    body = ensure_bytes(message.body)  # use raw message body.
    info, headers, props = (message.delivery_info, message.headers,
                            message.properties)
    exchange = info['exchange'] if exchange is None else exchange
    routing_key = info['routing_key'] if routing_key is None else routing_key
    ctype, enc = message.content_type, message.content_encoding
    # remove compression header, as this will be inserted again
    # when the message is recompressed.
    compression = headers.pop('compression', None)

    for key in remove_props:
        props.pop(key, None)

    producer.publish(ensure_bytes(body),
                     exchange=exchange,
                     routing_key=routing_key,
                     compression=compression,
                     headers=headers,
                     content_type=ctype,
                     content_encoding=enc,
                     **props)
Example #6
def migrate_task(producer, body_, message, queues=None,
        remove_props=['application_headers',
                      'content_type',
                      'content_encoding',
                      'headers']):
    queues = {} if queues is None else queues
    body = ensure_bytes(message.body)  # use raw message body.
    info, headers, props = (message.delivery_info,
                            message.headers,
                            message.properties)
    ctype, enc = message.content_type, message.content_encoding
    # remove compression header, as this will be inserted again
    # when the message is recompressed.
    compression = headers.pop('compression', None)

    for key in remove_props:
        props.pop(key, None)

    exchange = queues.get(info['exchange'], info['exchange'])
    routing_key = queues.get(info['routing_key'], info['routing_key'])

    producer.publish(ensure_bytes(body), exchange=exchange,
                           routing_key=routing_key,
                           compression=compression,
                           headers=headers,
                           content_type=ctype,
                           content_encoding=enc,
                           **props)
Example #7
    def test_get_result_meta_encoded(self):
        self.app.conf.result_extended = True
        b1 = BaseBackend(self.app)
        args = ['a', 'b']
        kwargs = {'foo': 'bar'}

        request = Context(args=args, kwargs=kwargs)
        meta = b1._get_result_meta(result={'fizz': 'buzz'},
                                   state=states.SUCCESS, traceback=None,
                                   request=request, encode=True)
        assert meta['args'] == ensure_bytes(b1.encode(args))
        assert meta['kwargs'] == ensure_bytes(b1.encode(kwargs))
Example #8
    def test_migrate(self, app, name='testcelery'):
        connection_kwargs = {'transport_options': {'polling_interval': 0.01}}
        x = Connection('memory://foo', **connection_kwargs)
        y = Connection('memory://foo', **connection_kwargs)
        # use separate state
        x.default_channel.queues = {}
        y.default_channel.queues = {}

        ex = Exchange(name, 'direct')
        q = Queue(name, exchange=ex, routing_key=name)
        q(x.default_channel).declare()
        Producer(x).publish('foo', exchange=name, routing_key=name)
        Producer(x).publish('bar', exchange=name, routing_key=name)
        Producer(x).publish('baz', exchange=name, routing_key=name)
        assert x.default_channel.queues
        assert not y.default_channel.queues
        migrate_tasks(x, y, accept=['text/plain'], app=app)

        yq = q(y.default_channel)
        assert yq.get().body == ensure_bytes('foo')
        assert yq.get().body == ensure_bytes('bar')
        assert yq.get().body == ensure_bytes('baz')

        Producer(x).publish('foo', exchange=name, routing_key=name)
        callback = Mock()
        migrate_tasks(x, y, callback=callback, accept=['text/plain'], app=app)
        callback.assert_called()
        migrate = Mock()
        Producer(x).publish('baz', exchange=name, routing_key=name)
        migrate_tasks(x,
                      y,
                      callback=callback,
                      migrate=migrate,
                      accept=['text/plain'],
                      app=app)
        migrate.assert_called()

        with patch('kombu.transport.virtual.Channel.queue_declare') as qd:

            def effect(*args, **kwargs):
                if kwargs.get('passive'):
                    raise ChannelError('some channel error')
                return 0, 3, 0

            qd.side_effect = effect
            migrate_tasks(x, y, app=app)

        x = Connection('memory://', **connection_kwargs)
        x.default_channel.queues = {}
        y.default_channel.queues = {}
        callback = Mock()
        migrate_tasks(x, y, callback=callback, accept=['text/plain'], app=app)
        callback.assert_not_called()
Example #9
    def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
        raw_payload = b64decode(ensure_bytes(payload))
        first_sep = raw_payload.find(sep)

        signer = raw_payload[:first_sep]
        signer_cert = self._cert_store[signer]

        sig_len = signer_cert._cert.get_pubkey().bits() >> 3
        signature = raw_payload[
            first_sep + len(sep):first_sep + len(sep) + sig_len
        ]
        end_of_sig = first_sep + len(sep) + sig_len+len(sep)

        v = raw_payload[end_of_sig:].split(sep)

        values = [bytes_to_str(signer), bytes_to_str(signature),
                  bytes_to_str(v[0]), bytes_to_str(v[1]), bytes_to_str(v[2])]

        return {
            'signer': values[0],
            'signature': values[1],
            'content_type': values[2],
            'content_encoding': values[3],
            'body': values[4],
        }
Example #10
    def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
        raw_payload = b64decode(ensure_bytes(payload))
        first_sep = raw_payload.find(sep)

        signer = raw_payload[:first_sep]
        signer_cert = self._cert_store[signer]

        # shift 3 bits right to get signature length
        # 2048bit rsa key has a signature length of 256
        # 4096bit rsa key has a signature length of 512
        sig_len = signer_cert.get_pubkey().key_size >> 3
        sep_len = len(sep)
        signature_start_position = first_sep + sep_len
        signature_end_position = signature_start_position + sig_len
        signature = raw_payload[
            signature_start_position:signature_end_position]

        v = raw_payload[signature_end_position + sep_len:].split(sep)

        return {
            'signer': signer,
            'signature': signature,
            'content_type': bytes_to_str(v[0]),
            'content_encoding': bytes_to_str(v[1]),
            'body': v[2],
        }
Example #11
    def _get_result_meta(self,
                         result,
                         state,
                         traceback,
                         request,
                         format_date=True,
                         encode=False):
        if state in self.READY_STATES:
            date_done = datetime.utcnow()
            if format_date:
                date_done = date_done.isoformat()
        else:
            date_done = None

        meta = {
            'status': state,
            'result': result,
            'traceback': traceback,
            'children': self.current_task_children(request),
            'date_done': date_done,
        }

        if request and getattr(request, 'group', None):
            meta['group_id'] = request.group
        if request and getattr(request, 'parent_id', None):
            meta['parent_id'] = request.parent_id

        if self.app.conf.find_value_for_key('extended', 'result'):
            if request:
                request_meta = {
                    'name':
                    getattr(request, 'task', None),
                    'args':
                    getattr(request, 'args', None),
                    'kwargs':
                    getattr(request, 'kwargs', None),
                    'worker':
                    getattr(request, 'hostname', None),
                    'retries':
                    getattr(request, 'retries', None),
                    'queue':
                    request.delivery_info.get('routing_key')
                    if hasattr(request, 'delivery_info')
                    and request.delivery_info else None,
                }
                if getattr(request, 'stamps', None):
                    request_meta['stamped_headers'] = request.stamped_headers
                    request_meta.update(request.stamps)

                if encode:
                    # args and kwargs need to be encoded properly before saving
                    encode_needed_fields = {"args", "kwargs"}
                    for field in encode_needed_fields:
                        value = request_meta[field]
                        encoded_value = self.encode(value)
                        request_meta[field] = ensure_bytes(encoded_value)

                meta.update(request_meta)

        return meta
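
For orientation, the dict this builds for a successful task with the extended result setting enabled looks roughly like this (all values illustrative):

meta = {
    'status': 'SUCCESS',
    'result': {'fizz': 'buzz'},
    'traceback': None,
    'children': [],
    'date_done': '2024-01-01T00:00:00',
    # extended fields, only added when the result-extended setting is on:
    'name': 'proj.tasks.add',
    'args': [2, 2],   # or ensure_bytes(self.encode([2, 2])) when encode=True
    'kwargs': {},
    'worker': 'worker1@example.com',
    'retries': 0,
    'queue': 'celery',
}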
Example #12
 def _strip_prefix(self, key):
     """Takes bytes, emits string."""
     key = ensure_bytes(key)
     for prefix in self.task_keyprefix, self.group_keyprefix:
         if key.startswith(prefix):
             return bytes_to_str(key[len(prefix):])
     return bytes_to_str(key)
Example #13
 def _pack(self, body, content_type, content_encoding, signer, signature,
           sep=str_to_bytes('\x00\x01')):
     fields = sep.join(
         ensure_bytes(s) for s in [signer, signature, content_type,
                                   content_encoding, body]
     )
     return b64encode(fields)
Example #14
    def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
        raw_payload = b64decode(ensure_bytes(payload))
        first_sep = raw_payload.find(sep)

        signer = raw_payload[:first_sep]
        signer_cert = self._cert_store[signer]

        sig_len = signer_cert._cert.get_pubkey().bits() >> 3
        signature = raw_payload[first_sep + len(sep):first_sep + len(sep) +
                                sig_len]
        end_of_sig = first_sep + len(sep) + sig_len + len(sep)

        v = raw_payload[end_of_sig:].split(sep)

        values = [
            bytes_to_str(signer),
            bytes_to_str(signature),
            bytes_to_str(v[0]),
            bytes_to_str(v[1]),
            bytes_to_str(v[2])
        ]

        return {
            'signer': values[0],
            'signature': values[1],
            'content_type': values[2],
            'content_encoding': values[3],
            'body': values[4],
        }
Example #15
 def _strip_prefix(self, key):
     """Takes bytes, emits string."""
     key = ensure_bytes(key)
     for prefix in self.task_keyprefix, self.group_keyprefix:
         if key.startswith(prefix):
             return bytes_to_str(key[len(prefix):])
     return bytes_to_str(key)
Example #16
    def _unpack(self, payload, sep=str_to_bytes('\x00\x01')):
        raw_payload = b64decode(ensure_bytes(payload))
        first_sep = raw_payload.find(sep)

        signer = raw_payload[:first_sep]
        signer_cert = self._cert_store[signer]

        # shift 3 bits right to get signature length
        # 2048bit rsa key has a signature length of 256
        # 4096bit rsa key has a signature length of 512
        sig_len = signer_cert.get_pubkey().key_size >> 3
        sep_len = len(sep)
        signature_start_position = first_sep + sep_len
        signature_end_position = signature_start_position + sig_len
        signature = raw_payload[
            signature_start_position:signature_end_position
        ]

        v = raw_payload[signature_end_position + sep_len:].split(sep)

        return {
            'signer': signer,
            'signature': signature,
            'content_type': bytes_to_str(v[0]),
            'content_encoding': bytes_to_str(v[1]),
            'body': bytes_to_str(v[2]),
        }
Example #17
 def _pack(self, body, content_type, content_encoding, signer, signature,
           sep=str_to_bytes('\x00\x01')):
     fields = sep.join(
         ensure_bytes(s) for s in [signer, signature, content_type,
                                   content_encoding, body]
     )
     return b64encode(fields)
Example #18
 def __init__(self, key, password=None):
     with reraise_errors(
         'Invalid private key: {0!r}', errors=(ValueError,)
     ):
         self._key = serialization.load_pem_private_key(
             ensure_bytes(key),
             password=password,
             backend=default_backend())
Example #19
    def sign(self, data, digest):
        """Sign string containing data."""
        with reraise_errors('Unable to sign data: {0!r}'):

            padd = padding.PSS(mgf=padding.MGF1(digest),
                               salt_length=padding.PSS.MAX_LENGTH)

            return self._key.sign(ensure_bytes(data), padd, digest)
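
A hedged sketch of the sign/verify pair these key classes wrap, calling the cryptography package directly; hashes.SHA256() stands in for the digest argument:

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
digest = hashes.SHA256()
padd = padding.PSS(mgf=padding.MGF1(digest),
                   salt_length=padding.PSS.MAX_LENGTH)

signature = key.sign(b'data', padd, digest)
# verify() raises InvalidSignature on mismatch rather than returning False.
key.public_key().verify(signature, b'data', padd, digest)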
Example #20
    def _get_result_meta(self,
                         result,
                         state,
                         traceback,
                         request,
                         format_date=True,
                         encode=False):
        if state in self.READY_STATES:
            date_done = datetime.utcnow()
            if format_date:
                date_done = date_done.isoformat()
        else:
            date_done = None

        meta = {
            "status": state,
            "result": result,
            "traceback": traceback,
            "children": self.current_task_children(request),
            "date_done": date_done,
        }

        if request and getattr(request, "group", None):
            meta["group_id"] = request.group
        if request and getattr(request, "parent_id", None):
            meta["parent_id"] = request.parent_id

        if self.app.conf.find_value_for_key("extended", "result"):
            if request:
                request_meta = {
                    "name":
                    getattr(request, "task", None),
                    "args":
                    getattr(request, "args", None),
                    "kwargs":
                    getattr(request, "kwargs", None),
                    "worker":
                    getattr(request, "hostname", None),
                    "retries":
                    getattr(request, "retries", None),
                    "queue":
                    request.delivery_info.get("routing_key")
                    if hasattr(request, "delivery_info")
                    and request.delivery_info else None,
                }

                if encode:
                    # args and kwargs need to be encoded properly before saving
                    encode_needed_fields = {"args", "kwargs"}
                    for field in encode_needed_fields:
                        value = request_meta[field]
                        encoded_value = self.encode(value)
                        request_meta[field] = ensure_bytes(encoded_value)

                meta.update(request_meta)

        return meta
Example #21
def compress(body, content_type):
    """Compress text.

    Arguments:
        body (AnyStr): The text to compress.
        content_type (str): mime-type of compression method to use.
    """
    encoder, content_type = get_encoder(content_type)
    return encoder(ensure_bytes(body)), content_type
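
A hedged usage sketch, assuming the kombu.compression helpers this function builds on and the 'zlib' alias kombu registers by default:

from kombu.compression import compress, decompress

compressed, content_type = compress(b'x' * 1024, 'zlib')
assert decompress(compressed, content_type) == b'x' * 1024
assert len(compressed) < 1024  # highly repetitive input compresses well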
Example #22
    def verify(self, data, signature, digest):
        """Verify signature for string containing data."""
        with reraise_errors("Bad signature: {0!r}"):

            padd = padding.PSS(mgf=padding.MGF1(digest),
                               salt_length=padding.PSS.MAX_LENGTH)

            self.get_pubkey().verify(signature, ensure_bytes(data), padd,
                                     digest)
Example #23
def compress(body, content_type):
    """Compress text.

    :param body: The text to compress.
    :param content_type: mime-type of compression method to use.

    """
    encoder, content_type = get_encoder(content_type)
    return encoder(ensure_bytes(body)), content_type
Example #24
    def sign(self, data, digest):
        """Sign string containing data."""
        with reraise_errors('Unable to sign data: {0!r}'):

            padd = padding.PSS(
                mgf=padding.MGF1(digest),
                salt_length=padding.PSS.MAX_LENGTH)

            return self._key.sign(ensure_bytes(data), padd, digest)
Example #25
def compress(body, content_type):
    """Compress text.

    :param body: The text to compress.
    :param content_type: mime-type of compression method to use.

    """
    encoder, content_type = get_encoder(content_type)
    return encoder(ensure_bytes(body)), content_type
Example #26
def compress(body, content_type):
    """Compress text.

    Arguments:
        body (AnyStr): The text to compress.
        content_type (str): mime-type of compression method to use.
    """
    encoder, content_type = get_encoder(content_type)
    return encoder(ensure_bytes(body)), content_type
Example #27
 def _save_group(self, group_id, result):
     """Store the result of an executed group."""
     session = self.ResultSession()
     with session_cleanup(session):
         group = TaskSet(group_id, ensure_bytes(self.encode(result)))
         session.add(group)
         session.flush()
         session.commit()
         return result
Example #28
    def verify(self, data, signature, digest):
        """Verify signature for string containing data."""
        with reraise_errors('Bad signature: {0!r}'):

            padd = padding.PSS(
                mgf=padding.MGF1(digest),
                salt_length=padding.PSS.MAX_LENGTH)

            self.get_pubkey().verify(signature,
                                     ensure_bytes(data), padd, digest)
Example #29
 def __inner(*args, **kwargs):
     for module in modules:
         if isinstance(module, string_t):
             if not PY3:
                 module = ensure_bytes(module)
             module = types.ModuleType(module)
         sys.modules[module.__name__] = module
         try:
             return fun(*args, **kwargs)
         finally:
             sys.modules.pop(module.__name__, None)
Example #30
 def _update_result(self, task, result, state, traceback=None,
                    request=None):
     task.result = result
     task.status = state
     task.traceback = traceback
     if self.app.conf.find_value_for_key('extended', 'result'):
         task.name = getattr(request, 'task_name', None)
         task.args = ensure_bytes(
             self.encode(getattr(request, 'args', None))
         )
         task.kwargs = ensure_bytes(
             self.encode(getattr(request, 'kwargs', None))
         )
         task.worker = getattr(request, 'hostname', None)
         task.retries = getattr(request, 'retries', None)
         task.queue = (
             request.delivery_info.get("routing_key")
             if hasattr(request, "delivery_info") and request.delivery_info
             else None
         )
Example #31
def republish(producer, message, exchange=None, routing_key=None,
              remove_props=['application_headers',
                            'content_type',
                            'content_encoding',
                            'headers']):
    body = ensure_bytes(message.body)  # use raw message body.
    info, headers, props = (message.delivery_info,
                            message.headers, message.properties)
    exchange = info['exchange'] if exchange is None else exchange
    routing_key = info['routing_key'] if routing_key is None else routing_key
    ctype, enc = message.content_type, message.content_encoding
    # remove compression header, as this will be inserted again
    # when the message is recompressed.
    compression = headers.pop('compression', None)

    for key in remove_props:
        props.pop(key, None)

    producer.publish(ensure_bytes(body), exchange=exchange,
                     routing_key=routing_key, compression=compression,
                     headers=headers, content_type=ctype,
                     content_encoding=enc, **props)
Example #32
def migrate_task(producer, body_, message,
        remove_props=["application_headers",
                      "content_type",
                      "content_encoding"]):
    body = ensure_bytes(message.body)  # use raw message body.
    info, headers, props = (message.delivery_info,
                            message.headers,
                            message.properties)
    ctype, enc = message.content_type, message.content_encoding
    # remove compression header, as this will be inserted again
    # when the message is recompressed.
    compression = headers.pop("compression", None)

    for key in remove_props:
        props.pop(key, None)

    producer.publish(ensure_bytes(body), exchange=info["exchange"],
                           routing_key=info["routing_key"],
                           compression=compression,
                           headers=headers,
                           content_type=ctype,
                           content_encoding=enc,
                           **props)
Example #33
 def serialize(self, data):
     """Serialize data structure into string."""
     assert self._key is not None
     assert self._cert is not None
     with reraise_errors('Unable to serialize: {0!r}', (Exception,)):
         content_type, content_encoding, body = dumps(
             bytes_to_str(data), serializer=self._serializer)
         # What we sign is the serialized body, not the body itself.
         # this way the receiver doesn't have to decode the contents
         # to verify the signature (and thus avoiding potential flaws
         # in the decoding step).
         body = ensure_bytes(body)
         return self._pack(body, content_type, content_encoding,
                           signature=self._key.sign(body, self._digest),
                           signer=self._cert.get_id())
Example #34
 def serialize(self, data):
     """serialize data structure into string"""
     assert self._key is not None
     assert self._cert is not None
     with reraise_errors('Unable to serialize: {0!r}', (Exception, )):
         content_type, content_encoding, body = encode(
             data, serializer=self._serializer)
         # What we sign is the serialized body, not the body itself.
         # this way the receiver doesn't have to decode the contents
         # to verify the signature (and thus avoiding potential flaws
         # in the decoding step).
         body = ensure_bytes(body)
         return self._pack(body, content_type, content_encoding,
                           signature=self._key.sign(body, self._digest),
                           signer=self._cert.get_id())
Example #35
 def _store_result(self, task_id, result, state,
                   traceback=None, max_retries=3, **kwargs):
     """Store return value and state of an executed task."""
     session = self.ResultSession()
     with session_cleanup(session):
         task = list(session.query(Task).filter(Task.task_id == task_id))
         task = task and task[0]
         if not task:
             task = Task(task_id)
             session.add(task)
             session.flush()
         task.result = ensure_bytes(self.encode(result))
         task.status = state
         task.traceback = traceback
         session.commit()
         return result
Example #36
File: case.py Project: mpermana/pet
 def __inner(*args, **kwargs):
     gen = []
     for module in modules:
         if isinstance(module, string_t):
             if not PY3:
                 module = ensure_bytes(module)
             module = types.ModuleType(module)
         gen.append(module)
         sys.modules[module.__name__] = module
         name = module.__name__
         if '.' in name:
             parent, _, attr = name.rpartition('.')
             setattr(sys.modules[parent], attr, module)
     try:
         return fun(*args, **kwargs)
     finally:
         for module in gen:
             sys.modules.pop(module.__name__, None)
Example #37
 def __inner(*args, **kwargs):
     gen = []
     for module in modules:
         if isinstance(module, string_t):
             if not PY3:
                 module = ensure_bytes(module)
             module = types.ModuleType(module)
         gen.append(module)
         sys.modules[module.__name__] = module
         name = module.__name__
         if '.' in name:
             parent, _, attr = name.rpartition('.')
             setattr(sys.modules[parent], attr, module)
     try:
         return fun(*args, **kwargs)
     finally:
         for module in gen:
             sys.modules.pop(module.__name__, None)
Example #38
    def _unpack(self, payload, sep=str_to_bytes("\x00\x01")):
        raw_payload = b64decode(ensure_bytes(payload))
        first_sep = raw_payload.find(sep)

        signer = raw_payload[:first_sep]
        signer_cert = self._cert_store[signer]

        sig_len = signer_cert._cert.get_pubkey().bits() >> 3
        signature = raw_payload[first_sep + len(sep) : first_sep + len(sep) + sig_len]
        end_of_sig = first_sep + len(sep) + sig_len + len(sep)

        v = raw_payload[end_of_sig:].split(sep)

        return {
            "signer": signer,
            "signature": signature,
            "content_type": bytes_to_str(v[0]),
            "content_encoding": bytes_to_str(v[1]),
            "body": bytes_to_str(v[2]),
        }
Example #39
    def serialize(self, data):
        content_type, content_encoding, body = dumps(
            bytes_to_str(data), serializer=self._serializer)

        return b64encode(self.encrypt(ensure_bytes(body)))
Example #40
def file_hash(filename, algorithm='md5'):
    hobj = hashlib.new(algorithm)
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(2**20), b''):  # sentinel must be bytes for a binary file
            hobj.update(ensure_bytes(chunk))
    return hobj.digest()
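
A usage sketch for the helper above (requires the bytes sentinel fix; the temp file and algorithm are illustrative):

import hashlib
import tempfile

with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b'hello world')

assert file_hash(tmp.name, 'sha256') == hashlib.sha256(b'hello world').digest()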
Example #41
File: key.py Project: Scalr/celery
 def sign(self, data, digest):
     """Sign string containing data."""
     with reraise_errors('Unable to sign data: {0!r}'):
         return crypto.sign(self._key, ensure_bytes(data), digest)
Example #42
 def test_sign(self):
     pkey = PrivateKey(KEY1)
     pkey.sign(ensure_bytes('test'), get_digest_algorithm())
     with pytest.raises(AttributeError):
         pkey.sign(ensure_bytes('test'), get_digest_algorithm('unknown'))
Example #43
 def get_key_for_chord(self, group_id):
     """Get the cache key for the chord waiting on group with given id."""
     return self.chord_keyprefix + ensure_bytes(group_id)
Example #44
 def get_key_for_chord(self, group_id):
     """Get the cache key for the chord waiting on group with given id."""
     return self.chord_keyprefix + ensure_bytes(group_id)
Example #45
 def get_key_for_group(self, group_id):
     """Get the cache key for a group by id."""
     return self.group_keyprefix + ensure_bytes(group_id)
Example #46
 def get_key_for_task(self, task_id):
     """Get the cache key for a task by id."""
     return self.task_keyprefix + ensure_bytes(task_id)
Example #47
class KeyValueStoreBackend(BaseBackend):
    task_keyprefix = ensure_bytes('celery-task-meta-')
    group_keyprefix = ensure_bytes('celery-taskset-meta-')
    chord_keyprefix = ensure_bytes('chord-unlock-')
    implements_incr = False

    def get(self, key):
        raise NotImplementedError('Must implement the get method.')

    def mget(self, keys):
        raise NotImplementedError('Does not support get_many')

    def set(self, key, value):
        raise NotImplementedError('Must implement the set method.')

    def delete(self, key):
        raise NotImplementedError('Must implement the delete method')

    def incr(self, key):
        raise NotImplementedError('Does not implement incr')

    def expire(self, key, value):
        pass

    def get_key_for_task(self, task_id):
        """Get the cache key for a task by id."""
        return self.task_keyprefix + ensure_bytes(task_id)

    def get_key_for_group(self, group_id):
        """Get the cache key for a group by id."""
        return self.group_keyprefix + ensure_bytes(group_id)

    def get_key_for_chord(self, group_id):
        """Get the cache key for the chord waiting on group with given id."""
        return self.chord_keyprefix + ensure_bytes(group_id)

    def _strip_prefix(self, key):
        """Takes bytes, emits string."""
        key = ensure_bytes(key)
        for prefix in self.task_keyprefix, self.group_keyprefix:
            if key.startswith(prefix):
                return bytes_to_str(key[len(prefix):])
        return bytes_to_str(key)

    def _mget_to_results(self, values, keys):
        if hasattr(values, 'items'):
            # client returns dict so mapping preserved.
            return dict((self._strip_prefix(k), self.decode(v))
                        for k, v in items(values) if v is not None)
        else:
            # client returns list so need to recreate mapping.
            return dict((bytes_to_str(keys[i]), self.decode(value))
                        for i, value in enumerate(values) if value is not None)

    def get_many(self,
                 task_ids,
                 timeout=None,
                 interval=0.5,
                 READY_STATES=states.READY_STATES):
        interval = 0.5 if interval is None else interval
        ids = task_ids if isinstance(task_ids, set) else set(task_ids)
        cached_ids = set()
        cache = self._cache
        for task_id in ids:
            try:
                cached = cache[task_id]
            except KeyError:
                pass
            else:
                if cached['status'] in READY_STATES:
                    yield bytes_to_str(task_id), cached
                    cached_ids.add(task_id)

        ids.difference_update(cached_ids)
        iterations = 0
        while ids:
            keys = list(ids)
            r = self._mget_to_results(
                self.mget([self.get_key_for_task(k) for k in keys]), keys)
            cache.update(r)
            ids.difference_update(set(bytes_to_str(v) for v in r))
            for key, value in items(r):
                yield bytes_to_str(key), value
            if timeout and iterations * interval >= timeout:
                raise TimeoutError('Operation timed out ({0})'.format(timeout))
            time.sleep(interval)  # don't busy loop.
            iterations += 1

    def _forget(self, task_id):
        self.delete(self.get_key_for_task(task_id))

    def _store_result(self,
                      task_id,
                      result,
                      status,
                      traceback=None,
                      request=None,
                      **kwargs):
        meta = {
            'status': status,
            'result': result,
            'traceback': traceback,
            'children': self.current_task_children(request)
        }
        self.set(self.get_key_for_task(task_id), self.encode(meta))
        return result

    def _save_group(self, group_id, result):
        self.set(self.get_key_for_group(group_id),
                 self.encode({'result': result.serializable()}))
        return result

    def _delete_group(self, group_id):
        self.delete(self.get_key_for_group(group_id))

    def _get_task_meta_for(self, task_id):
        """Get task metadata for a task by id."""
        meta = self.get(self.get_key_for_task(task_id))
        if not meta:
            return {'status': states.PENDING, 'result': None}
        return self.decode(meta)

    def _restore_group(self, group_id):
        """Get task metadata for a task by id."""
        meta = self.get(self.get_key_for_group(group_id))
        # previously this was always pickled, but later this
        # was extended to support other serializers, so the
        # structure is kind of weird.
        if meta:
            meta = self.decode(meta)
            result = meta['result']
            meta['result'] = from_serializable(result, self.app)
            return meta

    def on_chord_apply(self, group_id, body, result=None, **kwargs):
        if self.implements_incr:
            self.save_group(group_id, self.app.GroupResult(group_id, result))
        else:
            self.fallback_chord_unlock(group_id, body, result, **kwargs)

    def on_chord_part_return(self, task, propagate=None):
        if not self.implements_incr:
            return
        from celery import maybe_signature
        from celery.result import GroupResult
        app = self.app
        if propagate is None:
            propagate = self.app.conf.CELERY_CHORD_PROPAGATES
        gid = task.request.group
        if not gid:
            return
        key = self.get_key_for_chord(gid)
        try:
            deps = GroupResult.restore(gid, backend=task.backend)
        except Exception as exc:
            callback = maybe_signature(task.request.chord, app=self.app)
            return app._tasks[callback.task].backend.fail_from_current_stack(
                callback.id,
                exc=ChordError('Cannot restore group: {0!r}'.format(exc)),
            )
        if deps is None:
            try:
                raise ValueError(gid)
            except ValueError as exc:
                callback = maybe_signature(task.request.chord, app=self.app)
                task = app._tasks[callback.task]
                return task.backend.fail_from_current_stack(
                    callback.id,
                    exc=ChordError('GroupResult {0} no longer exists'.format(
                        gid, )))
        val = self.incr(key)
        if val >= len(deps):
            callback = maybe_signature(task.request.chord, app=self.app)
            j = deps.join_native if deps.supports_native_join else deps.join
            try:
                ret = j(propagate=propagate)
            except Exception as exc:
                try:
                    culprit = next(deps._failed_join_report())
                    reason = 'Dependency {0.id} raised {1!r}'.format(
                        culprit,
                        exc,
                    )
                except StopIteration:
                    reason = repr(exc)

                app._tasks[callback.task].backend.fail_from_current_stack(
                    callback.id,
                    exc=ChordError(reason),
                )
            else:
                try:
                    callback.delay(ret)
                except Exception as exc:
                    app._tasks[callback.task].backend.fail_from_current_stack(
                        callback.id,
                        exc=ChordError('Callback error: {0!r}'.format(exc)),
                    )
            finally:
                deps.delete()
                self.client.delete(key)
        else:
            self.expire(key, 86400)
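
The key helpers at the top of this class are plain byte-prefix concatenation and stripping; a minimal standalone sketch of the round trip (prefix copied from the class, the task id is made up):

from kombu.utils.encoding import bytes_to_str, ensure_bytes

task_keyprefix = ensure_bytes('celery-task-meta-')

def key_for_task(task_id):
    return task_keyprefix + ensure_bytes(task_id)

def strip_prefix(key):
    key = ensure_bytes(key)
    if key.startswith(task_keyprefix):
        return bytes_to_str(key[len(task_keyprefix):])
    return bytes_to_str(key)

assert key_for_task('b5deef3c') == b'celery-task-meta-b5deef3c'
assert strip_prefix(key_for_task('b5deef3c')) == 'b5deef3c'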
Example #48
 def set(self, key, value):
     with self.open(self._filename(key), 'wb') as outfile:
         outfile.write(ensure_bytes(value))
Example #49
 def set(self, key, value):
     with self.open(self._filename(key), 'wb') as outfile:
         outfile.write(ensure_bytes(value))
Example #50
File: key.py Project: xpxu/celery
 def sign(self, data, digest):
     """sign string containing data."""
     with reraise_errors('Unable to sign data: {0!r}'):
         return crypto.sign(self._key, ensure_bytes(data), digest)
Example #51
 def get_key_for_task(self, task_id):
     """Get the cache key for a task by id."""
     return self.task_keyprefix + ensure_bytes(task_id)
Example #52
 def get_key_for_group(self, group_id):
     """Get the cache key for a group by id."""
     return self.group_keyprefix + ensure_bytes(group_id)
Example #53
def file_hash(filename, algorithm='md5'):
    hobj = hashlib.new(algorithm)
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(2 ** 20), b''):  # sentinel must be bytes for a binary file
            hobj.update(ensure_bytes(chunk))
    return hobj.digest()
Example #54
 def _put(self, queue, message, **kwargs):
     return self._get_queue(queue).put(
         ensure_bytes(dumps(message)),
         priority=self._get_message_priority(message, reverse=True),
     )
Example #55
File: base.py Project: axiak/celery
 def get_key_for_chord(self, taskset_id):
     """Get the cache key for the chord waiting on taskset with given id."""
     return self.chord_keyprefix + ensure_bytes(taskset_id)
Example #56
class KeyValueStoreBackend(BaseBackend):
    task_keyprefix = ensure_bytes('celery-task-meta-')
    group_keyprefix = ensure_bytes('celery-taskset-meta-')
    chord_keyprefix = ensure_bytes('chord-unlock-')
    implements_incr = False

    def get(self, key):
        raise NotImplementedError('Must implement the get method.')

    def mget(self, keys):
        raise NotImplementedError('Does not support get_many')

    def set(self, key, value):
        raise NotImplementedError('Must implement the set method.')

    def delete(self, key):
        raise NotImplementedError('Must implement the delete method')

    def incr(self, key):
        raise NotImplementedError('Does not implement incr')

    def expire(self, key, value):
        pass

    def get_key_for_task(self, task_id):
        """Get the cache key for a task by id."""
        return self.task_keyprefix + ensure_bytes(task_id)

    def get_key_for_group(self, group_id):
        """Get the cache key for a group by id."""
        return self.group_keyprefix + ensure_bytes(group_id)

    def get_key_for_chord(self, group_id):
        """Get the cache key for the chord waiting on group with given id."""
        return self.chord_keyprefix + ensure_bytes(group_id)

    def _strip_prefix(self, key):
        """Takes bytes, emits string."""
        key = ensure_bytes(key)
        for prefix in self.task_keyprefix, self.group_keyprefix:
            if key.startswith(prefix):
                return bytes_to_str(key[len(prefix):])
        return bytes_to_str(key)

    def _mget_to_results(self, values, keys):
        if hasattr(values, 'items'):
            # client returns dict so mapping preserved.
            return dict((self._strip_prefix(k), self.decode(v))
                            for k, v in items(values)
                                if v is not None)
        else:
            # client returns list so need to recreate mapping.
            return dict((bytes_to_str(keys[i]), self.decode(value))
                            for i, value in enumerate(values)
                                if value is not None)

    def get_many(self, task_ids, timeout=None, interval=0.5):
        ids = set(task_ids)
        cached_ids = set()
        for task_id in ids:
            try:
                cached = self._cache[task_id]
            except KeyError:
                pass
            else:
                if cached['status'] in states.READY_STATES:
                    yield bytes_to_str(task_id), cached
                    cached_ids.add(task_id)

        ids.difference_update(cached_ids)
        iterations = 0
        while ids:
            keys = list(ids)
            r = self._mget_to_results(self.mget([self.get_key_for_task(k)
                                                    for k in keys]), keys)
            self._cache.update(r)
            ids.difference_update(set(map(bytes_to_str, r)))
            for key, value in items(r):
                yield bytes_to_str(key), value
            if timeout and iterations * interval >= timeout:
                raise TimeoutError('Operation timed out ({0})'.format(timeout))
            time.sleep(interval)  # don't busy loop.
            iterations += 1

    def _forget(self, task_id):
        self.delete(self.get_key_for_task(task_id))

    def _store_result(self, task_id, result, status, traceback=None):
        meta = {'status': status, 'result': result, 'traceback': traceback,
                'children': self.current_task_children()}
        self.set(self.get_key_for_task(task_id), self.encode(meta))
        return result

    def _save_group(self, group_id, result):
        self.set(self.get_key_for_group(group_id),
                 self.encode({'result': result.serializable()}))
        return result

    def _delete_group(self, group_id):
        self.delete(self.get_key_for_group(group_id))

    def _get_task_meta_for(self, task_id):
        """Get task metadata for a task by id."""
        meta = self.get(self.get_key_for_task(task_id))
        if not meta:
            return {'status': states.PENDING, 'result': None}
        return self.decode(meta)

    def _restore_group(self, group_id):
        """Get task metadata for a task by id."""
        meta = self.get(self.get_key_for_group(group_id))
        # previously this was always pickled, but later this
        # was extended to support other serializers, so the
        # structure is kind of weird.
        if meta:
            meta = self.decode(meta)
            result = meta['result']
            if isinstance(result, (list, tuple)):
                return {'result': from_serializable(result)}
            return meta

    def on_chord_apply(self, group_id, body, result=None, **kwargs):
        if self.implements_incr:
            self.app.GroupResult(group_id, result).save()
        else:
            self.fallback_chord_unlock(group_id, body, result, **kwargs)

    def on_chord_part_return(self, task, propagate=False):
        if not self.implements_incr:
            return
        from celery import subtask
        from celery.result import GroupResult
        gid = task.request.group
        if not gid:
            return
        key = self.get_key_for_chord(gid)
        deps = GroupResult.restore(gid, backend=task.backend)
        if deps is None:
            return
        val = self.incr(key)
        if val >= len(deps):
            subtask(task.request.chord).delay(deps.join(propagate=propagate))
            deps.delete()
            self.client.delete(key)
        else:
            self.expire(key, 86400)
Example #57
 def __init__(self, cert):
     with reraise_errors(
         'Invalid certificate: {0!r}', errors=(ValueError,)
     ):
         self._cert = load_pem_x509_certificate(
             ensure_bytes(cert), backend=default_backend())
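
A hedged sketch of the cryptography-package call this wraps: build a throwaway self-signed certificate, dump it to PEM, and load it back with load_pem_x509_certificate (all names and the validity window are illustrative):

import datetime

from cryptography import x509
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, 'example')])
now = datetime.datetime.utcnow()
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(now)
    .not_valid_after(now + datetime.timedelta(days=1))
    .sign(key, hashes.SHA256())
)
pem = cert.public_bytes(serialization.Encoding.PEM)
loaded = x509.load_pem_x509_certificate(pem)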