Example #1
    def test_rotate_decrypt_no_shared_keys(self, backend):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)

        mf1 = MultiFernet([f1])
        mf2 = MultiFernet([f2])

        with pytest.raises(InvalidToken):
            mf2.rotate(mf1.encrypt(b"abc"))
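The complementary case: rotation succeeds whenever the rotating MultiFernet holds any key that can decrypt the token, and the result is re-encrypted under its first (primary) key. A minimal sketch with ad-hoc keys:

from cryptography.fernet import Fernet, MultiFernet

old_key, new_key = Fernet.generate_key(), Fernet.generate_key()
token = MultiFernet([Fernet(old_key)]).encrypt(b"abc")
rotated = MultiFernet([Fernet(new_key), Fernet(old_key)]).rotate(token)
assert Fernet(new_key).decrypt(rotated) == b"abc"  # now under the new key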
Example #2
    def test_decrypt(self, backend):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)
        f = MultiFernet([f1, f2])

        assert f.decrypt(f1.encrypt(b"abc")) == b"abc"
        assert f.decrypt(f2.encrypt(b"abc")) == b"abc"

        with pytest.raises(InvalidToken):
            f.decrypt(b"\x00" * 16)
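Like Fernet.decrypt, MultiFernet.decrypt also accepts an optional ttl in seconds; a quick sketch:

from cryptography.fernet import Fernet, MultiFernet

f = MultiFernet([Fernet(Fernet.generate_key())])
token = f.encrypt(b"abc")
assert f.decrypt(token, ttl=60) == b"abc"  # InvalidToken once the token is older than 60s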
Example #3
 def __init__(self, *master_keys, encrypted_store: dict = None):
     if not master_keys:
         raise ValueError('at least one master key must be passed')
     # The first key is the primary (used to encrypt); all keys can decrypt.
     self.crypt = MultiFernet([Fernet(key) for key in master_keys])
     self.encrypted_store = encrypted_store or dict()
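A hypothetical construction of the class above (its name is not shown in the snippet, so EncryptedStore is assumed here):

from cryptography.fernet import Fernet

primary, legacy = Fernet.generate_key(), Fernet.generate_key()
store = EncryptedStore(primary, legacy)  # first key encrypts, both decrypt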
Example #4
    def test_rotate_preserves_timestamp(self, backend, monkeypatch):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)

        mf1 = MultiFernet([f1])
        mf2 = MultiFernet([f2, f1])

        plaintext = b"abc"
        mf1_ciphertext = mf1.encrypt(plaintext)

        later = datetime.datetime.now() + datetime.timedelta(minutes=5)
        later_time = time.mktime(later.timetuple())
        monkeypatch.setattr(time, "time", lambda: later_time)

        original_time, _ = Fernet._get_unverified_token_data(mf1_ciphertext)
        rotated_time, _ = Fernet._get_unverified_token_data(
            mf2.rotate(mf1_ciphertext)
        )

        assert later_time != rotated_time
        assert original_time == rotated_time
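This holds because MultiFernet.rotate re-encrypts the payload under the primary (first) key while reusing the original token's timestamp, so rotation changes the key but not the embedded creation time.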
Example #5
class EncryptingPacker(object):
    """Implement conversion of Python objects to/from encrypted bytestrings.

    :param str key: a `Fernet`_ key to use for encryption and decryption
    :param list old_keys: additional `Fernet`_ keys to use for decryption

    .. note::

        Encrypted messages contain the timestamp at which they were generated
        *in plaintext*. See `our audit`_ for discussion of this and other
        considerations with `Fernet`_.

    .. _Fernet: https://cryptography.io/en/latest/fernet/
    .. _our audit: https://github.com/gratipay/gratipay.com/pull/3998#issuecomment-216227070

    """

    def __init__(self, key, *old_keys):
        keys = [key] + list(old_keys)
        self.fernet = MultiFernet([Fernet(k) for k in keys])

    def pack(self, obj):
        """Given a JSON-serializable object, return a `Fernet`_ token.
        """
        obj = json.dumps(obj)           # serialize to unicode
        obj = obj.encode('utf8')        # convert to bytes
        obj = self.fernet.encrypt(obj)  # encrypt
        return obj

    def unpack(self, token):
        """Given a `Fernet`_ token with JSON in the ciphertext, return a Python object.
        """
        if not isinstance(token, bytes):
            raise TypeError("need bytes, got {}".format(type(token)))
        obj = token
        obj = self.fernet.decrypt(obj)  # decrypt
        obj = obj.decode('utf8')        # convert to unicode
        obj = json.loads(obj)           # deserialize from unicode
        return obj
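A minimal usage sketch for the packer above, with ad-hoc keys:

from cryptography.fernet import Fernet

packer = EncryptingPacker(Fernet.generate_key(), Fernet.generate_key())
token = packer.pack({"user_id": 42})   # encrypted with the first key
assert packer.unpack(token) == {"user_id": 42}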
Example #6
 def update(self, **kwargs):
     """Update the arguments, if a ``crypto_key`` is in kwargs then the
     ``self.fernet`` attribute will be initialized"""
     for key, val in kwargs.items():
         if key == "crypto_key":
             fkeys = []
             if not isinstance(val, list):
                 val = [val]
             for v in val:
                 fkeys.append(Fernet(v))
             self.fernet = MultiFernet(fkeys)
         else:
             setattr(self, key, val)
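Thanks to the isinstance check, callers may pass a single key or a list; both calls below would initialize self.fernet (settings stands in for whatever object carries the method above):

from cryptography.fernet import Fernet

settings.update(crypto_key=Fernet.generate_key())  # single key
settings.update(crypto_key=[Fernet.generate_key(), Fernet.generate_key()])  # rotation list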
Example #7
 def __init__(self, expiry=60, hosts=None, prefix="asgi:", group_expiry=86400, capacity=100, channel_capacity=None,
              symmetric_encryption_keys=None):
     super(RedisChannelLayer, self).__init__(
         expiry=expiry,
         group_expiry=group_expiry,
         capacity=capacity,
         channel_capacity=channel_capacity,
     )
     # Make sure they provided some hosts, or provide a default
     if not hosts:
         hosts = [("localhost", 6379)]
     self.hosts = []
     
     if isinstance(hosts, six.string_types):
         # user accidentally used one host string instead of providing a list of hosts
         raise ValueError('ASGI Redis hosts must be specified as an iterable list of hosts.')
     for entry in hosts:
         if isinstance(entry, six.string_types):
             self.hosts.append(entry)
         else:
             self.hosts.append("redis://%s:%d/0" % (entry[0], entry[1]))
     self.prefix = prefix
     assert isinstance(self.prefix, six.text_type), "Prefix must be unicode"
     # Precalculate some values for ring selection
     self.ring_size = len(self.hosts)
     self.ring_divisor = int(math.ceil(4096 / float(self.ring_size)))
     # Create connections ahead of time (they won't call out just yet, but
     # we want to connection-pool them later)
     self._connection_list = self._generate_connections()
     # Decide on a unique client prefix to use in ! sections
     # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
     self.client_prefix = "".join(random.choice(string.ascii_letters) for i in range(8))
     # Register scripts
     connection = self.connection(None)
     self.chansend = connection.register_script(self.lua_chansend)
     self.lpopmany = connection.register_script(self.lua_lpopmany)
     self.delprefix = connection.register_script(self.lua_delprefix)
     # See if we can do encryption if they asked
     if symmetric_encryption_keys:
         if isinstance(symmetric_encryption_keys, six.string_types):
             raise ValueError("symmetric_encryption_keys must be a list of possible keys")
         try:
             from cryptography.fernet import MultiFernet
         except ImportError:
             raise ValueError("Cannot run with encryption without 'cryptography' installed.")
         sub_fernets = [self.make_fernet(key) for key in symmetric_encryption_keys]
         self.crypter = MultiFernet(sub_fernets)
     else:
         self.crypter = None
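A hedged construction sketch for the layer above (host tuple and key strings are illustrative; make_fernet, defined elsewhere in the class, is expected to turn each string into a Fernet instance):

layer = RedisChannelLayer(
    hosts=[("localhost", 6379)],
    symmetric_encryption_keys=["current-secret", "previous-secret"],
)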
Example #8
 def __init__(self):
     if website.env.aws_secret_access_key:
         sm = self.secrets_manager = boto3.client('secretsmanager', region_name='eu-west-1')
         secret = sm.get_secret_value(SecretId='Fernet')
         rotation_start = secret['CreatedDate'].date()
         keys = secret['SecretString'].split()
     else:
         self.secrets_manager = None
         parts = os.environ['SECRET_FERNET_KEYS'].split()
         rotation_start = date(*map(int, parts[0].split('-')))
         keys = parts[1:]
     self.fernet_rotation_start = rotation_start
     self.fernet_keys = [k.encode('ascii') for k in keys]
     self.fernet = MultiFernet([Fernet(k) for k in self.fernet_keys])
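The environment-variable fallback expects a rotation-start date followed by whitespace-separated Fernet keys, where the first listed key becomes the MultiFernet primary. A hypothetical value (the key is generated here purely for illustration):

import os
from cryptography.fernet import Fernet

os.environ['SECRET_FERNET_KEYS'] = '2024-01-01 ' + Fernet.generate_key().decode('ascii')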
Example #9
    def test_rotate(self, backend):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)

        mf1 = MultiFernet([f1])
        mf2 = MultiFernet([f2, f1])

        plaintext = b"abc"
        mf1_ciphertext = mf1.encrypt(plaintext)

        assert mf2.decrypt(mf1_ciphertext) == plaintext

        rotated = mf2.rotate(mf1_ciphertext)

        assert rotated != mf1_ciphertext
        assert mf2.decrypt(rotated) == plaintext

        with pytest.raises(InvalidToken):
            mf1.decrypt(rotated)
Example #11
def create_garble_tables(circuit, p_values, keys):
    for gate in circuit.Gates:
        if (gate.type == "NOT"):
            for input in util.create_all_combination(1):
                p_val = input[0] ^ p_values[gate.input[0]]
                input_keys = keys[gate.input[0]][p_val]
                f = MultiFernet([Fernet(input_keys), Fernet(input_keys)])
                output_value = gate.evaluate(input) ^ p_values[gate.id]
                output_value = (output_value, keys[gate.id][output_value])
                output_value = f.encrypt(pickle.dumps(output_value))
                gate.garbled_table.add_entry([p_val], output_value )
        else:
            for input in util.create_all_combination(2):
                ps = list(map(lambda x: input[x] ^ p_values[gate.input[x]],
                              [0,1]))
                input_keys = list(map(lambda x, y: keys[x][y], gate.input, ps))
                f = MultiFernet(list(map(lambda x: Fernet(x), input_keys)))
                output_value = gate.evaluate(input) ^ p_values[gate.id]
                output_value = (output_value, keys[gate.id][output_value])
                output_value = f.encrypt(pickle.dumps(output_value))
                gate.garbled_table.add_entry(ps, output_value)
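One thing to keep in mind when reading this: MultiFernet.encrypt always encrypts with the first Fernet in its list; additional keys only widen what can be decrypted. A quick check:

from cryptography.fernet import Fernet, MultiFernet

k1, k2 = Fernet.generate_key(), Fernet.generate_key()
token = MultiFernet([Fernet(k1), Fernet(k2)]).encrypt(b"x")
assert Fernet(k1).decrypt(token) == b"x"  # the first key alone decrypts it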
Example #12
 def test_non_iterable_argument(self, backend):
     with pytest.raises(TypeError):
         MultiFernet(None)
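An empty list is rejected too, but with ValueError rather than TypeError; a companion sketch:

import pytest
from cryptography.fernet import MultiFernet

with pytest.raises(ValueError):
    MultiFernet([])  # at least one Fernet instance is required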
Example #13
"""
Project: encryption.
Author: Devam A

Description: A test project for encryption and decryption that can be added to the shop project.
Status: Learning.
"""

from cryptography.fernet import Fernet, MultiFernet

key1 = Fernet(Fernet.generate_key())
key2 = Fernet(Fernet.generate_key())

f = MultiFernet([key1, key2])

token = f.encrypt(b"My name is Devam Agrawal.")

print(token)

d = f.decrypt(token)

print(d.decode())

name = input("enter your name:-")
address = input("enter your address:-")
Example #14
    def test_encrypt(self, backend):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)
        f = MultiFernet([f1, f2])

        assert f1.decrypt(f.encrypt(b"abc")) == b"abc"
Example #16
class AutopushSettings(object):
    """Main Autopush Settings Object"""
    options = ["crypto_key", "hostname", "min_ping_interval", "max_data"]

    def __init__(
        self,
        crypto_key=None,
        datadog_api_key=None,
        datadog_app_key=None,
        datadog_flush_interval=None,
        hostname=None,
        port=None,
        router_scheme=None,
        router_hostname=None,
        router_port=None,
        endpoint_scheme=None,
        endpoint_hostname=None,
        endpoint_port=None,
        proxy_protocol_port=None,
        memusage_port=None,
        router_conf=None,
        router_tablename="router",
        router_read_throughput=5,
        router_write_throughput=5,
        storage_tablename="storage",
        storage_read_throughput=5,
        storage_write_throughput=5,
        message_tablename="message",
        message_read_throughput=5,
        message_write_throughput=5,
        statsd_host="localhost",
        statsd_port=8125,
        resolve_hostname=False,
        max_data=4096,
        # Reflected up from UDP Router
        wake_timeout=0,
        env='development',
        enable_cors=False,
        hello_timeout=0,
        bear_hash_key=None,
        preflight_uaid="deadbeef00000000deadbeef00000000",
        ami_id=None,
        msg_limit=100,
        debug=False,
        connect_timeout=0.5,
        ssl_key=None,
        ssl_cert=None,
        ssl_dh_param=None,
        router_ssl_key=None,
        router_ssl_cert=None,
        client_certs=None,
        auto_ping_interval=None,
        auto_ping_timeout=None,
        max_connections=None,
        close_handshake_timeout=None,
    ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        self.debug = debug
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        if not debug:
            pool._factory = QuietClientFactory

        self.agent = Agent(reactor, connectTimeout=connect_timeout, pool=pool)

        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if bear_hash_key is None:
            bear_hash_key = []
        if not isinstance(bear_hash_key, list):
            bear_hash_key = [bear_hash_key]
        self.bear_hash_key = bear_hash_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.datadog_api_key = datadog_api_key
        self.datadog_app_key = datadog_app_key
        self.datadog_flush_interval = datadog_flush_interval
        self.statsd_host = statsd_host
        self.statsd_port = statsd_port

        self.port = port
        self.router_port = router_port
        self.proxy_protocol_port = proxy_protocol_port
        self.memusage_port = memusage_port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        if router_conf is None:
            router_conf = {}
        self.router_conf = router_conf
        self.router_url = canonical_url(router_scheme or 'http',
                                        self.router_hostname, router_port)

        self.endpoint_url = canonical_url(endpoint_scheme or 'http',
                                          self.endpoint_hostname,
                                          endpoint_port)

        # not accurate under autoendpoint (like router_url)
        self.ws_url = "{}://{}:{}/".format("wss" if ssl_key else "ws",
                                           self.hostname, self.port)

        self.ssl_key = ssl_key
        self.ssl_cert = ssl_cert
        self.ssl_dh_param = ssl_dh_param
        self.router_ssl_key = router_ssl_key
        self.router_ssl_cert = router_ssl_cert

        self.enable_tls_auth = client_certs is not None
        self.client_certs = client_certs

        self.auto_ping_interval = auto_ping_interval
        self.auto_ping_timeout = auto_ping_timeout
        self.max_connections = max_connections
        self.close_handshake_timeout = close_handshake_timeout

        self.router_tablename = router_tablename
        self.router_read_throughput = router_read_throughput
        self.router_write_throughput = router_write_throughput
        self.storage_tablename = storage_tablename
        self.storage_read_throughput = storage_read_throughput
        self.storage_write_throughput = storage_write_throughput
        self.message_tablename = message_tablename
        self.message_read_throughput = message_read_throughput
        self.message_write_throughput = message_write_throughput

        self.msg_limit = msg_limit

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

        self.ami_id = ami_id

        # Generate messages per legacy rules, only used for testing to
        # generate legacy data.
        self._notification_legacy = False
        self.preflight_uaid = preflight_uaid

    @classmethod
    def from_argparse(cls, ns, **kwargs):
        # type: (Namespace, **Any) -> AutopushSettings
        """Create an instance from argparse/additional kwargs"""
        router_conf = {}
        if ns.key_hash:
            db.key_hash = ns.key_hash
        # Some routers require a websocket to timeout on idle
        # (e.g. UDP)
        if ns.wake_pem is not None and ns.wake_timeout != 0:
            router_conf["simplepush"] = {
                "idle": ns.wake_timeout,
                "server": ns.wake_server,
                "cert": ns.wake_pem
            }
        if ns.apns_creds:
            # if you have the critical elements for each external
            # router, create it
            try:
                router_conf["apns"] = json.loads(ns.apns_creds)
            except (ValueError, TypeError):
                raise InvalidSettings(
                    "Invalid JSON specified for APNS config options")
        if ns.gcm_enabled:
            # Create a common gcmclient
            try:
                sender_ids = json.loads(ns.senderid_list)
            except (ValueError, TypeError):
                raise InvalidSettings(
                    "Invalid JSON specified for senderid_list")
            try:
                # This is an init check to verify that things are
                # configured correctly. Otherwise errors may creep in
                # later that go unaccounted.
                sender_ids[sender_ids.keys()[0]]
            except (IndexError, TypeError):
                raise InvalidSettings("No GCM SenderIDs specified or found.")
            router_conf["gcm"] = {
                "ttl": ns.gcm_ttl,
                "dryrun": ns.gcm_dryrun,
                "max_data": ns.max_data,
                "collapsekey": ns.gcm_collapsekey,
                "senderIDs": sender_ids
            }

        client_certs = None
        # endpoint only
        if getattr(ns, 'client_certs', None):
            try:
                client_certs_arg = json.loads(ns.client_certs)
            except (ValueError, TypeError):
                raise InvalidSettings(
                    "Invalid JSON specified for client_certs")
            if client_certs_arg:
                if not ns.ssl_key:
                    raise InvalidSettings("client_certs specified without SSL "
                                          "enabled (no ssl_key specified)")
                client_certs = {}
                for name, sigs in client_certs_arg.iteritems():
                    if not isinstance(sigs, list):
                        raise InvalidSettings(
                            "Invalid JSON specified for client_certs")
                    for sig in sigs:
                        sig = sig.upper()
                        if (not name or not CLIENT_SHA256_RE.match(sig)
                                or sig in client_certs):
                            raise InvalidSettings(
                                "Invalid client_certs argument")
                        client_certs[sig] = name

        if ns.fcm_enabled:
            # Create a common gcmclient
            if not ns.fcm_auth:
                raise InvalidSettings("No Authorization Key found for FCM")
            if not ns.fcm_senderid:
                raise InvalidSettings("No SenderID found for FCM")
            router_conf["fcm"] = {
                "ttl": ns.fcm_ttl,
                "dryrun": ns.fcm_dryrun,
                "max_data": ns.max_data,
                "collapsekey": ns.fcm_collapsekey,
                "auth": ns.fcm_auth,
                "senderid": ns.fcm_senderid
            }

        ami_id = None
        # Not a fan of double negatives, but this makes the args
        # easier to understand
        if not ns.no_aws:
            ami_id = get_amid()

        return cls(crypto_key=ns.crypto_key,
                   datadog_api_key=ns.datadog_api_key,
                   datadog_app_key=ns.datadog_app_key,
                   datadog_flush_interval=ns.datadog_flush_interval,
                   hostname=ns.hostname,
                   statsd_host=ns.statsd_host,
                   statsd_port=ns.statsd_port,
                   router_conf=router_conf,
                   router_tablename=ns.router_tablename,
                   storage_tablename=ns.storage_tablename,
                   storage_read_throughput=ns.storage_read_throughput,
                   storage_write_throughput=ns.storage_write_throughput,
                   message_tablename=ns.message_tablename,
                   message_read_throughput=ns.message_read_throughput,
                   message_write_throughput=ns.message_write_throughput,
                   router_read_throughput=ns.router_read_throughput,
                   router_write_throughput=ns.router_write_throughput,
                   resolve_hostname=ns.resolve_hostname,
                   wake_timeout=ns.wake_timeout,
                   ami_id=ami_id,
                   client_certs=client_certs,
                   msg_limit=ns.msg_limit,
                   connect_timeout=ns.connection_timeout,
                   memusage_port=ns.memusage_port,
                   ssl_key=ns.ssl_key,
                   ssl_cert=ns.ssl_cert,
                   ssl_dh_param=ns.ssl_dh_param,
                   **kwargs)

    def update(self, **kwargs):
        """Update the arguments, if a ``crypto_key`` is in kwargs then the
        ``self.fernet`` attribute will be initialized"""
        for key, val in kwargs.items():
            if key == "crypto_key":
                fkeys = []
                if not isinstance(val, list):
                    val = [val]
                for v in val:
                    fkeys.append(Fernet(v))
                self.fernet = MultiFernet(fkeys)
            else:
                setattr(self, key, val)

    def make_simplepush_endpoint(self, uaid, chid):
        """Create a simplepush endpoint"""
        root = self.endpoint_url + "/spush/"
        base = (uaid.replace('-', '').decode("hex") +
                chid.replace('-', '').decode("hex"))
        return root + 'v1/' + self.fernet.encrypt(base).strip('=')

    def make_endpoint(self, uaid, chid, key=None):
        """Create an v1 or v2 WebPush endpoint from the identifiers.

        Both endpoints use bytes instead of hex to reduce ID length.
        v1 is the uaid + chid
        v2 is the uaid + chid + sha256(key).bytes

        :param uaid: User Agent Identifier
        :param chid: Channel or Subscription ID
        :param key: Optional Base64 URL-encoded application server key
        :returns: Push endpoint

        """
        root = self.endpoint_url + '/wpush/'
        base = (uaid.replace('-', '').decode("hex") +
                chid.replace('-', '').decode("hex"))

        if key is None:
            return root + 'v1/' + self.fernet.encrypt(base).strip('=')

        raw_key = base64url_decode(key.encode('utf8'))
        ep = self.fernet.encrypt(base + sha256(raw_key).digest()).strip('=')
        return root + 'v2/' + ep
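        # Layout note: the v2 plaintext is the 16-byte uaid, the 16-byte chid,
        # and the 32-byte sha256(raw_key) digest, 64 bytes in all, which is
        # what parse_endpoint below enforces with its len(token) != 64 check.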

    def parse_endpoint(self,
                       metrics,
                       token,
                       version="v1",
                       ckey_header=None,
                       auth_header=None):
        """Parse an endpoint into component elements of UAID, CHID and optional
        key hash if v2

        :param token: The obscured subscription data.
        :param version: This is the API version of the token.
        :param ckey_header: the Crypto-Key header bearing the public key
            (from Crypto-Key: p256ecdsa=)
        :param auth_header: The Authorization header bearing the VAPID info

        :raises ValueError: In the case of a malformed endpoint.

        :returns: a dict containing (uaid=UAID, chid=CHID, public_key=KEY)

        """
        token = self.fernet.decrypt(repad(token).encode('utf8'))
        public_key = None
        if ckey_header:
            try:
                crypto_key = CryptoKey(ckey_header)
            except CryptoKeyException:
                raise InvalidTokenException("Invalid key data")
            public_key = crypto_key.get_label('p256ecdsa')
        if auth_header:
            vapid_auth = parse_auth_header(auth_header)
            if not vapid_auth:
                raise VapidAuthException("Invalid Auth token")
            metrics.increment("updates.notification.auth.{}".format(
                vapid_auth['scheme']))
            # pull the public key from the VAPID auth header if needed
            try:
                if vapid_auth['version'] != 1:
                    public_key = vapid_auth['k']
            except KeyError:
                raise VapidAuthException("Missing Public Key")
        if version == 'v1' and len(token) != 32:
            raise InvalidTokenException("Corrupted push token")
        if version == 'v2':
            if not auth_header:
                raise VapidAuthException("Missing Authorization Header")
            if len(token) != 64:
                raise InvalidTokenException("Corrupted push token")
            if not public_key:
                raise VapidAuthException("Invalid key data")
            try:
                decoded_key = base64url_decode(public_key)
            except TypeError:
                raise VapidAuthException("Invalid key data")
            if not constant_time.bytes_eq(
                    sha256(decoded_key).digest(), token[32:]):
                raise VapidAuthException("Key mismatch")
        return dict(uaid=token[:16].encode('hex'),
                    chid=token[16:32].encode('hex'),
                    version=version,
                    public_key=public_key)
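Note the symmetry between the two directions: make_endpoint strips the Fernet token's trailing '=' padding to shorten the URL, and parse_endpoint restores it with repad() before decrypting.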
Example #17
class ChannelLayer(BaseChannelLayer):
    """
    Redis channel layer.
    It routes all messages into remote Redis server. Support for
    sharding among different Redis installations and message
    encryption are provided.
    """

    blpop_timeout = 5
    queue_get_timeout = 10

    def __init__(
        self,
        hosts=None,
        prefix="anthill",
        expiry=60,
        group_expiry=86400,
        capacity=100,
        channel_capacity=None,
        symmetric_encryption_keys=None,
    ):
        # Store basic information
        self.expiry = expiry
        self.group_expiry = group_expiry
        self.capacity = capacity
        self.channel_capacity = self.compile_capacities(channel_capacity or {})
        self.prefix = prefix
        assert isinstance(self.prefix, str), "Prefix must be unicode"
        # Cached redis connection pools and the event loop they are from
        self.pools = {}
        self.pools_loop = None
        # Configure the host objects
        self.hosts = self.decode_hosts(hosts)
        self.ring_size = len(self.hosts)
        # Normal channels choose a host index by cycling through the available hosts
        self._receive_index_generator = itertools.cycle(range(len(self.hosts)))
        self._send_index_generator = itertools.cycle(range(len(self.hosts)))
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(
            random.choice(string.ascii_letters) for i in range(8))
        # Set up any encryption objects
        self._setup_encryption(symmetric_encryption_keys)
        # Number of coroutines trying to receive right now
        self.receive_count = 0
        # Event loop they are trying to receive on
        self.receive_event_loop = None
        # Main receive loop running
        self.receive_loop_task = None
        # Buffered messages by process-local channel name
        self.receive_buffer = collections.defaultdict(asyncio.Queue)

    def decode_hosts(self, hosts):
        """
        Takes the value of the "hosts" argument passed to the class and returns
        a list of kwargs to use for the Redis connection constructor.
        """
        # If no hosts were provided, return a default value
        if not hosts:
            return [{"address": ("localhost", 6379)}]
        # If they provided just a string, scold them.
        if isinstance(hosts, (str, bytes)):
            raise ValueError(
                "You must pass a list of Redis hosts, even if there is only one."
            )
        # Decode each hosts entry into a kwargs dict
        result = []
        for entry in hosts:
            result.append({"address": entry})
        return result

    def _setup_encryption(self, symmetric_encryption_keys):
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, (str, bytes)):
                raise ValueError(
                    "symmetric_encryption_keys must be a list of possible keys"
                )
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError(
                    "Cannot run with encryption without 'cryptography' installed."
                )
            sub_fernets = [
                self.make_fernet(key) for key in symmetric_encryption_keys
            ]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    # Channel layer API #

    extensions = ["groups", "flush"]

    async def send(self, channel, message):
        """
        Send a message onto a (general or specific) channel.
        """
        # Type check
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Make sure the message does not contain reserved keys
        assert "__anthill_channel__" not in message
        # If it's a process-local channel, strip off local part and stick full name in message
        channel_non_local_name = channel
        if "!" in channel:
            message = dict(message.items())
            message["__anthill_channel__"] = channel
            channel_non_local_name = self.non_local_name(channel)
        # Write out message into expiring key (avoids big items in list)
        channel_key = self.prefix + channel_non_local_name
        # Pick a connection to the right server - consistent for specific
        # channels, random for general channels
        if "!" in channel:
            index = self.consistent_hash(channel)
        else:
            index = next(self._send_index_generator)
        async with self.connection(index) as connection:
            # Check the length of the list before send
            # This can allow the list to leak slightly over capacity, but that's fine.
            if await connection.llen(channel_key) >= self.get_capacity(
                    channel):
                raise ChannelFull()
            # Push onto the list then set it to expire in case it's not consumed
            await connection.rpush(channel_key, self.serialize(message))
            await connection.expire(channel_key, int(self.expiry))

    async def receive(self, channel):
        """
        Receive the first message that arrives on the channel.
        If more than one coroutine waits on the same channel, the first waiter
        will be given the message when it arrives.
        """
        # Make sure the channel name is valid then get the non-local part
        # and thus its index
        assert self.valid_channel_name(channel)
        if "!" in channel:
            real_channel = self.non_local_name(channel)
            assert real_channel.endswith(self.client_prefix +
                                         "!"), "Wrong client prefix"
            # Enter receiving section
            loop = asyncio.get_event_loop()
            self.receive_count += 1
            try:
                if self.receive_count == 1:
                    # If we're the first coroutine in, make a receive loop!
                    general_channel = self.non_local_name(channel)
                    self.receive_loop_task = loop.create_task(
                        self.receive_loop(general_channel))
                    self.receive_event_loop = loop
                else:
                    # Otherwise, check our event loop matches
                    if self.receive_event_loop != loop:
                        raise RuntimeError(
                            "Two event loops are trying to receive() on one channel layer at once!"
                        )
                    if self.receive_loop_task.done():
                        # Maybe raise an exception from the task
                        self.receive_loop_task.result()
                        # Raise our own exception if that failed
                        raise RuntimeError("Redis receive loop exited early")

                # Wait for our message to appear
                while True:
                    try:
                        message = await asyncio.wait_for(
                            self.receive_buffer[channel].get(),
                            self.queue_get_timeout)
                        if self.receive_buffer[channel].empty():
                            del self.receive_buffer[channel]
                        return message
                    except asyncio.TimeoutError:
                        # See if we need to propagate a dead receiver exception
                        if self.receive_loop_task.done():
                            self.receive_loop_task.result()

            finally:
                self.receive_count -= 1
                # If we were the last out, stop the receive loop
                if self.receive_count == 0:
                    self.receive_loop_task.cancel()
        else:
            # Do a plain direct receive
            return (await self.receive_single(channel))[1]

    async def receive_loop(self, general_channel):
        """
        Continuous-receiving loop that makes sure something is fetching results
        for the channel passed in.
        """
        assert general_channel.endswith(
            "!"
        ), "receive_loop not called on general queue of process-local channel"
        while True:
            real_channel, message = await self.receive_single(general_channel)
            await self.receive_buffer[real_channel].put(message)

    async def receive_single(self, channel):
        """
        Receives a single message off of the channel and returns it.
        """
        # Check channel name
        assert self.valid_channel_name(channel,
                                       receive=True), "Channel name invalid"
        # Work out the connection to use
        if "!" in channel:
            assert channel.endswith("!")
            index = self.consistent_hash(channel)
        else:
            index = next(self._receive_index_generator)
        # Get that connection and receive off of it
        async with self.connection(index) as connection:
            channel_key = self.prefix + channel
            content = None
            while content is None:
                content = await connection.blpop(channel_key,
                                                 timeout=self.blpop_timeout)
            # Message decode
            message = self.deserialize(content[1])
            # TODO: message expiry?
            # If there is a full channel name stored in the message, unpack it.
            if "__anthill_channel__" in message:
                channel = message["__anthill_channel__"]
                del message["__anthill_channel__"]
            return channel, message

    async def new_channel(self, prefix="specific"):
        """
        Returns a new channel name that can be used by something in our
        process as a specific channel.
        """
        # TODO: Guarantee uniqueness better?
        return "%s.%s!%s" % (
            prefix,
            self.client_prefix,
            "".join(random.choice(string.ascii_letters) for i in range(12)),
        )

    # Flush extension #

    async def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        # Lua deletion script
        delete_prefix = """
            local keys = redis.call('keys', ARGV[1])
            for i=1,#keys,5000 do
                redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
            end
        """
        # Go through each connection and remove all with prefix
        for i in range(self.ring_size):
            async with self.connection(i) as connection:
                await connection.eval(delete_prefix,
                                      keys=[],
                                      args=[self.prefix + "*"])

    # Groups extension #

    async def group_add(self, group, channel):
        """
        Adds the channel name to a group.
        """
        # Check the inputs
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Get a connection to the right shard
        group_key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            # Add to group sorted set with creation time as timestamp
            await connection.zadd(
                group_key,
                time.time(),
                channel,
            )
            # Set expiration to be group_expiry, since everything in
            # it at this point is guaranteed to expire before that
            await connection.expire(group_key, self.group_expiry)

    async def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            await connection.zrem(key, channel)

    async def group_send(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # Retrieve list of all channel names
        key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            # Discard old channels based on group_expiry
            await connection.zremrangebyscore(key,
                                              min=0,
                                              max=int(time.time()) -
                                              self.group_expiry)

            # Return current lot
            channel_names = [
                x.decode("utf8") for x in await connection.zrange(key, 0, -1)
            ]

        connection_to_channels, channel_to_message, channel_to_capacity, channel_to_key = \
            self._map_channel_to_connection(channel_names, message)

        for connection_index, channel_redis_keys in connection_to_channels.items():
            # Create a LUA script specific for this connection.
            # Make sure to use the message specific to this channel, it is
            # stored in channel_to_message dict and contains the
            # __anthill_channel__ key.
            group_send_lua = """
                for i=1,#KEYS do
                    if redis.call('LLEN', KEYS[i]) < tonumber(ARGV[i + #KEYS]) then
                        redis.call('RPUSH', KEYS[i], ARGV[i])
                        redis.call('EXPIRE', KEYS[i], %d)
                    end
                end
            """ % self.expiry

            # We need to filter the messages to keep those related to the connection
            args = [
                channel_to_message[channel_name]
                for channel_name in channel_names
                if channel_to_key[channel_name] in channel_redis_keys
            ]

            # We need to send the capacity for each channel
            args += [
                channel_to_capacity[channel_name]
                for channel_name in channel_names
                if channel_to_key[channel_name] in channel_redis_keys
            ]

            async with self.connection(connection_index) as connection:
                await connection.eval(group_send_lua,
                                      keys=channel_redis_keys,
                                      args=args)

    def _map_channel_to_connection(self, channel_names, message):
        """
        For a list of channel names, bucket each one to a dict keyed by the
        connection index.
        Also for each channel create a message specific to that channel, adding
        the __anthill_channel__ key to the message.
        We also return a mapping from channel names to their corresponding Redis
        keys, and a mapping of channels to their capacity.
        """
        connection_to_channels = collections.defaultdict(list)
        channel_to_message = dict()
        channel_to_capacity = dict()
        channel_to_key = dict()

        for channel in channel_names:
            channel_non_local_name = channel
            if "!" in channel:
                message = dict(message.items())
                message["__anthill_channel__"] = channel
                channel_non_local_name = self.non_local_name(channel)
            channel_key = self.prefix + channel_non_local_name
            idx = self.consistent_hash(channel_non_local_name)
            connection_to_channels[idx].append(channel_key)
            channel_to_capacity[channel] = self.get_capacity(channel)
            channel_to_message[channel] = self.serialize(message)
            # Also remember each channel's Redis key, used for filtering in group_send
            channel_to_key[channel] = channel_key

        return connection_to_channels, channel_to_message, channel_to_capacity, channel_to_key

    def _group_key(self, group):
        """
        Common function to make the storage key for the group.
        """
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    # Serialization #

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, raw=False)

    # Internal functions #

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC, then down to one of the ring nodes.
        """
        if isinstance(value, str):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xfff
        ring_divisor = 4096 / float(self.ring_size)
        return int(bigval / ring_divisor)

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, str):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)

    # Connection handling #

    def connection(self, index):
        """
        Returns the correct connection for the index given.
        Lazily instantiates pools.
        """
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" %
                             (self.ring_size, index))
        # Make a context manager
        return self.ConnectionContextManager(self.hosts[index])

    class ConnectionContextManager:
        """
        Async context manager for connections
        """
        def __init__(self, kwargs):
            self.kwargs = kwargs

        async def __aenter__(self):
            self.conn = await aioredis.create_redis(**self.kwargs)
            return self.conn

        async def __aexit__(self, exc_type, exc, tb):
            self.conn.close()
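Unlike the test examples above, this layer's make_fernet hashes whatever secret it is given with SHA-256 and urlsafe-base64-encodes the digest, so any string can serve as a key:

layer = ChannelLayer(symmetric_encryption_keys=["any passphrase works here"])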
Example #18
 def Fernet(self):
     try:
         return MultiFernet(EncryptedField._Tokens)
     except Exception:
         raise EncryptedField.KeyIsUndefined
Example #19
class RedisChannelLayer(BaseChannelLayer):
    """
    Redis channel layer.

    It routes all messages into remote Redis server. Support for
    sharding among different Redis installations and message
    encryption are provided.
    """

    brpop_timeout = 5

    def __init__(
        self,
        hosts=None,
        prefix="asgi:",
        expiry=60,
        group_expiry=86400,
        capacity=100,
        channel_capacity=None,
        symmetric_encryption_keys=None,
    ):
        # Store basic information
        self.expiry = expiry
        self.group_expiry = group_expiry
        self.capacity = capacity
        self.channel_capacity = self.compile_capacities(channel_capacity or {})
        self.prefix = prefix
        assert isinstance(self.prefix, str), "Prefix must be unicode"
        # Configure the host objects
        self.hosts = self.decode_hosts(hosts)
        self.ring_size = len(self.hosts)
        # Cached redis connection pools and the event loop they are from
        self.pools = [ConnectionPool(host) for host in self.hosts]
        # Normal channels choose a host index by cycling through the available hosts
        self._receive_index_generator = itertools.cycle(range(len(self.hosts)))
        self._send_index_generator = itertools.cycle(range(len(self.hosts)))
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(
            random.choice(string.ascii_letters) for i in range(8)
        )
        # Set up any encryption objects
        self._setup_encryption(symmetric_encryption_keys)
        # Number of coroutines trying to receive right now
        self.receive_count = 0
        # The receive lock
        self.receive_lock = None
        # Event loop they are trying to receive on
        self.receive_event_loop = None
        # Buffered messages by process-local channel name
        self.receive_buffer = collections.defaultdict(asyncio.Queue)
        # Detached channel cleanup tasks
        self.receive_cleaners = []
        # Per-channel cleanup locks to prevent a receive starting and moving
        # a message back into the main queue before its cleanup has completed
        self.receive_clean_locks = ChannelLock()
        logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        self.my_logging = logging.getLogger("zdz")

    def decode_hosts(self, hosts):
        """
        Takes the value of the "hosts" argument passed to the class and returns
        a list of kwargs to use for the Redis connection constructor.
        """
        # If no hosts were provided, return a default value
        if not hosts:
            return [{"address": ("localhost", 6379)}]
        # If they provided just a string, scold them.
        if isinstance(hosts, (str, bytes)):
            raise ValueError(
                "You must pass a list of Redis hosts, even if there is only one."
            )
        # Decode each hosts entry into a kwargs dict
        result = []
        for entry in hosts:
            if isinstance(entry, dict):
                result.append(entry)
            else:
                result.append({"address": entry})
        return result

    def _setup_encryption(self, symmetric_encryption_keys):
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, (str, bytes)):
                raise ValueError(
                    "symmetric_encryption_keys must be a list of possible keys"
                )
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError(
                    "Cannot run with encryption without 'cryptography' installed."
                )
            sub_fernets = [self.make_fernet(key) for key in symmetric_encryption_keys]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    ### Channel layer API ###

    extensions = ["groups", "flush"]

    async def send(self, channel, message):
        """
        Send a message onto a (general or specific) channel.
        """
        # Typecheck
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Make sure the message does not contain reserved keys
        assert "__asgi_channel__" not in message
        # If it's a process-local channel, strip off local part and stick full name in message
        channel_non_local_name = channel
        if "!" in channel:
            message = dict(message.items())
            message["__asgi_channel__"] = channel
            channel_non_local_name = self.non_local_name(channel)
        # Write out message into expiring key (avoids big items in list)
        channel_key = self.prefix + channel_non_local_name
        # Pick a connection to the right server - consistent for specific
        # channels, random for general channels
        if "!" in channel:
            index = self.consistent_hash(channel)
        else:
            index = next(self._send_index_generator)
        async with self.connection(index) as connection:
            # Check the length of the list before send
            # This can allow the list to leak slightly over capacity, but that's fine.
            if await connection.llen(channel_key) >= self.get_capacity(channel):
                raise ChannelFull()
            # Push onto the list then set it to expire in case it's not consumed
            await connection.lpush(channel_key, self.serialize(message))
            await connection.expire(channel_key, int(self.expiry))

    def _backup_channel_name(self, channel):
        """
        Construct the key used as a backup queue for the given channel.
        """
        return channel + "$inflight"

    async def _brpop_with_clean(self, index, channel, timeout):
        """
        Perform a Redis BRPOP and manage the backup processing queue.
        In case of cancellation, make sure the message is not lost.
        """
        # The script will pop messages from the processing queue and push them in front
        # of the main message queue in the proper order; BRPOP must *not* be called
        # because that would deadlock the server
        cleanup_script = """
            local backed_up = redis.call('LRANGE', ARGV[2], 0, -1)
            for i = #backed_up, 1, -1 do
                redis.call('LPUSH', ARGV[1], backed_up[i])
            end
            redis.call('DEL', ARGV[2])
        """
        backup_queue = self._backup_channel_name(channel)
        async with self.connection(index) as connection:
            # Cancellation here doesn't matter, we're not doing anything destructive
            # and the script executes atomically...
            await connection.eval(cleanup_script, keys=[], args=[channel, backup_queue])
            # ...and it doesn't matter here either, the message will be safe in the backup.
            return await connection.brpoplpush(channel, backup_queue, timeout=timeout)

    async def _clean_receive_backup(self, index, channel):
        """
        Pop the oldest message off the channel backup queue.
        The result isn't interesting as it was already processed.
        """
        async with self.connection(index) as connection:
            await connection.brpop(self._backup_channel_name(channel))

    async def receive(self, channel):
        """
        Receive the first message that arrives on the channel.
        If more than one coroutine waits on the same channel, the first waiter
        will be given the message when it arrives.
        """
        # Make sure the channel name is valid then get the non-local part
        # and thus its index
        assert self.valid_channel_name(channel)
        if "!" in channel:
            real_channel = self.non_local_name(channel)
            assert real_channel.endswith(
                self.client_prefix + "!"
            ), "Wrong client prefix"
            # Enter receiving section
            loop = asyncio.get_event_loop()
            self.receive_count += 1
            try:
                if self.receive_count == 1:
                    # If we're the first coroutine in, create the receive lock!
                    self.receive_lock = asyncio.Lock()
                    self.receive_event_loop = loop
                else:
                    # Otherwise, check our event loop matches
                    if self.receive_event_loop != loop:
                        raise RuntimeError(
                            "Two event loops are trying to receive() on one channel layer at once!"
                        )

                # Wait for our message to appear
                message = None
                while self.receive_buffer[channel].empty():
                    tasks = [
                        self.receive_lock.acquire(),
                        self.receive_buffer[channel].get(),
                    ]
                    tasks = [asyncio.ensure_future(task) for task in tasks]
                    try:
                        done, pending = await asyncio.wait(
                            tasks, return_when=asyncio.FIRST_COMPLETED
                        )
                        for task in pending:
                            # Cancel all pending tasks.
                            task.cancel()
                    except asyncio.CancelledError:
                        # Ensure all tasks are cancelled if we are cancelled.
                        # Also see: https://bugs.python.org/issue23859
                        del self.receive_buffer[channel]
                        for task in tasks:
                            if not task.cancel():
                                assert task.done()
                                if task.result() is True:
                                    self.receive_lock.release()

                        raise

                    message, token, exception = None, None, None
                    for task in done:
                        try:
                            result = task.result()
                        except Exception as error:  # NOQA
                            # We should not propagate exceptions immediately as otherwise this may cause
                            # the lock to be held and never be released.
                            exception = error
                            continue

                        if result is True:
                            token = result
                        else:
                            assert isinstance(result, dict)
                            message = result

                    if message or exception:
                        if token:
                            # We will not be receiving, as we already have the message.
                            self.receive_lock.release()

                        if exception:
                            raise exception
                        else:
                            break
                    else:
                        assert token

                        # We hold the receive lock, receive and then release it.
                        try:
                            # There is no interruption point from when the message is
                            # unpacked in receive_single to when we get back here, so
                            # the following lines are essentially atomic.
                            message_channel, message = await self.receive_single(
                                real_channel
                            )
                            if type(message_channel) is list:
                                for chan in message_channel:
                                    self.receive_buffer[chan].put_nowait(message)
                            else:
                                self.receive_buffer[message_channel].put_nowait(message)
                            message = None
                        except BaseException:
                            # Clean up the buffer on any failure, including
                            # cancellation, so stale entries don't linger.
                            del self.receive_buffer[channel]
                            raise
                        finally:
                            self.receive_lock.release()

                # We know there's a message available, because there
                # couldn't have been any interruption between empty() and here
                if message is None:
                    message = self.receive_buffer[channel].get_nowait()

                if self.receive_buffer[channel].empty():
                    del self.receive_buffer[channel]
                return message

            finally:
                self.receive_count -= 1
                # If we were the last out, drop the receive lock
                if self.receive_count == 0:
                    assert not self.receive_lock.locked()
                    self.receive_lock = None
                    self.receive_event_loop = None
        else:
            # Do a plain direct receive
            return (await self.receive_single(channel))[1]

    async def receive_single(self, channel):
        """
        Receives a single message off of the channel and returns it.
        """
        # Check channel name
        assert self.valid_channel_name(channel, receive=True), "Channel name invalid"
        # Work out the connection to use
        if "!" in channel:
            assert channel.endswith("!")
            index = self.consistent_hash(channel)
        else:
            index = next(self._receive_index_generator)

        channel_key = self.prefix + channel
        content = None
        await self.receive_clean_locks.acquire(channel_key)
        try:
            while content is None:
                # Nothing is lost here by cancellations, messages will still
                # be in the backup queue.
                content = await self._brpop_with_clean(
                    index, channel_key, timeout=self.brpop_timeout
                )

            # Fire off a task to clean the message from its backup queue.
            # Per-channel locking isn't needed, because the backup is a queue
            # and additionally, we don't care about the order; all processed
            # messages need to be removed, no matter if the current one is
            # removed after the next one.
            # NOTE: Duplicate messages will be received eventually if any
            # of these cleaners are cancelled.
            cleaner = asyncio.ensure_future(
                self._clean_receive_backup(index, channel_key)
            )
            self.receive_cleaners.append(cleaner)

            def _cleanup_done(cleaner):
                self.receive_cleaners.remove(cleaner)
                self.receive_clean_locks.release(channel_key)

            cleaner.add_done_callback(_cleanup_done)

        except Exception:
            self.receive_clean_locks.release(channel_key)
            raise

        # Message decode
        message = self.deserialize(content)
        # TODO: message expiry?
        # If there is a full channel name stored in the message, unpack it.
        if "__asgi_channel__" in message:
            channel = message["__asgi_channel__"]
            del message["__asgi_channel__"]
        return channel, message

    async def new_channel(self, prefix="specific"):
        """
        Returns a new channel name that can be used by something in our
        process as a specific channel.
        """
        # TODO: Guarantee uniqueness better?
        return "%s.%s!%s" % (
            prefix,
            self.client_prefix,
            "".join(random.choice(string.ascii_letters) for i in range(12)),
        )

    ### Flush extension ###

    async def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        # Make sure all channel cleaners have finished before removing
        # keys from under their feet.
        await self.wait_received()

        # Lua deletion script
        delete_prefix = """
            local keys = redis.call('keys', ARGV[1])
            for i=1,#keys,5000 do
                redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
            end
        """
        # Go through each connection and remove all with prefix
        for i in range(self.ring_size):
            async with self.connection(i) as connection:
                await connection.eval(delete_prefix, keys=[], args=[self.prefix + "*"])
        # Now clear the pools as well
        await self.close_pools()

    async def close_pools(self):
        """
        Close all connections in the event loop pools.
        """
        # Flush all cleaners, in case somebody just wanted to close the
        # pools without flushing first.
        await self.wait_received()

        for pool in self.pools:
            await pool.close()

    async def wait_received(self):
        """
        Wait for all channel cleanup functions to finish.
        """
        if self.receive_cleaners:
            await asyncio.wait(self.receive_cleaners[:])

    ### Groups extension ###

    async def group_add(self, group, channel):
        """
        Adds the channel name to a group.
        """
        # Check the inputs
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Get a connection to the right shard
        group_key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            # Add to group sorted set with creation time as timestamp
            await connection.zadd(group_key, time.time(), channel)
            # Set expiration to be group_expiry, since everything in
            # it at this point is guaranteed to expire before that
            await connection.expire(group_key, self.group_expiry)

    async def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            await connection.zrem(key, channel)

    async def group_send(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # Retrieve list of all channel names
        key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            # Discard old channels based on group_expiry
            await connection.zremrangebyscore(
                key, min=0, max=int(time.time()) - self.group_expiry
            )

            channel_names = [
                x.decode("utf8") for x in await connection.zrange(key, 0, -1)
            ]

        connection_to_channel_keys, channel_keys_to_message, channel_keys_to_capacity = self._map_channel_keys_to_connection(
            channel_names, message
        )

        for connection_index, channel_redis_keys in connection_to_channel_keys.items():

            # Create a LUA script specific for this connection.
            # Make sure to use the message specific to this channel, it is
            # stored in channel_to_message dict and contains the
            # __asgi_channel__ key.

            group_send_lua = (
                """
                    for i=1,#KEYS do
                        if redis.call('LLEN', KEYS[i]) < tonumber(ARGV[i + #KEYS]) then
                            redis.call('LPUSH', KEYS[i], ARGV[i])
                            redis.call('EXPIRE', KEYS[i], %d)
                        end
                    end
                    """
                % self.expiry
            )

            # We need to filter the messages to keep those related to the connection
            args = [
                channel_keys_to_message[channel_key]
                for channel_key in channel_redis_keys
            ]

            # We need to send the capacity for each channel
            args += [
                channel_keys_to_capacity[channel_key]
                for channel_key in channel_redis_keys
            ]
            self.my_logging.info("group_send_lua:" + group_send_lua)
            for item in channel_redis_keys:
                self.my_logging.info("keys: " + item)
            for item in args:
                self.my_logging.info("args: " + bytes.decode(item))
            # channel_keys does not contain a single redis key more than once
            async with self.connection(connection_index) as connection:
                await connection.eval(
                    group_send_lua, keys=channel_redis_keys, args=args
                )

    def _map_channel_to_connection(self, channel_names, message):
        """
        For a list of channel names, bucket each one into a dict keyed by the
        connection index.
        Also, for each channel, create a message specific to that channel by
        adding the __asgi_channel__ key to the message.
        We also return a mapping from channel names to their corresponding
        Redis keys, and a mapping of channels to their capacity.
        """
        connection_to_channels = collections.defaultdict(list)
        channel_to_message = dict()
        channel_to_capacity = dict()
        channel_to_key = dict()

        for channel in channel_names:
            channel_non_local_name = channel
            if "!" in channel:
                message = dict(message.items())
                message["__asgi_channel__"] = channel
                channel_non_local_name = self.non_local_name(channel)
            channel_key = self.prefix + channel_non_local_name
            idx = self.consistent_hash(channel_non_local_name)
            connection_to_channels[idx].append(channel_key)
            channel_to_capacity[channel] = self.get_capacity(channel)
            channel_to_message[channel] = self.serialize(message)
            # Also build a reverse mapping from channel name to its Redis key
            channel_to_key[channel] = channel_key

        return (
            connection_to_channels,
            channel_to_message,
            channel_to_capacity,
            channel_to_key,
        )

    def _map_channel_keys_to_connection(self, channel_names, message):
        """
        For a list of channel names:

        1. bucket their Redis keys into a dict keyed by the connection index

        2. for each unique channel Redis key, create a serialized message
           specific to that key by adding the list of channels mapped to that
           key under __asgi_channel__ in the message

        3. return a mapping of channel Redis keys to their capacity
        """

        # Connection dict keyed by index to list of redis keys mapped on that index
        connection_to_channel_keys = collections.defaultdict(list)
        # Message dict maps each Redis key to the message that needs to be sent on that key
        channel_key_to_message = dict()
        # Channel key mapped to its capacity
        channel_key_to_capacity = dict()

        # For each channel
        for channel in channel_names:
            channel_non_local_name = channel
            if "!" in channel:
                channel_non_local_name = self.non_local_name(channel)
            # Get its redis key
            channel_key = self.prefix + channel_non_local_name
            # Have we come across the same redis key?
            if channel_key not in channel_key_to_message:
                # If not, fill the corresponding dicts
                message = dict(message.items())
                message["__asgi_channel__"] = [channel]
                channel_key_to_message[channel_key] = message
                channel_key_to_capacity[channel_key] = self.get_capacity(channel)
                idx = self.consistent_hash(channel_non_local_name)
                connection_to_channel_keys[idx].append(channel_key)
            else:
                # If so, append the channel to the existing message's list
                channel_key_to_message[channel_key]["__asgi_channel__"].append(channel)

        # Now that we know what message needs to be sent on each Redis key we serialize it
        for key in channel_key_to_message:
            # Serialize the message stored for each redis key
            channel_key_to_message[key] = self.serialize(channel_key_to_message[key])

        return (
            connection_to_channel_keys,
            channel_key_to_message,
            channel_key_to_capacity,
        )

    def _group_key(self, group):
        """
        Common function to make the storage key for the group.
        """
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    ### Serialization ###

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, raw=False)

    ### Internal functions ###

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC, then down to one of the ring nodes.
        """
        if isinstance(value, str):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xFFF
        ring_divisor = 4096 / float(self.ring_size)
        return int(bigval / ring_divisor)

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet

        if isinstance(key, str):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)

    ### Connection handling ###

    def connection(self, index):
        """
        Returns the correct connection for the index given.
        Lazily instantiates pools.
        """
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError(
                "There are only %s hosts - you asked for %s!" % (self.ring_size, index)
            )
        # Make a context manager
        return self.ConnectionContextManager(self.pools[index])

    class ConnectionContextManager:
        """
        Async context manager for connections
        """

        def __init__(self, pool):
            self.pool = pool

        async def __aenter__(self):
            self.conn = await self.pool.pop()
            return self.conn

        async def __aexit__(self, exc_type, exc, tb):
            if exc:
                self.pool.conn_error(self.conn)
            else:
                self.pool.push(self.conn)
            self.conn = None
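The consistent_hash method above is what pins a channel or group to one shard. As a standalone sketch (the host list here is hypothetical), the same ring selection looks like this:

import binascii

hosts = ["redis://a:6379", "redis://b:6379", "redis://c:6379"]
ring_size = len(hosts)

def pick_shard(name: str) -> int:
    # CRC-32 of the name, masked to 12 bits, then bucketed onto the ring
    bigval = binascii.crc32(name.encode("utf8")) & 0xFFF
    ring_divisor = 4096 / float(ring_size)
    return int(bigval / ring_divisor)

# The same name always lands on the same shard, which is what lets replies
# on a "specific" channel be read back from a single predictable server.
assert pick_shard("specific.abc!xyz") == pick_shard("specific.abc!xyz")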
Exemplo n.º 20
class PeerManager:
    """Manage Peer connections."""
    def __init__(self,
                 fernet_tokens: List[str],
                 throttling: Optional[int] = None):
        """Initialize Peer Manager."""
        self._fernet = MultiFernet([Fernet(key) for key in fernet_tokens])
        self._throttling = throttling
        self._peers = {}

    @property
    def connections(self) -> int:
        """Return count of connected devices."""
        return len(self._peers)

    def create_peer(self, fernet_data: bytes) -> Peer:
        """Create a new peer from crypt config."""
        try:
            data = self._fernet.decrypt(fernet_data).decode()
            config = json.loads(data)
        except (InvalidToken, json.JSONDecodeError):
            _LOGGER.warning("Invalid fernet token")
            raise SniTunInvalidPeer()

        # Check if token is valid
        valid = datetime.utcfromtimestamp(config["valid"])
        if valid < datetime.utcnow():
            _LOGGER.warning("Token was expired")
            raise SniTunInvalidPeer()

        # Extract configuration
        hostname = config["hostname"]
        aes_key = bytes.fromhex(config["aes_key"])
        aes_iv = bytes.fromhex(config["aes_iv"])

        return Peer(hostname,
                    valid,
                    aes_key,
                    aes_iv,
                    throttling=self._throttling)

    def add_peer(self, peer: Peer) -> None:
        """Register peer to internal hostname list."""
        if self.peer_available(peer.hostname):
            _LOGGER.warning("Found stale peer connection")
            self._peers[peer.hostname].multiplexer.shutdown()

        _LOGGER.debug("New peer connection: %s", peer.hostname)
        self._peers[peer.hostname] = peer

    def remove_peer(self, peer: Peer) -> None:
        """Remove peer from list."""
        if self._peers.get(peer.hostname) != peer:
            return
        _LOGGER.debug("Close peer connection: %s", peer.hostname)
        self._peers.pop(peer.hostname)

    def peer_available(self, hostname: str) -> bool:
        """Check if peer available and return True or False."""
        if hostname in self._peers:
            return self._peers[hostname].is_ready
        return False

    def get_peer(self, hostname: str) -> Optional[Peer]:
        """Get peer."""
        return self._peers.get(hostname)
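create_peer above expects a Fernet token whose plaintext is a JSON config with "valid", "hostname", "aes_key" and "aes_iv" fields. A minimal sketch of minting such a token on the issuing side (the key list and hostname are placeholders):

import json
import os
import time

from cryptography.fernet import Fernet, MultiFernet

keys = [Fernet.generate_key()]
crypt = MultiFernet([Fernet(k) for k in keys])

config = {
    "valid": time.time() + 3600,   # POSIX timestamp one hour from now
    "hostname": "example.local",   # placeholder hostname
    "aes_key": os.urandom(32).hex(),
    "aes_iv": os.urandom(16).hex(),
}
fernet_data = crypt.encrypt(json.dumps(config).encode())
# PeerManager([k.decode() for k in keys]).create_peer(fernet_data) would now
# yield a Peer for "example.local", provided the token has not expired.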
Exemplo n.º 21
}),
                  encoding="utf-8")
ciphertext = f.encrypt(plaintext)
with open("details.txt", "wb") as file:
    file.write(ciphertext)
print(plaintext, f.decrypt(ciphertext))

with open("details.txt", "rb") as file:
    token = file.read()
print(f.decrypt(token))

message = b"Secret message!"
password = b"password"
key1 = Fernet(Fernet.generate_key())
key2 = Fernet(Fernet.generate_key())
f = MultiFernet([key1, key2])

salt = os.urandom(16)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(),
                 length=32,
                 salt=salt,
                 iterations=390000)
key = base64.urlsafe_b64encode(kdf.derive(password))
f = Fernet(key)
print(salt)

token = f.encrypt(message)
print(token)
kdf = PBKDF2HMAC(algorithm=hashes.SHA256(),
                 length=32,
                 salt=salt,
                 iterations=390000)
Exemplo n.º 22
class DataManager(object):
    """Holds the internal state for a single Dask Gateway.

    Keeps the memory representation in-sync with the database.
    """
    def __init__(self, url="sqlite:///:memory:", encrypt_keys=(), **kwargs):
        if url.startswith("sqlite"):
            kwargs["connect_args"] = {"check_same_thread": False}

        if is_in_memory_db(url):
            kwargs["poolclass"] = StaticPool
            self.fernet = None
        else:
            self.fernet = MultiFernet([Fernet(key) for key in encrypt_keys])

        engine = create_engine(url, **kwargs)
        if url.startswith("sqlite"):
            register_foreign_keys(engine)

        metadata.create_all(engine)

        self.db = engine

        self.username_to_user = {}
        self.cookie_to_user = {}
        self.token_to_cluster = {}
        self.name_to_cluster = {}
        self.id_to_cluster = {}

    def load_database_state(self):
        # Load all existing users into memory
        id_to_user = {}
        for u in self.db.execute(users.select()):
            user = User(id=u.id, name=u.name, cookie=u.cookie)
            self.username_to_user[user.name] = user
            self.cookie_to_user[user.cookie] = user
            id_to_user[user.id] = user

        # Next load all existing clusters into memory
        for c in self.db.execute(clusters.select()):
            user = id_to_user[c.user_id]
            tls_cert, tls_key = self.decode_tls_credentials(c.tls_credentials)
            token = self.decode_token(c.token)
            cluster = Cluster(
                id=c.id,
                name=c.name,
                user=user,
                token=token,
                options=c.options,
                status=c.status,
                state=c.state,
                scheduler_address=c.scheduler_address,
                dashboard_address=c.dashboard_address,
                api_address=c.api_address,
                tls_cert=tls_cert,
                tls_key=tls_key,
                start_time=c.start_time,
                stop_time=c.stop_time,
            )
            self.id_to_cluster[cluster.id] = cluster
            self.token_to_cluster[cluster.token] = cluster
            self.name_to_cluster[cluster.name] = cluster
            user.clusters[cluster.name] = cluster

        # Next load all existing workers into memory
        for w in self.db.execute(workers.select()):
            cluster = self.id_to_cluster[w.cluster_id]
            worker = Worker(
                id=w.id,
                name=w.name,
                status=w.status,
                cluster=cluster,
                state=w.state,
                start_time=w.start_time,
                stop_time=w.stop_time,
            )
            cluster.workers[worker.name] = worker
            if w.status == WorkerStatus.STARTING:
                cluster.pending.add(worker.name)

    def cleanup_expired(self, max_age_in_seconds):
        cutoff = timestamp() - max_age_in_seconds * 1000
        with self.db.begin() as conn:
            to_delete = conn.execute(
                select([clusters.c.id]).where(clusters.c.stop_time < cutoff)
            ).fetchall()

            if to_delete:
                to_delete = [i for i, in to_delete]

                conn.execute(
                    clusters.delete().where(clusters.c.id == bindparam("id")),
                    [{
                        "id": i
                    } for i in to_delete],
                )

                for i in to_delete:
                    cluster = self.id_to_cluster.pop(i)
                    del self.token_to_cluster[cluster.token]
                    del self.name_to_cluster[cluster.name]
                    del cluster.user.clusters[cluster.name]

        return len(to_delete)

    def encrypt(self, b):
        """Encrypt bytes ``b``. If encryption is disabled this is a no-op"""
        return b if self.fernet is None else self.fernet.encrypt(b)

    def decrypt(self, b):
        """Decrypt bytes ``b``. If encryption is disabled this is a no-op"""
        return b if self.fernet is None else self.fernet.decrypt(b)

    def encode_tls_credentials(self, tls_cert, tls_key):
        return self.encrypt(b";".join((tls_cert, tls_key)))

    def decode_tls_credentials(self, data):
        return self.decrypt(data).split(b";")

    def encode_token(self, token):
        return self.encrypt(token.encode("utf8"))

    def decode_token(self, data):
        return self.decrypt(data).decode()

    def user_from_cookie(self, cookie):
        """Lookup a user from a cookie"""
        return self.cookie_to_user.get(cookie)

    def get_or_create_user(self, username):
        """Lookup a user if they exist, otherwise create a new user"""
        user = self.username_to_user.get(username)
        if user is None:
            cookie = uuid.uuid4().hex
            res = self.db.execute(users.insert().values(name=username,
                                                        cookie=cookie))
            user = User(id=res.inserted_primary_key[0],
                        name=username,
                        cookie=cookie)
            self.cookie_to_user[cookie] = user
            self.username_to_user[username] = user
        return user

    def cluster_from_token(self, token):
        """Lookup a cluster from a token"""
        return self.token_to_cluster.get(token)

    def cluster_from_name(self, name):
        """Lookup a cluster by name"""
        return self.name_to_cluster.get(name)

    def active_clusters(self):
        for user in self.username_to_user.values():
            for cluster in user.clusters.values():
                if cluster.is_active():
                    yield cluster

    def create_cluster(self, user, options):
        """Create a new cluster for a user"""
        cluster_name = uuid.uuid4().hex
        token = uuid.uuid4().hex
        tls_cert, tls_key = new_keypair(cluster_name)
        # Encode the tls credentials for storing in the database
        tls_credentials = self.encode_tls_credentials(tls_cert, tls_key)
        enc_token = self.encode_token(token)

        common = {
            "name": cluster_name,
            "options": options,
            "status": ClusterStatus.STARTING,
            "state": {},
            "scheduler_address": "",
            "dashboard_address": "",
            "api_address": "",
            "start_time": timestamp(),
        }

        with self.db.begin() as conn:
            res = conn.execute(clusters.insert().values(
                user_id=user.id,
                tls_credentials=tls_credentials,
                token=enc_token,
                **common,
            ))
            cluster = Cluster(
                id=res.inserted_primary_key[0],
                user=user,
                token=token,
                tls_cert=tls_cert,
                tls_key=tls_key,
                **common,
            )
            self.id_to_cluster[cluster.id] = cluster
            self.token_to_cluster[token] = cluster
            self.name_to_cluster[cluster_name] = cluster
            user.clusters[cluster_name] = cluster

        return cluster

    def create_worker(self, cluster):
        """Create a new worker for a cluster"""
        worker_name = uuid.uuid4().hex

        common = {
            "name": worker_name,
            "status": WorkerStatus.STARTING,
            "state": {},
            "start_time": timestamp(),
        }

        with self.db.begin() as conn:
            res = conn.execute(workers.insert().values(cluster_id=cluster.id,
                                                       **common))
            worker = Worker(id=res.inserted_primary_key[0],
                            cluster=cluster,
                            **common)
            cluster.pending.add(worker.name)
            cluster.workers[worker.name] = worker

        return worker

    def update_cluster(self, cluster, **kwargs):
        """Update a cluster's state"""
        with self.db.begin() as conn:
            conn.execute(clusters.update().where(
                clusters.c.id == cluster.id).values(**kwargs))
            for k, v in kwargs.items():
                setattr(cluster, k, v)

    def update_worker(self, worker, **kwargs):
        """Update a worker's state"""
        with self.db.begin() as conn:
            conn.execute(workers.update().where(
                workers.c.id == worker.id).values(**kwargs))
            for k, v in kwargs.items():
                setattr(worker, k, v)
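The encrypt/decrypt helpers above are deliberate no-ops when encryption is disabled (in-memory databases). A minimal round-trip sketch, assuming a throwaway key and the surrounding module's imports; the database path is a placeholder:

from cryptography.fernet import Fernet

dm = DataManager(url="sqlite:///gateway.db",
                 encrypt_keys=[Fernet.generate_key()])

tls_cert, tls_key = b"---cert---", b"---key---"
blob = dm.encode_tls_credentials(tls_cert, tls_key)   # encrypted bytes
assert dm.decode_tls_credentials(blob) == [tls_cert, tls_key]

The b";" separator in encode_tls_credentials is safe because PEM-encoded certificates and keys never contain that byte.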
Exemplo n.º 23
import hashlib
from cryptography.fernet import Fernet, MultiFernet
from SecurityMS.settings import HASHES, SALT, KEY_F1, KEY_F2, KEY_F3, KEY_F4
from ..models import AuthUser
#Encryption
K1 = Fernet(KEY_F1)
K2 = Fernet(KEY_F2)
K3 = Fernet(KEY_F3)
K4 = Fernet(KEY_F4)
MF = MultiFernet([K1, K2, K3, K4])


def hasher(msg: str) -> str:
    m = hashlib.sha512()
    hashedMsg = (msg + SALT)
    for i in range(HASHES):
        m.update(hashedMsg.encode('utf-8'))
        hashedMsg = m.hexdigest()
    return str(hashedMsg)


def encryptedMSG(msg: str) -> str:
    return str(MF.encrypt(msg.encode('utf-8'))).encode('utf-8')


def decryptorMSG(msg: str) -> str:
    return MF.decrypt(eval(msg)).decode()


def verify(request: dict) -> dict:
    try:
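With the corrected helpers above, encryption and decryption are exact inverses. A quick round-trip check (the settings-derived keys and salt are assumed to be configured):

token = encryptedMSG("hello")
assert decryptorMSG(token) == "hello"
assert hasher("hello") == hasher("hello")   # the iterated hash is deterministic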
Exemplo n.º 24
class Cryptograph(object):
    """Symmetric encryption and decryption for the storage of sensitive data.

    We currently rely on Fernet, which was the algorithm adopted by Gratipay:
    https://github.com/gratipay/gratipay.com/pull/3998#issuecomment-216227070

    For encryption Fernet uses the AES cipher in CBC mode with PKCS7 padding and
    a 128-bit key. For authentication it uses HMAC-SHA256 with another 128-bit
    key.

    Fernet messages contain the timestamp at which they were generated *in plain
    text*. This isn't a problem for us since we want to store the time at which
    the data was encrypted in order to facilitate key rotation.

    We use CBOR (Concise Binary Object Representation) to serialize objects
    before encryption. Compared to JSON, CBOR is faster to parse and serialize,
    more compact, and extensible (it can represent any data type using "tags").
    More info on CBOR: http://cbor.io/ https://tools.ietf.org/html/rfc7049
    """

    KEY_ROTATION_DELAY = timedelta(weeks=1)

    def __init__(self):
        if website.env.aws_secret_access_key:
            sm = self.secrets_manager = boto3.client('secretsmanager', region_name='eu-west-1')
            secret = sm.get_secret_value(SecretId='Fernet')
            rotation_start = secret['CreatedDate'].date()
            keys = secret['SecretString'].split()
        else:
            self.secrets_manager = None
            parts = os.environ['SECRET_FERNET_KEYS'].split()
            rotation_start = date(*map(int, parts[0].split('-')))
            keys = parts[1:]
        self.fernet_rotation_start = rotation_start
        self.fernet_keys = [k.encode('ascii') for k in keys]
        self.fernet = MultiFernet([Fernet(k) for k in self.fernet_keys])

    def encrypt_dict(self, dic, allow_single_key=False):
        """Serialize and encrypt a dictionary for storage in the database.

        Encrypting partially predictable data may help an attacker break the
        encryption key, so to make our data less predictable we randomize the
        order of the dict's items before serializing it.

        For this to be effective the CBOR serializer must not sort the items
        again in an attempt to produce Canonical CBOR, so we explicitly pass
        `canonical=False` to the `cbor.dumps` function.

        In addition, the dict must not contain only one key if that key is
        predictable, so a `CryptoWarning` is emitted when `dic` only contains
        one key, unless `allow_single_key` is set to `True`.
        """
        dic = self.randomize_dict(dic, allow_single_key=allow_single_key)
        serialized = cbor.dumps(dic, canonical=False)
        encrypted = self.fernet.encrypt(serialized)
        return Encrypted(dict(scheme='fernet', payload=encrypted, ts=utcnow()))

    def decrypt(self, scheme, payload):
        """Decrypt and reconstruct an object stored in the database.
        """
        if scheme == 'fernet':
            decrypted = self.fernet.decrypt(payload)
        else:
            raise ValueError('unknown encryption scheme %r' % scheme)
        return cbor.loads(decrypted)

    @staticmethod
    def randomize_dict(dic, allow_single_key=False):
        """Randomize the order of a dictionary's items.

        Emits a `CryptoWarning` if `dic` only contains one key, unless
        `allow_single_key` is set to `True`.
        """
        if not isinstance(dic, dict):
            raise TypeError("expected a dict, got %s" % type(dic))
        # Compute the number of random bytes needed based on the size of the dict
        n = len(dic)
        if n < 2:
            # Can't randomize the order if the dict contains less than 2 items
            if n == 1 and not allow_single_key:
                warnings.warn("dict only contains one key", CryptoWarning)
            return dic
        n = int(log(n, 2) // 8) + 2
        # Return a new ordered dict sorted randomly
        return OrderedDict(
            t[1] for t in sorted((urandom(n), item) for item in dic.items())
        )

    def rotate_key(self):
        """Generate a new key and send it to the secrets manager.
        """
        keys = b' '.join([Fernet.generate_key()] + self.fernet_keys).decode()
        if self.secrets_manager:
            self.secrets_manager.update_secret(SecretId='Fernet', SecretString=keys)
        else:
            keys = utcnow().date().isoformat() + ' ' + keys
            print("No secrets manager, updating the key storage is up to you.")
        return keys

    def rotate_message(self, msg, force=False):
        """Re-encrypt a single message using the current primary key.

        The original timestamp included in the message is always preserved.
        Moreover the entire message is returned unchanged if it was already
        encrypted from the latest key and `force` is `False` (the default).

        `InvalidToken` is raised if decryption fails.
        """
        timestamp, data = Fernet._get_unverified_token_data(msg)
        for i, fernet in enumerate(self.fernet._fernets):
            try:
                p = fernet._decrypt_data(data, timestamp, None)
            except InvalidToken:
                continue
            if i == 0 and not force:
                # This message was encrypted using the latest key, return it
                return msg
            break
        else:
            raise InvalidToken

        iv = os.urandom(16)
        return self.fernet._fernets[0]._encrypt_from_parts(p, timestamp, iv)

    def rotate_stored_data(self, wait=True):
        """Re-encrypt all the sensitive information stored in our database.

        This function is a special kind of "cron job" that returns one of two
        constants from the `liberapay.cron` module: `CRON_ENCORE`, indicating
        that the function needs to be run again to continue its work, or
        `CRON_STOP`, indicating that all the ciphertexts are up-to-date (or that
        it isn't time to rotate yet).

        Rows are processed in batches of 50. Timestamps are used to keep track of
        progress and to avoid overwriting new data with re-encrypted old data.

        The update only starts one week after the new key was generated, unless
        `wait` is set to `False`. This delay is to "ensure" that the previous
        key is no longer being used to encrypt new data.
        """
        update_start = self.fernet_rotation_start + self.KEY_ROTATION_DELAY
        if wait:
            if utcnow().date() < update_start:
                return CRON_STOP

        with website.db.get_cursor() as cursor:
            batch = cursor.all("""
                SELECT id, info
                  FROM identities
                 WHERE (info).ts <= %s
              ORDER BY (info).ts ASC
                 LIMIT 50
            """, (update_start,))
            if not batch:
                return CRON_STOP

            sql = """
                UPDATE identities
                   SET info = ('fernet', %s, current_timestamp)::encrypted
                 WHERE id = %s
                   AND (info).ts = %s;
            """
            args_list = [
                (self.rotate_message(r.info.payload), r.id, r.info.ts)
                for r in batch
            ]
            execute_batch(cursor, sql, args_list)

        return CRON_ENCORE
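rotate_message reaches into Fernet's private internals so that it can preserve the token's timestamp and return messages already under the primary key unchanged. For comparison, a sketch using only the public API: MultiFernet.rotate also preserves the embedded timestamp, but always returns a freshly encrypted token.

from cryptography.fernet import Fernet, MultiFernet

old_key, new_key = Fernet.generate_key(), Fernet.generate_key()
token = Fernet(old_key).encrypt(b"sensitive")
# Re-encrypt under the new primary key; the old key is kept for decryption
rotated = MultiFernet([Fernet(new_key), Fernet(old_key)]).rotate(token)
assert Fernet(new_key).decrypt(rotated) == b"sensitive"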
Exemplo n.º 25
from cryptography.fernet import Fernet, MultiFernet
# key generation

key1 = Fernet(Fernet.generate_key())
key2 = Fernet(Fernet.generate_key())
# the MultiFernet takes a list of Fernet instances

f = MultiFernet([key1, key2])
# encryption and token generation
token = f.encrypt(b"zulkepretes make templest os test")
# display the ciphertext
print(token)
# decryption of ciphertext to plaintext
d = f.decrypt(token)
# display the plaintext
# decode() converts the bytes back to a string
print(d.decode())
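The asymmetry worth remembering: MultiFernet encrypts with the first instance only, but tries every instance when decrypting. A short sketch making that visible:

from cryptography.fernet import Fernet, InvalidToken, MultiFernet

k1 = Fernet(Fernet.generate_key())
k2 = Fernet(Fernet.generate_key())
token = MultiFernet([k1, k2]).encrypt(b"payload")
assert k1.decrypt(token) == b"payload"   # produced by the primary key
try:
    k2.decrypt(token)                    # the secondary key cannot verify it
except InvalidToken:
    pass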
Exemplo n.º 26
import base64
import random
import string

from django.conf import settings  # assumed: a Django-style settings module

from cryptography.fernet import Fernet, MultiFernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
password = settings.INKSHOP_ENCRYPTION_KEY.encode("utf-8")
salt = settings.INKSHOP_ENCRYPTION_SALT.encode("utf-8")
kdf = PBKDF2HMAC(
    algorithm=hashes.SHA256(),
    length=32,
    salt=salt,
    iterations=100000,
    backend=default_backend()
)
key = Fernet(base64.urlsafe_b64encode(kdf.derive(password)))

# Future support for key rotation
# https://cryptography.io/en/latest/fernet/#cryptography.fernet.MultiFernet
f = MultiFernet([key, ])
# f = key


def rand_str(length=20):
    # from Factory
    choices = string.ascii_uppercase + string.ascii_lowercase + string.digits
    return ''.join(random.SystemRandom().choice(choices) for i in range(length))


def encrypt(s):
    if s is None:
        return None
    if settings.DISABLE_ENCRYPTION_FOR_TESTS:
        return s
    return f.encrypt(s.encode('utf-8')).decode()
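This excerpt only shows encrypt; its inverse would mirror it. A sketch of the assumed counterpart (the real project's decrypt helper is not shown here):

def decrypt(s):
    # Assumed counterpart to encrypt() above, mirroring its None and
    # test-mode short circuits
    if s is None:
        return None
    if settings.DISABLE_ENCRYPTION_FOR_TESTS:
        return s
    return f.decrypt(s.encode('utf-8')).decode('utf-8')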
Exemplo n.º 27
from cryptography.fernet import Fernet, MultiFernet

key = Fernet.generate_key()
F = Fernet(key)
info = "my deep dark secret"
# encrypt the message
token = F.encrypt(info.encode())
print("token: ", token)
# decrypt the message
de_info = F.decrypt(token)
print(de_info.decode())

key1 = Fernet.generate_key()
print("key1: ", key1)
F1 = Fernet(key1)
key2 = Fernet.generate_key()
print("key2: ", key2)
F2 = Fernet(key2)

# MultiFernet performs all encryption operations using the first key in the list provided.
# MultiFernet attempts to decrypt tokens with each key in turn.
FM = MultiFernet([F1, F2])
tokenFM = FM.encrypt(info.encode())
print("tokenFM: ", tokenFM)
print("FM.decrypt: ", FM.decrypt(tokenFM))
key3 = Fernet.generate_key()
print("key3: ", key3)
F3 = Fernet(key3)
FM2 = MultiFernet([F3, F1, F2])
# rotate a token by decrypting and re-encrypting it under the MultiFernet instance's primary key.
rotated = FM2.rotate(tokenFM)
print("rotated: ", rotated)
print(FM2.decrypt(rotated))
Exemplo n.º 28
class Cryptograph:
    """Symmetric encryption and decryption for the storage of sensitive data.

    We currently rely on Fernet, which was the algorithm adopted by Gratipay:
    https://github.com/gratipay/gratipay.com/pull/3998#issuecomment-216227070

    For encryption Fernet uses the AES cipher in CBC mode with PKCS7 padding and
    a 128-bit key. For authentication it uses HMAC-SHA256 with another 128-bit
    key.

    Fernet messages contain the timestamp at which they were generated *in plain
    text*. This isn't a problem for us since we want to store the time at which
    the data was encrypted in order to facilitate key rotation.

    We use CBOR (Concise Binary Object Representation) to serialize objects
    before encryption. Compared to JSON, CBOR is faster to parse and serialize,
    more compact, and extensible (it can represent any data type using "tags").
    More info on CBOR: http://cbor.io/ https://tools.ietf.org/html/rfc7049
    """

    KEY_ROTATION_DELAY = timedelta(weeks=1)

    def __init__(self):
        if website.env.aws_secret_access_key:
            sm = self.secrets_manager = boto3.client('secretsmanager',
                                                     region_name='eu-west-1')
            secret = sm.get_secret_value(SecretId='Fernet')
            rotation_start = secret['CreatedDate'].date()
            keys = secret['SecretString'].split()
        else:
            self.secrets_manager = None
            parts = os.environ['SECRET_FERNET_KEYS'].split()
            rotation_start = date(*map(int, parts[0].split('-')))
            keys = parts[1:]
        self.fernet_rotation_start = rotation_start
        self.fernet_keys = [k.encode('ascii') for k in keys]
        self.fernet = MultiFernet([Fernet(k) for k in self.fernet_keys])

    def encrypt_dict(self, dic, allow_single_key=False):
        """Serialize and encrypt a dictionary for storage in the database.

        Encrypting partially predictable data may help an attacker break the
        encryption key, so to make our data less predictable we randomize the
        order of the dict's items before serializing it.

        For this to be effective the CBOR serializer must not sort the items
        again in an attempt to produce Canonical CBOR, so we explicitly pass
        `canonical=False` to the `cbor.dumps` function.

        In addition, the dict must not contain only one key if that key is
        predictable, so a `CryptoWarning` is emitted when `dic` only contains
        one key, unless `allow_single_key` is set to `True`.
        """
        dic = self.randomize_dict(dic, allow_single_key=allow_single_key)
        serialized = cbor.dumps(dic, canonical=False)
        encrypted = self.fernet.encrypt(serialized)
        return Encrypted(('fernet', encrypted, utcnow()))

    def decrypt(self, scheme, payload):
        """Decrypt and reconstruct an object stored in the database.
        """
        if scheme == 'fernet':
            decrypted = self.fernet.decrypt(payload)
        else:
            raise ValueError('unknown encryption scheme %r' % scheme)
        return cbor.loads(decrypted)

    @staticmethod
    def randomize_dict(dic, allow_single_key=False):
        """Randomize the order of a dictionary's items.

        Emits a `CryptoWarning` if `dic` only contains one key, unless
        `allow_single_key` is set to `True`.
        """
        if not isinstance(dic, dict):
            raise TypeError("expected a dict, got %s" % type(dic))
        # Compute the number of random bytes needed based on the size of the dict
        n = len(dic)
        if n < 2:
            # Can't randomize the order if the dict contains less than 2 items
            if n == 1 and not allow_single_key:
                warnings.warn("dict only contains one key", CryptoWarning)
            return dic
        n = int(log(n, 2) // 8) + 2
        # Return a new ordered dict sorted randomly
        return OrderedDict(t[1] for t in sorted((urandom(n), item)
                                                for item in dic.items()))

    def rotate_key(self):
        """Generate a new key and send it to the secrets manager.
        """
        keys = b' '.join([Fernet.generate_key()] + self.fernet_keys).decode()
        if self.secrets_manager:
            self.secrets_manager.update_secret(SecretId='Fernet',
                                               SecretString=keys)
        else:
            keys = utcnow().date().isoformat() + ' ' + keys
            print("No secrets manager, updating the key storage is up to you.")
        return keys

    def rotate_message(self, msg, force=False):
        """Re-encrypt a single message using the current primary key.

        The original timestamp included in the message is always preserved.
        Moreover the entire message is returned unchanged if it was already
        encrypted from the latest key and `force` is `False` (the default).

        `InvalidToken` is raised if decryption fails.
        """
        timestamp, data = Fernet._get_unverified_token_data(msg)
        for i, fernet in enumerate(self.fernet._fernets):
            try:
                p = fernet._decrypt_data(data, timestamp, None)
            except InvalidToken:
                continue
            if i == 0 and not force:
                # This message was encrypted using the latest key, return it
                return msg
            break
        else:
            raise InvalidToken

        iv = os.urandom(16)
        return self.fernet._fernets[0]._encrypt_from_parts(p, timestamp, iv)

    def rotate_stored_data(self, wait=True):
        """Re-encrypt all the sensitive information stored in our database.

        This function is a special kind of "cron job" that returns one of two
        constants from the `liberapay.cron` module: `CRON_ENCORE`, indicating
        that the function needs to be run again to continue its work, or
        `CRON_STOP`, indicating that all the ciphertexts are up-to-date (or that
        it isn't time to rotate yet).

        Rows are processed in batches of 50. Timestamps are used to keep track of
        progress and to avoid overwriting new data with re-encrypted old data.

        The update only starts one week after the new key was generated, unless
        `wait` is set to `False`. This delay is to "ensure" that the previous
        key is no longer being used to encrypt new data.
        """
        update_start = self.fernet_rotation_start + self.KEY_ROTATION_DELAY
        if wait:
            if utcnow().date() < update_start:
                return CRON_STOP

        with website.db.get_cursor() as cursor:
            batch = cursor.all(
                """
                SELECT id, info
                  FROM identities
                 WHERE (info).ts <= %s
              ORDER BY (info).ts ASC
                 LIMIT 50
            """, (update_start, ))
            if not batch:
                return CRON_STOP

            sql = """
                UPDATE identities
                   SET info = ('fernet', %s, current_timestamp)::encrypted
                 WHERE id = %s
                   AND (info).ts = %s;
            """
            args_list = [(self.rotate_message(r.info.payload), r.id, r.info.ts)
                         for r in batch]
            execute_batch(cursor, sql, args_list)

        return CRON_ENCORE
Exemplo n.º 29
class SecretStore:
    def __init__(self, *master_keys, encrypted_store: dict = None):
        if not len(master_keys):
            raise ValueError('at least one master key must be passed')
        self.crypt = MultiFernet([Fernet(key) for key in master_keys])
        if not encrypted_store:
            self.encrypted_store = dict()
        else:
            self.encrypted_store = encrypted_store

    @staticmethod
    def generate_master_key():
        return Fernet.generate_key()

    @staticmethod
    def add_master_key(key_yaml_path):
        master_key = SecretStore.generate_master_key()
        try:
            master_keys = SecretStore._load_keys(key_yaml_path)
        except OSError:
            master_keys = []
        master_keys = [master_key] + master_keys
        SecretStore._save_as_yaml(key_yaml_path, 'keys', master_keys)
        return master_keys

    @staticmethod
    def _load_keys(key_yaml_path):
        with open(key_yaml_path, 'r') as key_file:
            # safe_load avoids executing arbitrary YAML tags from disk
            master_keys = yaml.safe_load(key_file)['keys']
            return master_keys

    @classmethod
    def load_from_yaml(cls, key_yaml_path, store_yaml_path=None, encrypted=True):
        master_keys = SecretStore._load_keys(key_yaml_path)
        secret_store = cls(*master_keys)
        if store_yaml_path:
            secret_store.load_as_yaml(store_yaml_path, encrypted=encrypted)
        return secret_store

    def encrypt_copy(self, plain_store, *path):
        for key in plain_store:
            value = plain_store[key]
            if isinstance(value, bytes) or isinstance(value, str):
                self.set_secret(value, *path, key)
            else:
                self.encrypt_copy(value, *(list(path) + [key]))

    def set_secret(self, secret, *path):
        if not len(path):
            raise ValueError('path to secret must not be empty')
        if not (isinstance(secret, bytes) or isinstance(secret, str)):
            raise ValueError(
                'secret must be bytes or str, but {0} is passed'.format(
                    type(secret)))
        if isinstance(secret, str):
            secret = secret.encode('utf-8')
        encrypted_secret = self.crypt.encrypt(secret)
        store = self.encrypted_store
        for key in path[:-1]:
            store = store.setdefault(key, dict())
        store[path[-1]] = encrypted_secret

    def get_secret(self, *path):
        encrypted_secret = self.get_encrypted_secret(*path)
        return self.crypt.decrypt(encrypted_secret)

    def delete_secret(self, *path):
        if not len(path):
            raise ValueError('path to secret must not be empty')
        store = self.encrypted_store
        for key in path[:-1]:
            store = store[key]
        del store[path[-1]]

    def get_encrypted_secret(self, *path):
        if not len(path):
            raise ValueError('path to secret must not be empty')
        store = self.encrypted_store
        for key in path[:-1]:
            store = store[key]
        encrypted_secret = store[path[-1]]
        return encrypted_secret

    def load_as_yaml(self, yaml_path, encrypted=True):
        with open(yaml_path, 'r') as secret_file:
            secret_storage = yaml.safe_load(secret_file)
            if encrypted:
                self.encrypted_store = secret_storage['encrypted_store']
            else:
                self.encrypt_copy(secret_storage['encrypted_store'])

    def save_as_yaml(self, yaml_path):
        SecretStore._save_as_yaml(yaml_path, 'encrypted_store', self.encrypted_store)

    def print_as_yaml(self):
        print(yaml.dump(self.encrypted_store, default_flow_style=False))

    @staticmethod
    def _wrap_payload(payload_key, payload):
        # Take the current time directly in UTC rather than relabelling a
        # naive local time as UTC
        timestamp = datetime.now(timezone.utc).timestamp()
        wrapper = {
            'meta': {
                'method': 'fernet',
                'timestamp': timestamp,
                'timezone': 'utc'
            },
            payload_key: payload
        }
        return wrapper

    @staticmethod
    def _save_as_yaml(yaml_path, payload_key, payload):
        content = SecretStore._wrap_payload(payload_key, payload)
        with open(yaml_path, 'w') as yaml_file:
            yaml.dump(content, yaml_file, default_flow_style=False)
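A minimal usage sketch of the store, assuming a freshly generated master key:

master_key = SecretStore.generate_master_key()
store = SecretStore(master_key)

store.set_secret('s3cr3t', 'db', 'password')   # nested path: db -> password
assert store.get_secret('db', 'password') == b's3cr3t'   # decrypts to bytes
store.delete_secret('db', 'password')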
Exemplo n.º 30
File: core.py  Project: MisaGu/chess
class RedisChannelLayer(BaseChannelLayer):
    """
    Redis-backed channel layer; it will span multiple processes fine, with
    throughput bounded by the Redis servers rather than local state.
    """

    blpop_timeout = 5

    def __init__(self, expiry=60, hosts=None, prefix="asgi:", group_expiry=86400, capacity=100, channel_capacity=None,
                 symmetric_encryption_keys=None):
        super(RedisChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,
            capacity=capacity,
            channel_capacity=channel_capacity,
        )
        # Make sure they provided some hosts, or provide a default
        if not hosts:
            hosts = [("localhost", 6379)]
        self.hosts = []
        for entry in hosts:
            if isinstance(entry, six.string_types):
                self.hosts.append(entry)
            else:
                self.hosts.append("redis://%s:%d/0" % (entry[0],entry[1]))
        self.prefix = prefix
        assert isinstance(self.prefix, six.text_type), "Prefix must be unicode"
        # Precalculate some values for ring selection
        self.ring_size = len(self.hosts)
        self.ring_divisor = int(math.ceil(4096 / float(self.ring_size)))
        # Create connections ahead of time (they won't call out just yet, but
        # we want to connection-pool them later)
        self._connection_list = [
            redis.Redis.from_url(host)
            for host in self.hosts
        ]
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(random.choice(string.ascii_letters) for i in range(8))
        # Register scripts
        connection = self.connection(None)
        self.chansend = connection.register_script(self.lua_chansend)
        self.lpopmany = connection.register_script(self.lua_lpopmany)
        self.delprefix = connection.register_script(self.lua_delprefix)
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, six.string_types):
                raise ValueError("symmetric_encryption_keys must be a list of possible keys")
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError("Cannot run with encryption without 'cryptography' installed.")
            sub_fernets = [self.make_fernet(key) for key in symmetric_encryption_keys]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    ### ASGI API ###

    extensions = ["groups", "flush"]

    def send(self, channel, message):
        # Typecheck
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Write out message into expiring key (avoids big items in list)
        # TODO: Use extended set, drop support for older redis?
        message_key = self.prefix + uuid.uuid4().hex
        channel_key = self.prefix + channel
        # Pick a connection to the right server - consistent for response
        # channels, random for normal channels
        if "!" in channel or "?" in channel:
            index = self.consistent_hash(channel)
            connection = self.connection(index)
        else:
            connection = self.connection(None)
        # Use the Lua function to do the set-and-push
        try:
            self.chansend(
                keys=[message_key, channel_key],
                args=[self.serialize(message), self.expiry, self.get_capacity(channel)],
            )
        except redis.exceptions.ResponseError as e:
            # The Lua script handles capacity checking and sends the "full" error back
            if e.args[0] == "full":
                raise self.ChannelFull
            # Any other Redis error is unexpected; don't swallow it
            raise

    def receive_many(self, channels, block=False):
        if not channels:
            return None, None
        channels = list(channels)
        assert all(self.valid_channel_name(channel) for channel in channels), "One or more channel names invalid"
        # Work out what servers to listen on for the given channels
        indexes = {}
        random_index = self.random_index()
        for channel in channels:
            if "!" in channel or "?" in channel:
                indexes.setdefault(self.consistent_hash(channel), []).append(channel)
            else:
                indexes.setdefault(random_index, []).append(channel)
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            connection = self.connection(index)
            channels = indexes[index]
            # Shuffle channels to avoid the first ones starving others of workers
            random.shuffle(channels)
            # Pop off any waiting message
            list_names = [self.prefix + channel for channel in channels]
            if block:
                result = connection.blpop(list_names, timeout=self.blpop_timeout)
            else:
                result = self.lpopmany(keys=list_names, client=connection)
            if result:
                content = connection.get(result[1])
                # If the content key expired, keep going.
                if content is None:
                    continue
                # Return the channel it's from and the message
                return result[0][len(self.prefix):].decode("utf8"), self.deserialize(content)
            else:
                return None, None

    def new_channel(self, pattern):
        assert isinstance(pattern, six.text_type)
        # Keep making channel names till one isn't present.
        while True:
            random_string = "".join(random.choice(string.ascii_letters) for i in range(12))
            assert pattern.endswith("!") or pattern.endswith("?")
            new_name = pattern + random_string
            # Get right connection
            index = self.consistent_hash(new_name)
            connection = self.connection(index)
            # Check to see if it's in the connected Redis.
            # This fails to stop collisions for sharding where the channel is
            # non-single-listener, but that seems very unlikely.
            key = self.prefix + new_name
            if not connection.exists(key):
                return new_name

    ### ASGI Group extension ###

    def group_add(self, group, channel):
        """
        Adds the channel to the named group for at least 'expiry'
        seconds (expiry defaults to message expiry if not provided).
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        group_key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Add to group sorted set with creation time as timestamp
        connection.zadd(
            group_key,
            **{channel: time.time()}
        )
        # Set both expirations to group_expiry, since everything in
        # them at this point is guaranteed to expire before then
        connection.expire(group_key, self.group_expiry)
        # Also add to a normal set that contains all the groups a channel is in
        # (as yet unused)
        channel_key = self._channel_groups_key(channel)
        connection = self.connection(self.consistent_hash(channel))
        connection.sadd(channel_key, group)
        connection.expire(channel_key, self.group_expiry)

    def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        self.connection(self.consistent_hash(group)).zrem(
            key,
            channel,
        )

    def group_channels(self, group):
        """
        Returns all channels in the group as an iterable.
        """
        key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Discard old channels based on group_expiry
        connection.zremrangebyscore(key, 0, int(time.time()) - self.group_expiry)
        # Return current lot
        return [x.decode("utf8") for x in connection.zrange(
            key,
            0,
            -1,
        )]

    def send_group(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # TODO: More efficient implementation (lua script per shard?)
        for channel in self.group_channels(group):
            try:
                self.send(channel, message)
            except self.ChannelFull:
                pass

    def _group_key(self, group):
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    def _channel_groups_key(self, channel):
        return ("%s:chgroups:%s" % (self.prefix, channel)).encode("utf8")

    ### Flush extension ###

    def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        for connection in self._connection_list:
            self.delprefix(keys=[], args=[self.prefix+"*"], client=connection)

    ### Serialization ###

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, encoding="utf8")

    ### Redis Lua scripts ###

    # Single-command channel send. Returns error if over capacity.
    # Keys: message, channel_list
    # Args: content, expiry, capacity
    lua_chansend = """
        if redis.call('llen', KEYS[2]) >= tonumber(ARGV[3]) then
            return redis.error_reply("full")
        end
        redis.call('set', KEYS[1], ARGV[1])
        redis.call('expire', KEYS[1], ARGV[2])
        redis.call('rpush', KEYS[2], KEYS[1])
        redis.call('expire', KEYS[2], ARGV[2] + 1)
    """

    lua_lpopmany = """
        for keyCount = 1, #KEYS do
            local result = redis.call('LPOP', KEYS[keyCount])
            if result then
                return {KEYS[keyCount], result}
            end
        end
        return {nil, nil}
    """

    lua_delprefix = """
        local keys = redis.call('keys', ARGV[1])
        for i=1,#keys,5000 do
            redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
        end
    """

    ### Internal functions ###

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC-32, then down to one of the ring nodes.
        """
        if isinstance(value, six.text_type):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xffffffff
        return (bigval // 0x100000) // self.ring_divisor

    def random_index(self):
        return random.randint(0, len(self.hosts) - 1)

    def connection(self, index):
        """
        Returns the correct connection for the current thread.

        Pass key to use a server based on consistent hashing of the key value;
        pass None to use a random server instead.
        """
        # If index is explicitly None, pick a random server
        if index is None:
            index = self.random_index()
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" % (self.ring_size, index))
        return self._connection_list[index]

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, six.text_type):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)
Example no. 31
class RedisChannelLayer(BaseChannelLayer):
    """
    Redis channel layer.

    It routes all messages into a remote Redis server. Sharding among
    different Redis installations and message encryption are supported.
    Both synchronous and asynchronous (via Twisted) approaches are
    implemented.
    """

    blpop_timeout = 5
    global_statistics_expiry = 86400
    channel_statistics_expiry = 3600
    global_stats_key = '#global#'  # needs to be invalid as a channel name

    def __init__(
        self,
        expiry=60,
        hosts=None,
        prefix="asgi:",
        group_expiry=86400,
        capacity=100,
        channel_capacity=None,
        symmetric_encryption_keys=None,
        stats_prefix="asgi-meta:",
        connection_kwargs=None,
    ):
        super(RedisChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,
            capacity=capacity,
            channel_capacity=channel_capacity,
        )
        self.hosts = self._setup_hosts(hosts)

        self.prefix = prefix
        assert isinstance(self.prefix, six.text_type), "Prefix must be unicode"
        # Precalculate some values for ring selection
        self.ring_size = len(self.hosts)
        # Create connections ahead of time (they won't call out just yet, but
        # we want to connection-pool them later)
        socket_timeout = connection_kwargs and connection_kwargs.get(
            "socket_timeout", None)
        if socket_timeout and socket_timeout < self.blpop_timeout:
            raise ValueError("The socket timeout must be at least %s seconds" %
                             self.blpop_timeout)
        self._connection_list = self._generate_connections(
            redis_kwargs=connection_kwargs or {})
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(
            random.choice(string.ascii_letters) for i in range(8))
        self._register_scripts()
        self._setup_encryption(symmetric_encryption_keys)
        self.stats_prefix = stats_prefix

    def _setup_hosts(self, hosts):
        # Make sure they provided some hosts, or provide a default
        final_hosts = list()
        if not hosts:
            hosts = [("localhost", 6379)]

        if isinstance(hosts, six.string_types):
            # user accidentally used one host string instead of providing a list of hosts
            raise ValueError(
                'ASGI Redis hosts must be specified as an iterable list of hosts.'
            )

        for entry in hosts:
            if isinstance(entry, six.string_types):
                final_hosts.append(entry)
            else:
                final_hosts.append("redis://%s:%d/0" % (entry[0], entry[1]))
        return final_hosts

    def _register_scripts(self):
        connection = self.connection(None)
        self.chansend = connection.register_script(self.lua_chansend)
        self.lpopmany = connection.register_script(self.lua_lpopmany)
        self.delprefix = connection.register_script(self.lua_delprefix)
        self.incrstatcounters = connection.register_script(
            self.lua_incrstatcounters)

    def _setup_encryption(self, symmetric_encryption_keys):
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, six.string_types):
                raise ValueError(
                    "symmetric_encryption_keys must be a list of possible keys"
                )
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError(
                    "Cannot run with encryption without 'cryptography' installed."
                )
            sub_fernets = [
                self.make_fernet(key) for key in symmetric_encryption_keys
            ]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    def _generate_connections(self, redis_kwargs):
        return [
            redis.Redis.from_url(host, **redis_kwargs) for host in self.hosts
        ]

    ### ASGI API ###

    extensions = ["groups", "flush", "statistics"]
    try:
        import txredisapi
    except ImportError:
        pass
    else:
        extensions.append("twisted")

    def send(self, channel, message):
        # Typecheck
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Make sure the message does not contain reserved keys
        assert "__asgi_channel__" not in message
        # If it's a process-local channel, strip off local part and stick full name in message
        if "!" in channel:
            message = dict(message.items())
            message['__asgi_channel__'] = channel
            channel = self.non_local_name(channel)
        # Write out message into expiring key (avoids big items in list)
        # TODO: Use extended set, drop support for older redis?
        message_key = self.prefix + uuid.uuid4().hex
        channel_key = self.prefix + channel
        # Pick a connection to the right server - consistent for response
        # channels, random for normal channels
        if "!" in channel or "?" in channel:
            index = self.consistent_hash(channel)
            connection = self.connection(index)
        else:
            connection = self.connection(None)
        # Use the Lua function to do the set-and-push
        try:
            self.chansend(
                keys=[message_key, channel_key],
                args=[
                    self.serialize(message), self.expiry,
                    self.get_capacity(channel)
                ],
                client=connection,
            )
            self._incr_statistics_counter(
                stat_name=self.STAT_MESSAGES_COUNT,
                channel=channel,
                connection=connection,
            )
        except redis.exceptions.ResponseError as e:
            # The Lua script handles capacity checking and sends the "full" error back
            if e.args[0] == "full":
                self._incr_statistics_counter(
                    stat_name=self.STAT_CHANNEL_FULL,
                    channel=channel,
                    connection=connection,
                )
                raise self.ChannelFull
            elif "unknown command" in e.args[0]:
                raise UnsupportedRedis(
                    "Redis returned an error (%s). Please ensure you're running a "
                    " version of redis that is supported by asgi_redis." %
                    e.args[0])
            else:
                # Let any other exception bubble up
                raise

    def receive(self, channels, block=False):
        # List name get
        indexes = self._receive_list_names(channels)
        # Short circuit if no channels
        if indexes is None:
            return None, None
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            list_names = indexes[index]
            # Shuffle list_names to avoid the first ones starving others of workers
            random.shuffle(list_names)
            # Open a connection
            connection = self.connection(index)
            # Pop off any waiting message
            if block:
                try:
                    result = connection.blpop(list_names,
                                              timeout=self.blpop_timeout)
                except redis.exceptions.TimeoutError:
                    continue
            else:
                result = self.lpopmany(keys=list_names, client=connection)
            if result:
                content = connection.get(result[1])
                # If the content key expired, keep going.
                if content is None:
                    continue
                # Return the channel it's from and the message
                channel = result[0][len(self.prefix):].decode("utf8")
                message = self.deserialize(content)
                # If there is a full channel name stored in the message, unpack it.
                if "__asgi_channel__" in message:
                    channel = message['__asgi_channel__']
                    del message['__asgi_channel__']
                return channel, message
            else:
                return None, None

    def _receive_list_names(self, channels):
        """
        Inner logic of receive; takes channels, groups by shard, and
        returns {connection_index: list_names ...} if a query is needed or
        None for a vacuously empty response.
        """
        # Short circuit if no channels
        if not channels:
            return None
        # Check channel names are valid
        channels = list(channels)
        assert all(
            self.valid_channel_name(channel, receive=True)
            for channel in channels), "One or more channel names invalid"
        # Work out what servers to listen on for the given channels
        indexes = {}
        random_index = self.random_index()
        for channel in channels:
            if "!" in channel or "?" in channel:
                indexes.setdefault(self.consistent_hash(channel),
                                   []).append(self.prefix + channel)
            else:
                indexes.setdefault(random_index,
                                   []).append(self.prefix + channel)
        return indexes

    def new_channel(self, pattern):
        assert isinstance(pattern, six.text_type)
        # Keep making channel names till one isn't present.
        while True:
            random_string = "".join(
                random.choice(string.ascii_letters) for i in range(12))
            assert pattern.endswith("?")
            new_name = pattern + random_string
            # Get right connection
            index = self.consistent_hash(new_name)
            connection = self.connection(index)
            # Check to see if it's in the connected Redis.
            # This fails to stop collisions for sharding where the channel is
            # non-single-listener, but that seems very unlikely.
            key = self.prefix + new_name
            if not connection.exists(key):
                return new_name

    ### ASGI Group extension ###

    def group_add(self, group, channel):
        """
        Adds the channel to the named group for at least group_expiry
        seconds.
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        group_key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Add to group sorted set with creation time as timestamp
        connection.zadd(group_key, **{channel: time.time()})
        # Set expiration to be group_expiry, since everything in
        # the group at this point is guaranteed to expire before that
        connection.expire(group_key, self.group_expiry)

    def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        self.connection(self.consistent_hash(group)).zrem(
            key,
            channel,
        )

    def group_channels(self, group):
        """
        Returns all channels in the group as an iterable.
        """
        key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Discard old channels based on group_expiry
        connection.zremrangebyscore(key, 0,
                                    int(time.time()) - self.group_expiry)
        # Return current lot
        return [x.decode("utf8") for x in connection.zrange(
            key,
            0,
            -1,
        )]

    def send_group(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # TODO: More efficient implementation (lua script per shard?)
        for channel in self.group_channels(group):
            try:
                self.send(channel, message)
            except self.ChannelFull:
                pass

    def _group_key(self, group):
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    ### Flush extension ###

    def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        for connection in self._connection_list:
            self.delprefix(keys=[],
                           args=[self.prefix + "*"],
                           client=connection)
            self.delprefix(keys=[],
                           args=[self.stats_prefix + "*"],
                           client=connection)

    ### Twisted extension ###

    @defer.inlineCallbacks
    def receive_twisted(self, channels):
        """
        Twisted-native implementation of receive.
        """
        # List name get
        indexes = self._receive_list_names(channels)
        # Short circuit if no channels
        if indexes is None:
            defer.returnValue((None, None))
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            list_names = indexes[index]
            # Shuffle list_names to avoid the first ones starving others of workers
            random.shuffle(list_names)
            # Get a sync connection for conn details
            sync_connection = self.connection(index)
            twisted_connection = yield txredisapi.ConnectionPool(
                host=sync_connection.connection_pool.connection_kwargs['host'],
                port=sync_connection.connection_pool.connection_kwargs['port'],
                dbid=sync_connection.connection_pool.connection_kwargs['db'],
                password=sync_connection.connection_pool.connection_kwargs['password'],
            )
            try:
                # Pop off any waiting message
                result = yield twisted_connection.blpop(
                    list_names, timeout=self.blpop_timeout)
                if result:
                    content = yield twisted_connection.get(result[1])
                    # If the content key expired, keep going.
                    if content is None:
                        continue
                    # Return the channel it's from and the message
                    channel = result[0][len(self.prefix):]
                    message = self.deserialize(content)
                    # If there is a full channel name stored in the message, unpack it.
                    if "__asgi_channel__" in message:
                        channel = message['__asgi_channel__']
                        del message['__asgi_channel__']
                    defer.returnValue((channel, message))
                else:
                    defer.returnValue((None, None))
            finally:
                yield twisted_connection.disconnect()

    ### statistics extension ###

    STAT_MESSAGES_COUNT = 'messages_count'
    STAT_MESSAGES_PENDING = 'messages_pending'
    STAT_MESSAGES_MAX_AGE = 'messages_max_age'
    STAT_CHANNEL_FULL = 'channel_full_count'

    def global_statistics(self):
        """
        Returns a dictionary of statistics across all channels on all shards.
        The return value is a dictionary with the following fields:
            * messages_count, the number of messages processed since server start
            * channel_full_count, the number of times the ChannelFull exception has been raised since server start

        This implementation does not provide calculated per-second values.
        Due to performance concerns, it does not provide aggregated
        messages_pending and messages_max_age; these are only available
        per channel.

        """
        return self._count_global_stats(self._connection_list)

    def _count_global_stats(self, connection_list):
        statistics = {
            self.STAT_MESSAGES_COUNT: 0,
            self.STAT_CHANNEL_FULL: 0,
        }
        prefix = self.stats_prefix + self.global_stats_key
        for connection in connection_list:
            messages_count, channel_full_count = connection.mget(
                ':'.join((prefix, self.STAT_MESSAGES_COUNT)),
                ':'.join((prefix, self.STAT_CHANNEL_FULL)),
            )
            statistics[self.STAT_MESSAGES_COUNT] += int(messages_count or 0)
            statistics[self.STAT_CHANNEL_FULL] += int(channel_full_count or 0)

        return statistics

    def channel_statistics(self, channel):
        """
        Returns a dictionary of statistics for the specified channel.
        The return value is a dictionary with the following fields:
            * messages_count, the number of messages processed since server start
            * messages_pending, the current number of messages waiting
            * messages_max_age, how long the oldest message has been waiting, in seconds
            * channel_full_count, the number of times the ChannelFull exception has been raised since server start

        This implementation does not provide calculated per-second values.
        """
        if "!" in channel or "?" in channel:
            connections = [self.connection(self.consistent_hash(channel))]
        else:
            # if we don't know where it is, we have to check in all shards
            connections = self._connection_list
        return self._count_channel_stats(channel, connections)

    def _count_channel_stats(self, channel, connections):
        statistics = {
            self.STAT_MESSAGES_COUNT: 0,
            self.STAT_MESSAGES_PENDING: 0,
            self.STAT_MESSAGES_MAX_AGE: 0,
            self.STAT_CHANNEL_FULL: 0,
        }
        prefix = self.stats_prefix + channel

        channel_key = self.prefix + channel
        for connection in connections:
            messages_count, channel_full_count = connection.mget(
                ':'.join((prefix, self.STAT_MESSAGES_COUNT)),
                ':'.join((prefix, self.STAT_CHANNEL_FULL)),
            )
            statistics[self.STAT_MESSAGES_COUNT] += int(messages_count or 0)
            statistics[self.STAT_CHANNEL_FULL] += int(channel_full_count or 0)
            statistics[self.STAT_MESSAGES_PENDING] += connection.llen(
                channel_key)
            oldest_message = connection.lindex(channel_key, 0)
            if oldest_message:
                messages_age = self.expiry - connection.ttl(oldest_message)
                statistics[self.STAT_MESSAGES_MAX_AGE] = max(
                    statistics[self.STAT_MESSAGES_MAX_AGE], messages_age)
        return statistics

    def _incr_statistics_counter(self, stat_name, channel, connection):
        """ helper function to intrement counter stats in one go """
        self.incrstatcounters(
            keys=[
                "{prefix}{channel}:{stat_name}".format(
                    prefix=self.stats_prefix,
                    channel=channel,
                    stat_name=stat_name,
                ), "{prefix}{global_key}:{stat_name}".format(
                    prefix=self.stats_prefix,
                    global_key=self.global_stats_key,
                    stat_name=stat_name,
                )
            ],
            args=[
                self.channel_statistics_expiry, self.global_statistics_expiry
            ],
            client=connection,
        )

    ### Serialization ###

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, encoding="utf8")

    ### Redis Lua scripts ###

    # Single-command channel send. Returns error if over capacity.
    # Keys: message, channel_list
    # Args: content, expiry, capacity
    lua_chansend = """
        if redis.call('llen', KEYS[2]) >= tonumber(ARGV[3]) then
            return redis.error_reply("full")
        end
        redis.call('set', KEYS[1], ARGV[1])
        redis.call('expire', KEYS[1], ARGV[2])
        redis.call('rpush', KEYS[2], KEYS[1])
        redis.call('expire', KEYS[2], ARGV[2] + 1)
    """

    # Single-command to increment counter stats.
    # Keys: channel_stat, global_stat
    # Args: channel_stat_expiry, global_stat_expiry
    lua_incrstatcounters = """
        redis.call('incr', KEYS[1])
        redis.call('expire', KEYS[1], ARGV[1])
        redis.call('incr', KEYS[2])
        redis.call('expire', KEYS[2], ARGV[2])

    """

    lua_lpopmany = """
        for keyCount = 1, #KEYS do
            local result = redis.call('LPOP', KEYS[keyCount])
            if result then
                return {KEYS[keyCount], result}
            end
        end
        return {nil, nil}
    """

    lua_delprefix = """
        local keys = redis.call('keys', ARGV[1])
        for i=1,#keys,5000 do
            redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
        end
    """

    ### Internal functions ###

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC, then down to one of the ring nodes.
        """
        if isinstance(value, six.text_type):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xfff
        ring_divisor = 4096 / float(self.ring_size)
        return int(bigval / ring_divisor)

    def random_index(self):
        return random.randint(0, len(self.hosts) - 1)

    def connection(self, index):
        """
        Returns the correct connection for the current thread.

        Pass key to use a server based on consistent hashing of the key value;
        pass None to use a random server instead.
        """
        # If index is explicitly None, pick a random server
        if index is None:
            index = self.random_index()
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" %
                             (self.ring_size, index))
        return self._connection_list[index]

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, six.text_type):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)
Example no. 32
 def fernet(self):
     if len(self.fernet_keys) == 1:
         return Fernet(self.fernet_keys[0])
     return MultiFernet([Fernet(k) for k in self.fernet_keys])
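
The single-key branch here is only a micro-optimization: a MultiFernet wrapping one Fernet produces and accepts exactly the same tokens, so the two branches are interchangeable. A quick sketch:

from cryptography.fernet import Fernet, MultiFernet

key = Fernet.generate_key()
single = Fernet(key)
multi = MultiFernet([Fernet(key)])

# Tokens are interchangeable; the single-key branch merely skips
# MultiFernet's try-each-key loop on decryption.
assert multi.decrypt(single.encrypt(b"x")) == b"x"
assert single.decrypt(multi.encrypt(b"x")) == b"x"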
Example no. 33
    if os.path.exists(key_path):
        os.unlink(key_path)

# generate new keys if the keygen file does not exist
if not os.path.isfile(key_path):
    count = int(config.keygen_keycount)
    logger.info('generating %d keys', count)
    with open(key_path, 'wb') as fp:
        for _ in range(count):
            fp.write(Fernet.generate_key() + b'\n')

# always load in new keys whether they were generated or stored
with open(key_path, 'rb') as fp:
    keys = list(map(bytes.strip, fp))

enc = MultiFernet([Fernet(key) for key in keys])
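
Since the first line of the key file becomes MultiFernet's encryption key, rotating keys amounts to prepending a freshly generated one; a hedged sketch (rotate_keys is illustrative, not part of this module):

from cryptography.fernet import Fernet


def rotate_keys(path):
    """Prepend a new Fernet key so future tokens use it,
    while the old keys remain in the file for decryption."""
    with open(path, 'rb') as fp:
        keys = list(map(bytes.strip, fp))
    keys.insert(0, Fernet.generate_key())
    with open(path, 'wb') as fp:
        fp.write(b'\n'.join(keys) + b'\n')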


# ~~~~~~ HASHING
@curry
def _hash(fn, buffer: Union[io.StringIO, io.BytesIO]):
    """Partial function for generating checksum of binary content."""

    buffer.seek(0)
    hashsum = fn()
    for chunk in iter(lambda: buffer.read(4096), b''):
        hashsum.update(chunk)
    return hashsum.hexdigest()
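
Because _hash is curried, fixing the hash constructor yields a reusable checksum helper. A small usage sketch, assuming a toolz-style @curry:

import hashlib
import io

sha256sum = _hash(hashlib.sha256)           # constructor fixed, buffer still open
digest = sha256sum(io.BytesIO(b"payload"))  # hex digest of the buffered bytes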


@curry
Example no. 34
 def save(self, *args, **kwargs):
     keys = kwargs.pop('keys')
     fkeys = [Fernet(key) for key in keys]
     encrypted_data = MultiFernet(fkeys).encrypt(self.data)
     self.data = encrypted_data
     return super(Record, self).save(*args, **kwargs)
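
save() above leaves only ciphertext in self.data, so reading the record back needs a matching decrypt step. A hypothetical counterpart, mirroring the key handling in save():

 def load_data(self, keys):
     """Hypothetical inverse of save(): decrypt self.data with any of the keys."""
     fkeys = [Fernet(key) for key in keys]
     return MultiFernet(fkeys).decrypt(self.data)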
Example no. 35
 def __init__(self, fernet_keys=None):
     super().__init__()
     fernet_keys = fernet_keys or settings.LAYERS_MANAGER_BALANCER_FERNET_KEYS
     self.fernet = MultiFernet([Fernet(key) for key in fernet_keys])
Example no. 36
class AutopushSettings(object):
    """Main Autopush Settings Object"""
    options = ["crypto_key", "hostname", "min_ping_interval", "max_data"]

    def __init__(
        self,
        crypto_key=None,
        datadog_api_key=None,
        datadog_app_key=None,
        datadog_flush_interval=None,
        hostname=None,
        port=None,
        router_scheme=None,
        router_hostname=None,
        router_port=None,
        endpoint_scheme=None,
        endpoint_hostname=None,
        endpoint_port=None,
        router_conf={},
        router_tablename="router",
        router_read_throughput=5,
        router_write_throughput=5,
        storage_tablename="storage",
        storage_read_throughput=5,
        storage_write_throughput=5,
        message_tablename="message",
        message_read_throughput=5,
        message_write_throughput=5,
        statsd_host="localhost",
        statsd_port=8125,
        resolve_hostname=False,
        max_data=4096,
        # Reflected up from UDP Router
        wake_timeout=0,
        env='development',
        enable_cors=False,
        s3_bucket=DEFAULT_BUCKET,
        senderid_expry=SENDERID_EXPRY,
        senderid_list={},
        hello_timeout=0,
    ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be set up and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval)
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(router_scheme or 'http',
                                        self.router_hostname, router_port)

        self.endpoint_url = canonical_url(endpoint_scheme or 'http',
                                          self.endpoint_hostname,
                                          endpoint_port)

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(storage_tablename,
                                               storage_read_throughput,
                                               storage_write_throughput)
        self.message_table = get_message_table(message_tablename,
                                               message_read_throughput,
                                               message_write_throughput)
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)
        self.message = Message(self.message_table, self.metrics)

        # Run preflight check
        preflight_check(self.storage, self.router)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self, router_conf.get("simplepush"))
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

    def update(self, **kwargs):
        """Update the arguments, if a ``crypto_key`` is in kwargs then the
        ``self.fernet`` attribute will be initialized"""
        for key, val in kwargs.items():
            if key == "crypto_key":
                fkeys = []
                if not isinstance(val, list):
                    val = [val]
                for v in val:
                    fkeys.append(Fernet(v))
                self.fernet = MultiFernet(fkeys)
            else:
                setattr(self, key, val)

    def make_endpoint(self, uaid, chid):
        """ Create an endpoint from the identifiers"""
        return self.endpoint_url + '/push/' + \
            self.fernet.encrypt((uaid + ':' + chid).encode('utf8'))
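
make_endpoint embeds the encrypted 'uaid:chid' pair at the end of the URL, so the endpoint handler can invert it with the same MultiFernet. A hedged sketch (parse_endpoint_token is illustrative, not Autopush's actual API):

def parse_endpoint_token(settings, token):
    """Hypothetical inverse of make_endpoint: recover (uaid, chid)."""
    plaintext = settings.fernet.decrypt(token).decode('utf8')
    uaid, chid = plaintext.split(':', 1)
    return uaid, chid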
Example no. 37
class RedisChannelLayer(BaseChannelLayer):
    """
    Redis channel layer.

    It routes all messages into a remote Redis server. Sharding among
    different Redis installations and message encryption are
    supported.
    """

    blpop_timeout = 5
    local_poll_interval = 0.01

    def __init__(
        self,
        hosts=None,
        prefix="asgi:",
        expiry=60,
        group_expiry=86400,
        capacity=100,
        channel_capacity=None,
        symmetric_encryption_keys=None,
    ):
        # Store basic information
        self.expiry = expiry
        self.group_expiry = group_expiry
        self.capacity = capacity
        self.channel_capacity = channel_capacity or {}
        self.prefix = prefix
        self.pools = {}
        assert isinstance(self.prefix, str), "Prefix must be unicode"
        # Configure the host objects
        self.hosts = self.decode_hosts(hosts)
        self.ring_size = len(self.hosts)
        # Normal channels choose a host index by cycling through the available hosts
        self._receive_index_generator = itertools.cycle(range(len(self.hosts)))
        self._send_index_generator = itertools.cycle(range(len(self.hosts)))
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(random.choice(string.ascii_letters) for i in range(8))
        # Set up any encryption objects
        self._setup_encryption(symmetric_encryption_keys)
        # Buffered messages by process-local channel name
        self.receive_buffer = {}
        # Coroutines currently receiving the process-local channel.
        self.receive_tasks = {}

    def decode_hosts(self, hosts):
        """
        Takes the value of the "hosts" argument passed to the class and returns
        a list of kwargs to use for the Redis connection constructor.
        """
        # If no hosts were provided, return a default value
        if not hosts:
            return {"address": ("localhost", 6379)}
        # If they provided just a string, scold them.
        if isinstance(hosts, (str, bytes)):
            raise ValueError("You must pass a list of Redis hosts, even if there is only one.")
        # Decode each hosts entry into a kwargs dict
        result = []
        for entry in hosts:
            result.append({
                "address": entry,
            })
        return result

    def _setup_encryption(self, symmetric_encryption_keys):
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, (str, bytes)):
                raise ValueError("symmetric_encryption_keys must be a list of possible keys")
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError("Cannot run with encryption without 'cryptography' installed.")
            sub_fernets = [self.make_fernet(key) for key in symmetric_encryption_keys]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    ### Channel layer API ###

    extensions = ["groups", "flush"]

    async def send(self, channel, message):
        """
        Send a message onto a (general or specific) channel.
        """
        # Typecheck
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Make sure the message does not contain reserved keys
        assert "__asgi_channel__" not in message
        # If it's a process-local channel, strip off local part and stick full name in message
        if "!" in channel:
            message = dict(message.items())
            message["__asgi_channel__"] = channel
            channel = self.non_local_name(channel)
        # Write out message into expiring key (avoids big items in list)
        channel_key = self.prefix + channel
        # Pick a connection to the right server - consistent for specific
        # channels, random for general channels
        if "!" in channel:
            index = self.consistent_hash(channel)
            pool = await self.connection(index)
        else:
            index = next(self._send_index_generator)
            pool = await self.connection(index)
        with (await pool) as connection:
            # Check the length of the list before send
            # This can allow the list to leak slightly over capacity, but that's fine.
            if await connection.llen(channel_key) >= self.get_capacity(channel):
                raise ChannelFull()
            # Push onto the list then set it to expire in case it's not consumed
            await connection.rpush(channel_key, self.serialize(message))
            await connection.expire(channel_key, int(self.expiry))

    async def receive(self, channel):
        """
        Receive the first message that arrives on the channel.
        If more than one coroutine waits on the same channel, the first waiter
        will be given the message when it arrives.
        """
        # Make sure the channel name is valid then get the non-local part
        # and thus its index
        assert self.valid_channel_name(channel)
        if "!" in channel:
            real_channel = self.non_local_name(channel)
            assert real_channel.endswith(self.client_prefix + "!"), "Wrong client prefix"
            # Make sure a receive task is running
            task = self.receive_tasks.get(real_channel, None)
            if task is not None and task.done():
                task = None
            if task is None:
                self.receive_tasks[real_channel] = asyncio.ensure_future(
                    self.receive_loop(real_channel),
                )
            # Wait on the receive buffer's contents
            return await self.receive_buffer_lpop(channel)
        else:
            # Do a plain direct receive
            return (await self.receive_single(channel))[1]

    async def receive_loop(self, channel):
        """
        Continuous-receiving loop that fetches results into the receive buffer.
        """
        assert "!" in channel, "receive_loop called on non-process-local channel"
        while True:
            # Catch RuntimeErrors from the loop stopping while we release
            # a connection. Wish there was a cleaner solution here.
            real_channel, message = await self.receive_single(channel)
            self.receive_buffer.setdefault(real_channel, []).append(message)

    async def receive_single(self, channel):
        """
        Receives a single message off of the channel and returns it.
        """
        # Check channel name
        assert self.valid_channel_name(channel, receive=True), "Channel name invalid"
        # Work out the connection to use
        if "!" in channel:
            assert channel.endswith("!")
            index = self.consistent_hash(channel)
        else:
            index = next(self._receive_index_generator)
        # Get that connection and receive off of it
        pool = await self.connection(index)
        with (await pool) as connection:
            channel_key = self.prefix + channel
            content = None
            while content is None:
                content = await connection.blpop(channel_key, timeout=self.blpop_timeout)
            # Message decode
            message = self.deserialize(content[1])
            # TODO: message expiry?
            # If there is a full channel name stored in the message, unpack it.
            if "__asgi_channel__" in message:
                channel = message["__asgi_channel__"]
                del message["__asgi_channel__"]
            return channel, message

    async def receive_buffer_lpop(self, channel):
        """
        Atomic, async method that returns the left-hand item in a receive buffer.
        """
        # TODO: Use locks or something, not a poll
        while True:
            if self.receive_buffer.get(channel, None):
                message = self.receive_buffer[channel][0]
                if len(self.receive_buffer[channel]) == 1:
                    del self.receive_buffer[channel]
                else:
                    self.receive_buffer[channel] = self.receive_buffer[channel][1:]
                return message
            else:
                # See if we need to propagate a dead receiver exception
                real_channel = self.non_local_name(channel)
                task = self.receive_tasks.get(real_channel, None)
                if task is not None and task.done():
                    task.result()
                # Sleep poll
                await asyncio.sleep(self.local_poll_interval)

    async def new_channel(self, prefix="specific"):
        """
        Returns a new channel name that can be used by something in our
        process as a specific channel.
        """
        # TODO: Guarantee uniqueness better?
        return "%s.%s!%s" % (
            prefix,
            self.client_prefix,
            "".join(random.choice(string.ascii_letters) for i in range(12)),
        )

    ### Flush extension ###

    async def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        # Lua deletion script
        delete_prefix = """
            local keys = redis.call('keys', ARGV[1])
            for i=1,#keys,5000 do
                redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
            end
        """
        # Go through each connection and remove all with prefix
        for i in range(self.ring_size):
            connection = await self.connection(i)
            await connection.eval(
                delete_prefix,
                keys=[],
                args=[self.prefix + "*"]
            )

    async def close(self):
        # Stop all reader tasks
        for task in self.receive_tasks.values():
            task.cancel()
        if self.receive_tasks:
            await asyncio.wait(self.receive_tasks.values())
        self.receive_tasks = {}
        # Close up all pools
        for pool in self.pools.values():
            pool.close()
            await pool.wait_closed()

    ### Groups extension ###

    async def group_add(self, group, channel):
        """
        Adds the channel name to a group.
        """
        # Check the inputs
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Get a connection to the right shard
        group_key = self._group_key(group)
        pool = await self.connection(self.consistent_hash(group))
        with (await pool) as connection:
            # Add to group sorted set with creation time as timestamp
            await connection.zadd(
                group_key,
                time.time(),
                channel,
            )
            # Set expiration to be group_expiry, since everything in
            # it at this point is guaranteed to expire before that
            await connection.expire(group_key, self.group_expiry)

    async def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        pool = await self.connection(self.consistent_hash(group))
        await pool.zrem(
            key,
            channel,
        )

    async def group_send(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # Retrieve list of all channel names
        key = self._group_key(group)
        pool = await self.connection(self.consistent_hash(group))
        with (await pool) as connection:
            # Discard old channels based on group_expiry
            await connection.zremrangebyscore(key, min=0, max=int(time.time()) - self.group_expiry)
            # Return current lot
            channel_names = [
                x.decode("utf8") for x in
                await connection.zrange(key, 0, -1)
            ]
        # TODO: More efficient implementation (lua script per shard?)
        for channel in channel_names:
            try:
                await self.send(channel, message)
            except ChannelFull:
                pass

    def _group_key(self, group):
        """
        Common function to make the storage key for the group.
        """
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    ### Serialization ###

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, encoding="utf8")

    ### Internal functions ###

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC, then down to one of the ring nodes.
        """
        if isinstance(value, str):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xfff
        ring_divisor = 4096 / float(self.ring_size)
        return int(bigval / ring_divisor)

    async def connection(self, index):
        """
        Returns the correct connection for the index given.
        Lazily instantiates pools.
        """
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" % (self.ring_size, index))
        # Make a pool if needed and return it
        if index not in self.pools:
            self.pools[index] = await aioredis.create_redis_pool(**self.hosts[index])
        return self.pools[index]

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, str):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)
Example no. 38
class AutopushConfig(object):
    """Main Autopush Settings Object"""

    debug = attrib(default=False)  # type: bool

    fernet = attrib(init=False)  # type: MultiFernet
    _crypto_key = attrib(
        converter=_init_crypto_key, default=None)  # type: List[str]

    bear_hash_key = attrib(default=Factory(list))  # type: List[str]
    human_logs = attrib(default=True)  # type: bool

    hostname = attrib(default=None)  # type: Optional[str]
    port = attrib(default=None)  # type: Optional[int]
    _resolve_hostname = attrib(default=False)  # type: bool

    router_scheme = attrib(default=None)  # type: Optional[str]
    router_hostname = attrib(default=None)  # type: Optional[str]
    router_port = attrib(default=None)  # type: Optional[int]

    endpoint_scheme = attrib(default=None)  # type: Optional[str]
    endpoint_hostname = attrib(default=None)  # type: Optional[str]
    endpoint_port = attrib(default=None)  # type: Optional[int]

    proxy_protocol_port = attrib(default=None)  # type: Optional[int]
    memusage_port = attrib(default=None)  # type: Optional[int]

    statsd_host = attrib(default="localhost")  # type: str
    statsd_port = attrib(default=8125)  # type: int
    megaphone_api_url = attrib(default=None)  # type: Optional[str]
    megaphone_api_token = attrib(default=None)  # type: Optional[str]
    megaphone_poll_interval = attrib(default=30)  # type: int

    datadog_api_key = attrib(default=None)  # type: Optional[str]
    datadog_app_key = attrib(default=None)  # type: Optional[str]
    datadog_flush_interval = attrib(default=None)  # type: Optional[int]

    router_table = _nested(
        DDBTableConfig,
        default=dict(tablename="router")
    )  # type: DDBTableConfig
    message_table = _nested(
        DDBTableConfig,
        default=dict(tablename="message")
    )  # type: DDBTableConfig

    preflight_uaid = attrib(
        default="deadbeef00000000deadbeef00000000")  # type: str

    ssl = _nested(SSLConfig, default=Factory(SSLConfig))  # type: SSLConfig
    router_ssl = _nested(
        SSLConfig, default=Factory(SSLConfig))  # type: SSLConfig
    client_certs = attrib(default=None)  # type: Optional[Dict[str, str]]

    router_url = attrib(init=False)  # type: str
    endpoint_url = attrib(init=False)  # type: str
    ws_url = attrib(init=False)  # type: str

    router_conf = attrib(default=Factory(dict))  # type: JSONDict

    # twisted Agent's connectTimeout
    connect_timeout = attrib(default=0.5)  # type: float
    max_data = attrib(default=4096)  # type: int
    env = attrib(default='development')  # type: str
    ami_id = attrib(default=None)  # type: Optional[str]
    cors = attrib(default=False)  # type: bool

    hello_timeout = attrib(default=0)  # type: int
    # Force timeout in idle seconds
    msg_limit = attrib(default=100)  # type: int
    auto_ping_interval = attrib(default=None)  # type: Optional[int]
    auto_ping_timeout = attrib(default=None)  # type: Optional[int]
    max_connections = attrib(default=None)  # type: Optional[int]
    close_handshake_timeout = attrib(default=None)  # type: Optional[int]

    # Generate messages per legacy rules, only used for testing to
    # generate legacy data.
    _notification_legacy = attrib(default=False)  # type: bool

    # Use the cryptography library
    use_cryptography = attrib(default=False)  # type: bool

    # Strict-Transport-Security max age (Default 1 year in secs)
    sts_max_age = attrib(default=31536000)  # type: int

    # Don't cache ssl.wrap_socket's SSLContexts
    no_sslcontext_cache = attrib(default=False)  # type: bool

    # DynamoDB endpoint override
    aws_ddb_endpoint = attrib(default=None)  # type: str

    allow_table_rotation = attrib(default=True)  # type: bool

    def __attrs_post_init__(self):
        """Initialize the Settings object"""
        # Setup hosts/ports/urls
        if not self.hostname:
            self.hostname = socket.gethostname()
        if self._resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        if not self.endpoint_hostname:
            self.endpoint_hostname = self.hostname
        if not self.router_hostname:
            self.router_hostname = self.hostname

        self.router_url = canonical_url(
            self.router_scheme or 'http',
            self.router_hostname,
            self.router_port
        )
        self.endpoint_url = canonical_url(
            self.endpoint_scheme or 'http',
            self.endpoint_hostname,
            self.endpoint_port
        )
        # not accurate under autoendpoint (like router_url)
        self.ws_url = "{}://{}:{}/".format(
            'wss' if self.ssl.key else 'ws',
            self.hostname,
            self.port
        )

        self.fernet = MultiFernet([Fernet(key) for key in self._crypto_key])

    @property
    def enable_tls_auth(self):
        """Whether TLS authentication w/ client certs is enabled"""
        return self.client_certs is not None

    @classmethod
    def from_argparse(cls, ns, **kwargs):
        # type: (Namespace, **Any) -> AutopushConfig
        """Create an instance from argparse/additional kwargs"""
        router_conf = {}
        if ns.key_hash:
            db.key_hash = ns.key_hash
        if ns.apns_creds:
            # if you have the critical elements for each external
            # router, create it
            try:
                router_conf["apns"] = json.loads(ns.apns_creds)
            except (ValueError, TypeError):
                raise InvalidConfig(
                    "Invalid JSON specified for APNS config options")
        if ns.senderid_list:
            # Create a common gcmclient
            try:
                sender_ids = json.loads(ns.senderid_list)
            except (ValueError, TypeError):
                raise InvalidConfig("Invalid JSON specified for senderid_list")
            try:
                # This is an init check to verify that things are
                # configured correctly. Otherwise errors may creep in
                # later that go unaccounted.
                sender_ids[sender_ids.keys()[0]]
            except (IndexError, TypeError):
                raise InvalidConfig("No GCM SenderIDs specified or found.")
            router_conf["gcm"] = {"ttl": ns.gcm_ttl,
                                  "dryrun": ns.gcm_dryrun,
                                  "max_data": ns.max_data,
                                  "collapsekey": ns.gcm_collapsekey,
                                  "senderIDs": sender_ids,
                                  "endpoint": ns.gcm_endpoint}
        client_certs = None
        # endpoint only
        if getattr(ns, 'client_certs', None):
            try:
                client_certs_arg = json.loads(ns.client_certs)
            except (ValueError, TypeError):
                raise InvalidConfig("Invalid JSON specified for client_certs")
            if client_certs_arg:
                if not ns.ssl_key:
                    raise InvalidConfig("client_certs specified without SSL "
                                        "enabled (no ssl_key specified)")
                client_certs = {}
                for name, sigs in client_certs_arg.iteritems():
                    if not isinstance(sigs, list):
                        raise InvalidConfig(
                            "Invalid JSON specified for client_certs")
                    for sig in sigs:
                        sig = sig.upper()
                        if (not name or not CLIENT_SHA256_RE.match(sig) or
                                sig in client_certs):
                            raise InvalidConfig(
                                "Invalid client_certs argument")
                        client_certs[sig] = name

        if ns.fcm_creds:
            try:
                router_conf["fcm"] = {
                    "version": ns.fcm_version,
                    "ttl": ns.fcm_ttl,
                    "dryrun": ns.fcm_dryrun,
                    "max_data": ns.max_data,
                    "collapsekey": ns.fcm_collapsekey,
                    "creds": json.loads(ns.fcm_creds)
                }
                if not router_conf["fcm"]["creds"]:
                    raise InvalidConfig(
                        "Empty credentials for FCM config options"
                    )
                for creds in router_conf["fcm"]["creds"].values():
                    if "auth" not in creds:
                        raise InvalidConfig(
                            "Missing auth for FCM config options"
                        )
            except (ValueError, TypeError):
                raise InvalidConfig(
                    "Invalid JSON specified for FCM config options"
                )

        if ns.adm_creds:
            # Create a common admclient
            try:
                router_conf["adm"] = json.loads(ns.adm_creds)
            except (ValueError, TypeError):
                raise InvalidConfig(
                    "Invalid JSON specified for ADM config options")

        ami_id = None
        # Not a fan of double negatives, but this makes the args
        # more understandable
        if not ns.no_aws:
            ami_id = get_amid() or "Unknown"

        allow_table_rotation = not ns.no_table_rotation
        return cls(
            crypto_key=ns.crypto_key,
            datadog_api_key=ns.datadog_api_key,
            datadog_app_key=ns.datadog_app_key,
            datadog_flush_interval=ns.datadog_flush_interval,
            hostname=ns.hostname,
            statsd_host=ns.statsd_host,
            statsd_port=ns.statsd_port,
            router_conf=router_conf,
            resolve_hostname=ns.resolve_hostname,
            ami_id=ami_id,
            client_certs=client_certs,
            msg_limit=ns.msg_limit,
            connect_timeout=ns.connection_timeout,
            memusage_port=ns.memusage_port,
            use_cryptography=ns.use_cryptography,
            no_sslcontext_cache=ns._no_sslcontext_cache,
            router_table=dict(
                tablename=ns.router_tablename,
                read_throughput=ns.router_read_throughput,
                write_throughput=ns.router_write_throughput
            ),
            message_table=dict(
                tablename=ns.message_tablename,
                read_throughput=ns.message_read_throughput,
                write_throughput=ns.message_write_throughput
            ),
            ssl=dict(
                key=ns.ssl_key,
                cert=ns.ssl_cert,
                dh_param=ns.ssl_dh_param
            ),
            sts_max_age=ns.sts_max_age,
            allow_table_rotation=allow_table_rotation,
            **kwargs
        )

    def make_endpoint(self, uaid, chid, key=None):
        """Create an v1 or v2 WebPush endpoint from the identifiers.

        Both endpoints use bytes instead of hex to reduce ID length.
        v1 is the uaid + chid
        v2 is the uaid + chid + sha256(key).bytes

        :param uaid: User Agent Identifier
        :param chid: Channel or Subscription ID
        :param key: Optional Base64 URL-encoded application server key
        :returns: Push endpoint

        """
        root = self.endpoint_url + '/wpush/'
        base = (uaid.replace('-', '').decode("hex") +
                chid.replace('-', '').decode("hex"))

        if key is None:
            return root + 'v1/' + self.fernet.encrypt(base).strip('=')

        raw_key = base64url_decode(key.encode('utf8'))
        ep = self.fernet.encrypt(base + sha256(raw_key).digest()).strip('=')
        return root + 'v2/' + ep

    def parse_endpoint(self, metrics, token, version="v1", ckey_header=None,
                       auth_header=None):
        """Parse an endpoint into component elements of UAID, CHID and optional
        key hash if v2

        :param token: The obscured subscription data.
        :param version: This is the API version of the token.
        :param ckey_header: the Crypto-Key header bearing the public key
            (from Crypto-Key: p256ecdsa=)
        :param auth_header: The Authorization header bearing the VAPID info

        :raises ValueError: In the case of a malformed endpoint.

        :returns: a dict containing (uaid=UAID, chid=CHID, public_key=KEY)

        """
        token = self.fernet.decrypt(repad(token).encode('utf8'))
        public_key = None
        if ckey_header:
            try:
                crypto_key = CryptoKey(ckey_header)
            except CryptoKeyException:
                raise InvalidTokenException("Invalid key data")
            public_key = crypto_key.get_label('p256ecdsa')
        if auth_header:
            vapid_auth = parse_auth_header(auth_header)
            if not vapid_auth:
                raise VapidAuthException("Invalid Auth token")
            metrics.increment("notification.auth",
                              tags="vapid:{version},scheme:{scheme}".format(
                                  **vapid_auth
                              ).split(","))
            # pull the public key from the VAPID auth header if needed
            try:
                if vapid_auth['version'] != 1:
                    public_key = vapid_auth['k']
            except KeyError:
                raise VapidAuthException("Missing Public Key")
        if version == 'v1' and len(token) != 32:
            raise InvalidTokenException("Corrupted push token")
        if version == 'v2':
            if not auth_header:
                raise VapidAuthException("Missing Authorization Header")
            if len(token) != 64:
                raise InvalidTokenException("Corrupted push token")
            if not public_key:
                raise VapidAuthException("Invalid key data")
            try:
                decoded_key = base64url_decode(public_key)
            except TypeError:
                raise VapidAuthException("Invalid key data")
            if not constant_time.bytes_eq(sha256(decoded_key).digest(),
                                          token[32:]):
                raise VapidAuthException("Key mismatch")
        return dict(uaid=token[:16].encode('hex'),
                    chid=token[16:32].encode('hex'),
                    version=version,
                    public_key=public_key)
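
A minimal sketch of the v1/v2 token layout that make_endpoint and
parse_endpoint implement above, using only the cryptography library; the
uaid, chid and application-server-key values here are made up for
illustration:

import base64
from hashlib import sha256
from cryptography.fernet import Fernet, MultiFernet

fernet = MultiFernet([Fernet(Fernet.generate_key())])

uaid = "d9b746444f9746aab8fa9393985cd6cd"   # hypothetical identifiers
chid = "75d9d0c2bc5f4f4997ebca8e31362e28"

# v1 plaintext: 16 bytes of uaid + 16 bytes of chid (32 bytes total)
base = bytes.fromhex(uaid) + bytes.fromhex(chid)
v1_token = fernet.encrypt(base).rstrip(b"=")

# v2 plaintext: the same 32 bytes + sha256 of the application server key
app_server_key = b"hypothetical-p256-public-key-bytes"
v2_token = fernet.encrypt(base + sha256(app_server_key).digest()).rstrip(b"=")

# Decrypting after repadding recovers the identifiers
repadded = v1_token + b"=" * (-len(v1_token) % 4)
plain = fernet.decrypt(repadded)
assert plain[:16].hex() == uaid and plain[16:32].hex() == chid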
Exemplo n.º 39
0
 def __init__(self, key, *old_keys):
     keys = [key] + list(old_keys)
     self.fernet = MultiFernet([Fernet(k) for k in keys])
Exemplo n.º 40
0
 def test_no_fernets(self, backend):
     with pytest.raises(ValueError):
         MultiFernet([])
Exemplo n.º 41
0
class AutopushSettings(object):
    """Main Autopush Settings Object"""
    options = ["crypto_key", "hostname", "min_ping_interval",
               "max_data"]

    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 message_tablename="message",
                 message_read_throughput=5,
                 message_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 # Reflected up from UDP Router
                 wake_timeout=0,
                 env='development',
                 enable_cors=False,
                 s3_bucket=DEFAULT_BUCKET,
                 senderid_expry=SENDERID_EXPRY,
                 senderid_list={},
                 hello_timeout=0,
                 auth_key=None,
                 ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if auth_key is None:
            auth_key = []
        if not isinstance(auth_key, list):
            auth_key = [auth_key]
        self.auth_key = auth_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(
            storage_tablename,
            storage_read_throughput,
            storage_write_throughput)
        self.message_table = get_rotating_message_table(
            message_tablename)
        self._message_prefix = message_tablename
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)

        # Used to determine whether a connection is out of date with current
        # db objects
        self.current_msg_month = make_rotating_tablename(self._message_prefix)
        self.current_month = datetime.date.today().month
        self.create_initial_message_tables()

        # Run preflight check
        preflight_check(self.storage, self.router)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self,
            router_conf.get("simplepush")
        )
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

    @property
    def message(self):
        """Property that access the current message table"""
        return self.message_tables[self.current_msg_month]

    @message.setter
    def message(self, value):
        """Setter to set the current message table"""
        self.message_tables[self.current_msg_month] = value

    def create_initial_message_tables(self):
        """Initializes a dict of the initial rotating messages tables.

        An entry for last months table, and an entry for this months table.

        """
        last_month = get_rotating_message_table(self._message_prefix, -1)
        this_month = get_rotating_message_table(self._message_prefix)
        self.message_tables = {
            last_month.table_name: Message(last_month, self.metrics),
            this_month.table_name: Message(this_month, self.metrics),
        }

    @inlineCallbacks
    def update_rotating_tables(self):
        """This method is intended to be tasked to run periodically off the
        twisted event hub to rotate tables.

        When today is a new month from yesterday, then we swap out all the
        table objects on the settings object.

        """
        today = datetime.date.today()
        if today.month == self.current_month:
            # No change in month, we're fine.
            returnValue(False)

        # Get tables for the new month, and verify they exist before we try to
        # switch over
        message_table = yield deferToThread(get_rotating_message_table,
                                            self._message_prefix)

        # Both tables found, safe to switch-over
        self.current_month = today.month
        self.current_msg_month = message_table.table_name
        self.message_tables[self.current_msg_month] = \
            Message(message_table, self.metrics)
        returnValue(True)

    def update(self, **kwargs):
        """Update the arguments, if a ``crypto_key`` is in kwargs then the
        ``self.fernet`` attribute will be initialized"""
        for key, val in kwargs.items():
            if key == "crypto_key":
                fkeys = []
                if not isinstance(val, list):
                    val = [val]
                for v in val:
                    fkeys.append(Fernet(v))
                self.fernet = MultiFernet(fkeys)
            else:
                setattr(self, key, val)

    def make_endpoint(self, uaid, chid):
        """ Create an endpoint from the identifiers"""
        return self.endpoint_url + '/push/' + \
            self.fernet.encrypt((uaid + ':' + chid).encode('utf8'))
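
The update() hook above is what enables key rotation: MultiFernet encrypts
with the first key in the list but tries every key on decrypt, so prepending
a fresh key keeps endpoints minted under the old key decryptable. A small
sketch of that rotation, independent of the settings object:

from cryptography.fernet import Fernet, MultiFernet

old_key = Fernet.generate_key()
token = MultiFernet([Fernet(old_key)]).encrypt(b"uaid:chid")

# Rotate: new key first (used for new tokens), old key kept for decryption.
new_key = Fernet.generate_key()
rotated = MultiFernet([Fernet(new_key), Fernet(old_key)])

assert rotated.decrypt(token) == b"uaid:chid"  # old tokens still readable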
Exemplo n.º 42
0
class AutopushSettings(object):
    """Main Autopush Settings Object"""
    options = ["crypto_key", "hostname", "min_ping_interval",
               "max_data"]

    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 message_tablename="message",
                 message_read_throughput=5,
                 message_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 # Reflected up from UDP Router
                 wake_timeout=0,
                 env='development',
                 enable_cors=False,
                 s3_bucket=DEFAULT_BUCKET,
                 senderid_expry=SENDERID_EXPRY,
                 senderid_list={},
                 hello_timeout=0,
                 auth_key=None,
                 ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if auth_key is None:
            auth_key = []
        if not isinstance(auth_key, list):
            auth_key = [auth_key]
        self.auth_key = auth_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(storage_tablename,
                                               storage_read_throughput,
                                               storage_write_throughput)
        self.message_table = get_message_table(message_tablename,
                                               message_read_throughput,
                                               message_write_throughput)
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)
        self.message = Message(self.message_table, self.metrics)

        # Run preflight check
        preflight_check(self.storage, self.router)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self,
            router_conf.get("simplepush")
        )
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

    def update(self, **kwargs):
        """Update the arguments, if a ``crypto_key`` is in kwargs then the
        ``self.fernet`` attribute will be initialized"""
        for key, val in kwargs.items():
            if key == "crypto_key":
                fkeys = []
                if not isinstance(val, list):
                    val = [val]
                for v in val:
                    fkeys.append(Fernet(v))
                self.fernet = MultiFernet(fkeys)
            else:
                setattr(self, key, val)

    def make_endpoint(self, uaid, chid):
        """ Create an endpoint from the identifiers"""
        return self.endpoint_url + '/push/' + \
            self.fernet.encrypt((uaid + ':' + chid).encode('utf8'))
Exemplo n.º 43
0
 def _fernet_default(self):
     if cryptography is None or not self.keys:
         return None
     return MultiFernet(
         [Fernet(base64.urlsafe_b64encode(key)) for key in self.keys])
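
_fernet_default works because Fernet only accepts a 32-byte key in URL-safe
base64 form, so raw key bytes must be encoded first. The same conversion in
isolation, assuming 32 bytes of raw key material:

import base64
import os
from cryptography.fernet import Fernet, MultiFernet

raw_keys = [os.urandom(32), os.urandom(32)]  # illustrative raw keys
crypter = MultiFernet(
    [Fernet(base64.urlsafe_b64encode(key)) for key in raw_keys])
assert crypter.decrypt(crypter.encrypt(b"payload")) == b"payload"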
Exemplo n.º 44
0
class AutopushSettings(object):
    """Main Autopush Settings Object"""
    options = ["crypto_key", "hostname", "min_ping_interval",
               "max_data"]

    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 message_tablename="message",
                 message_read_throughput=5,
                 message_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 # Reflected up from UDP Router
                 wake_timeout=0,
                 env='development',
                 enable_cors=False,
                 s3_bucket=DEFAULT_BUCKET,
                 senderid_expry=SENDERID_EXPRY,
                 senderid_list={},
                 hello_timeout=0,
                 bear_hash_key=None,
                 preflight_uaid="deadbeef00000000deadbeef000000000",
                 ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if bear_hash_key is None:
            bear_hash_key = []
        if not isinstance(bear_hash_key, list):
            bear_hash_key = [bear_hash_key]
        self.bear_hash_key = bear_hash_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(
            storage_tablename,
            storage_read_throughput,
            storage_write_throughput)
        self.message_table = get_rotating_message_table(
            message_tablename)
        self._message_prefix = message_tablename
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)

        # Used to determine whether a connection is out of date with current
        # db objects. There are three noteworthy cases:
        # 1. "Last Month": the table requires a rollover.
        # 2. "This Month": the most common case.
        # 3. "Next Month": the system will soon roll over, and some nodes may
        #    switch sooner than others. Ensuring the next month's table is
        #    present before the switchover is the main reason for this case.
        self.create_initial_message_tables()

        # Run preflight check
        preflight_check(self.storage, self.router, preflight_uaid)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self,
            router_conf.get("simplepush")
        )
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

    @property
    def message(self):
        """Property that access the current message table"""
        return self.message_tables[self.current_msg_month]

    @message.setter
    def message(self, value):
        """Setter to set the current message table"""
        self.message_tables[self.current_msg_month] = value

    def _tomorrow(self):
        return datetime.date.today() + datetime.timedelta(days=1)

    def create_initial_message_tables(self):
        """Initializes a dict of the initial rotating messages tables.

        An entry for last months table, an entry for this months table,
        an entry for tomorrow, if tomorrow is a new month.

        """
        today = datetime.date.today()
        last_month = get_rotating_message_table(self._message_prefix, -1)
        this_month = get_rotating_message_table(self._message_prefix)
        self.current_month = today.month
        self.current_msg_month = this_month.table_name
        self.message_tables = {
            last_month.table_name: Message(last_month, self.metrics),
            this_month.table_name: Message(this_month, self.metrics)
        }
        if self._tomorrow().month != today.month:
            next_month = get_rotating_message_table(delta=1)
            self.message_tables[next_month.table_name] = Message(
                next_month, self.metrics)

    @inlineCallbacks
    def update_rotating_tables(self):
        """This method is intended to be tasked to run periodically off the
        twisted event hub to rotate tables.

        When today is a new month from yesterday, then we swap out all the
        table objects on the settings object.

        """
        today = datetime.date.today()
        tomorrow = self._tomorrow()
        if ((tomorrow.month != today.month) and
                sorted(self.message_tables.keys())[-1] !=
                tomorrow.month):
            next_month = get_rotating_message_table(
                self._message_prefix, 0, tomorrow)
            self.message_tables[next_month.table_name] = Message(
                next_month, self.metrics)

        if today.month == self.current_month:
            # No change in month, we're fine.
            returnValue(False)

        # Get tables for the new month, and verify they exist before we try to
        # switch over
        message_table = yield deferToThread(get_rotating_message_table,
                                            self._message_prefix)

        # Both tables found, safe to switch-over
        self.current_month = today.month
        self.current_msg_month = message_table.table_name
        self.message_tables[self.current_msg_month] = \
            Message(message_table, self.metrics)
        returnValue(True)

    def update(self, **kwargs):
        """Update the arguments, if a ``crypto_key`` is in kwargs then the
        ``self.fernet`` attribute will be initialized"""
        for key, val in kwargs.items():
            if key == "crypto_key":
                fkeys = []
                if not isinstance(val, list):
                    val = [val]
                for v in val:
                    fkeys.append(Fernet(v))
                self.fernet = MultiFernet(fkeys)
            else:
                setattr(self, key, val)

    def make_simplepush_endpoint(self, uaid, chid):
        """Create a simplepush endpoint"""
        root = self.endpoint_url + "/spush/"
        base = (uaid.replace('-', '').decode("hex") +
                chid.replace('-', '').decode("hex"))
        return root + 'v1/' + self.fernet.encrypt(base).strip('=')

    def make_endpoint(self, uaid, chid, key=None):
        """Create an v1 or v2 WebPush endpoint from the identifiers.

        Both endpoints use bytes instead of hex to reduce ID length.
        v0 is uaid.hex + ':' + chid.hex and is deprecated.
        v1 is the uaid + chid
        v2 is the uaid + chid + sha256(key).bytes

        :param uaid: User Agent Identifier
        :param chid: Channel or Subscription ID
        :param key: Optional Base64 URL-encoded application server key
        :returns: Push endpoint

        """
        root = self.endpoint_url + '/push/'
        base = (uaid.replace('-', '').decode("hex") +
                chid.replace('-', '').decode("hex"))

        if key is None:
            return root + 'v1/' + self.fernet.encrypt(base).strip('=')

        raw_key = base64url_decode(key.encode('utf8'))
        ep = self.fernet.encrypt(base + sha256(raw_key).digest()).strip('=')
        return root + 'v2/' + ep

    def parse_endpoint(self, token, version="v0", ckey_header=None):
        """Parse an endpoint into component elements of UAID, CHID and optional
        key hash if v2

        :param token: The obscured subscription data.
        :param version: This is the API version of the token.
        :param ckey_header: the Crypto-Key header bearing the public key
            (from Crypto-Key: p256ecdsa=)

        :raises ValueError: In the case of a malformed endpoint.

        :returns: a dict containing (uaid=UAID, chid=CHID, public_key=KEY)

        """
        token = self.fernet.decrypt(repad(token).encode('utf8'))
        public_key = None
        if ckey_header:
            try:
                crypto_key = CryptoKey(ckey_header)
            except CryptoKeyException:
                raise InvalidTokenException("Invalid key data")
            label = crypto_key.get_label('p256ecdsa')
            try:
                public_key = base64url_decode(label)
            except Exception:
                # Ignore missing and malformed app server keys.
                pass

        if version == 'v0':
            if not VALID_V0_TOKEN.match(token):
                raise InvalidTokenException("Corrupted push token")
            items = token.split(':')
            return dict(uaid=items[0], chid=items[1], public_key=public_key)
        if version == 'v1' and len(token) != 32:
            raise InvalidTokenException("Corrupted push token")
        if version == 'v2':
            if len(token) != 64:
                raise InvalidTokenException("Corrupted push token")
            if not public_key:
                raise InvalidTokenException("Invalid key data")
            if not constant_time.bytes_eq(sha256(public_key).digest(),
                                          token[32:]):
                raise InvalidTokenException("Key mismatch")
        return dict(uaid=token[:16].encode('hex'),
                    chid=token[16:32].encode('hex'),
                    public_key=public_key)
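
Note that the v2 branch above compares key hashes with
constant_time.bytes_eq rather than ==, so the comparison takes the same time
regardless of where the inputs first differ and leaks no timing information.
The same check in isolation, with made-up key bytes:

from hashlib import sha256
from cryptography.hazmat.primitives import constant_time

presented_key = b"hypothetical-app-server-key"    # from the Crypto-Key header
token_key_hash = sha256(presented_key).digest()   # last 32 bytes of the token

assert constant_time.bytes_eq(sha256(presented_key).digest(), token_key_hash)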
Exemplo n.º 45
0
def a(message):
    conn,cur=sql()
    if message.content_type=="document":
        file_info = bot.get_file(message.document.file_id)
        downloaded_file = bot.download_file(file_info.file_path)
        with open(message.document.file_name, 'wb') as new_file:
            new_file.write(downloaded_file)
        markup=types.InlineKeyboardMarkup()
        markup.add(types.InlineKeyboardButton("Encrypt",callback_data="En_%s,%s"%(str(message.chat.id),message.document.file_name)),
                    types.InlineKeyboardButton("Decrypt",callback_data="De_%s,%s"%(str(message.chat.id),message.document.file_name)))
        bot.send_message(message.chat.id,"what do you want to do ?!",reply_to_message_id=message.message_id,reply_markup=markup)
    elif message.content_type=="photo":
        cur.execute("SELECT * FROM data where userid='%s';"%str(message.chat.id))
        r=cur.fetchall()
        if len(r)>0:
            userid=r[0][0]
            file_add=r[0][1]
            if r[0][3]=="dQ":
                try:    
                    bot.reply_to(message,"processing please wait ....")
                    file_info=bot.get_file(message.photo[-1].file_id)
                    downloaded_file = bot.download_file(file_info.file_path)
                    with open(str(message.chat.id)+'.jpg', 'wb') as new_file:
                        new_file.write(downloaded_file)
                    with open(str(message.chat.id)+'.jpg', 'rb') as image_file:
                        image = Image.open(image_file)
                        image.load()
                    codes = zbarlight.scan_codes('qrcode', image)
                    key2 = Fernet(codes[0])
                    f = MultiFernet([key2])
                    img = Image.open(file_add)
                    arr = np.array(img)
                    temp='';token=''
                    for row in arr:
                        for p in row:
                            if int(p[0])+int(p[1])+int(p[2]) <382:
                                temp+='0'
                            else:
                                temp+='1'
                            if len(temp)==8:
                                token+= chr(int(base2.decode(temp)))
                                temp=''
                    a=f.decrypt(str.encode(token))
                    w=open('de_'+file_add,'wb')
                    w.write(a)
                    w.close()
                    bot.send_photo(userid,open('de_'+file_add,'rb'))
                    os.remove(file_add)
                    os.remove(str(message.chat.id)+'.jpg')
                    os.remove('de_'+file_add)
                except Exception as e:
                    bot.reply_to(message,"QR-key is wrong...")
                    os.remove(file_add)
                    os.remove(str(message.chat.id)+'.jpg')
                
        else:
            bot.reply_to(message ,"?!!...")
    elif message.content_type=="text":
        cur.execute("SELECT * FROM data where userid='%s';"%str(message.chat.id))
        r=cur.fetchall()
        if len(r)>0:
            bot.reply_to(message,"processing please wait ....")
            userid=r[0][0]
            file_add=r[0][1]
            password = str.encode(message.text)
            salt = b'01s39*-/ q2@1z6!'
            kdf = PBKDF2HMAC(
                algorithm=hashes.SHA256(),
                length=32,
                salt=salt,
                iterations=100000,
                backend=default_backend())
            key = base64.urlsafe_b64encode(kdf.derive(password))
            f = MultiFernet([Fernet(key)])
            if r[0][3]=="de":                
                img = Image.open(file_add)
                arr = np.array(img)
                temp='';token=''
                for row in arr:
                    for p in row:
                        if int(p[0])+int(p[1])+int(p[2]) <382:
                            temp+='0'
                        else:
                            temp+='1'
                        if len(temp)==8:
                            token+= chr(int(base2.decode(temp)))
                            temp=''
                try:
                    a=f.decrypt(str.encode(token))
                    w=open('de_%s'%file_add,'wb')
                    w.write(a)
                    w.close()
                    bot.send_photo(userid,open('de_%s'%file_add,'rb'))
                    os.remove(file_add)
                    os.remove('de_'+file_add)
                except Exception as e:
                    bot.reply_to(message,"passkey is wrong...")
                    os.remove(file_add)
            elif r[0][3]=="en":
                w=open(file_add,'rb')
                b = f.encrypt(w.read())
                h=int(math.sqrt(len(b)*8)+1)
                data = np.zeros((h, h, 3), dtype=np.uint8)
                i=0;j=0
                for byte in b:
                    # Pad to exactly 8 bits so full-width bytes are not skipped.
                    a=("00000000"+base2.encode(byte))[-8:]
                    for bit in a:
                        if bit=='1':
                            data[j,i]=(255,255,255)
                        else:
                            data[j,i]=(0,0,0)
                        if i+1<h:
                            i+=1
                        else:
                            i=0
                            j+=1
                img = Image.fromarray(data,"RGB")
                img.save('en_%s'%file_add)
                bot.send_document(userid,open('en_%s'%file_add,'rb'),caption="")
                os.remove(file_add)
                os.remove('en_%s'%file_add)
            cur.execute("DELETE FROM data WHERE userid='%s';"%str(userid))
            conn.commit()
        else:
            bot.reply_to(message ,"?!!...")
Exemplo n.º 46
0
class RedisChannelLayer(BaseChannelLayer):
    """
    Redis-backed channel layer. Supports sharding across multiple Redis hosts
    and optional symmetric encryption of messages.
    """

    blpop_timeout = 5

    def __init__(self,
                 expiry=60,
                 hosts=None,
                 prefix="asgi:",
                 group_expiry=86400,
                 capacity=100,
                 channel_capacity=None,
                 symmetric_encryption_keys=None):
        super(RedisChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,
            capacity=capacity,
            channel_capacity=channel_capacity,
        )
        # Make sure they provided some hosts, or provide a default
        if not hosts:
            hosts = [("localhost", 6379)]
        self.hosts = []
        for entry in hosts:
            if isinstance(entry, six.string_types):
                self.hosts.append(entry)
            else:
                self.hosts.append("redis://%s:%d/0" % (entry[0], entry[1]))
        self.prefix = prefix
        assert isinstance(self.prefix, six.text_type), "Prefix must be unicode"
        # Precalculate some values for ring selection
        self.ring_size = len(self.hosts)
        self.ring_divisor = int(math.ceil(4096 / float(self.ring_size)))
        # Create connections ahead of time (they won't call out just yet, but
        # we want to connection-pool them later)
        self._connection_list = [
            redis.Redis.from_url(host) for host in self.hosts
        ]
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(
            random.choice(string.ascii_letters) for i in range(8))
        # Register scripts
        connection = self.connection(None)
        self.chansend = connection.register_script(self.lua_chansend)
        self.lpopmany = connection.register_script(self.lua_lpopmany)
        self.delprefix = connection.register_script(self.lua_delprefix)
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, six.string_types):
                raise ValueError(
                    "symmetric_encryption_keys must be a list of possible keys"
                )
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError(
                    "Cannot run with encryption without 'cryptography' installed."
                )
            sub_fernets = [
                self.make_fernet(key) for key in symmetric_encryption_keys
            ]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    ### ASGI API ###

    extensions = ["groups", "flush", "twisted"]

    def send(self, channel, message):
        # Typecheck
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Write out message into expiring key (avoids big items in list)
        # TODO: Use extended set, drop support for older redis?
        message_key = self.prefix + uuid.uuid4().hex
        channel_key = self.prefix + channel
        # Pick a connection to the right server - consistent for response
        # channels, random for normal channels
        if "!" in channel or "?" in channel:
            index = self.consistent_hash(channel)
            connection = self.connection(index)
        else:
            connection = self.connection(None)
        # Use the Lua function to do the set-and-push
        try:
            self.chansend(
                keys=[message_key, channel_key],
                args=[
                    self.serialize(message), self.expiry,
                    self.get_capacity(channel)
                ],
            )
        except redis.exceptions.ResponseError as e:
            # The Lua script handles capacity checking and sends the "full" error back
            if e.args[0] == "full":
                raise self.ChannelFull

    def receive_many(self, channels, block=False):
        # List name get
        indexes = self._receive_many_list_names(channels)
        # Short circuit if no channels
        if indexes is None:
            return None, None
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            list_names = indexes[index]
            # Shuffle list_names to avoid the first ones starving others of workers
            random.shuffle(list_names)
            # Open a connection
            connection = self.connection(index)
            # Pop off any waiting message
            if block:
                result = connection.blpop(list_names,
                                          timeout=self.blpop_timeout)
            else:
                result = self.lpopmany(keys=list_names, client=connection)
            if result:
                content = connection.get(result[1])
                # If the content key expired, keep going.
                if content is None:
                    continue
                # Return the channel it's from and the message
                return result[0][len(self.prefix):].decode(
                    "utf8"), self.deserialize(content)
            else:
                return None, None

    def _receive_many_list_names(self, channels):
        """
        Inner logic of receive_many; takes channels, groups by shard, and
        returns {connection_index: list_names ...} if a query is needed or
        None for a vacuously empty response.
        """
        # Short circuit if no channels
        if not channels:
            return None
        # Check channel names are valid
        channels = list(channels)
        assert all(
            self.valid_channel_name(channel)
            for channel in channels), "One or more channel names invalid"
        # Work out what servers to listen on for the given channels
        indexes = {}
        random_index = self.random_index()
        for channel in channels:
            if "!" in channel or "?" in channel:
                indexes.setdefault(self.consistent_hash(channel),
                                   []).append(self.prefix + channel, )
            else:
                indexes.setdefault(random_index,
                                   []).append(self.prefix + channel, )
        return indexes

    def new_channel(self, pattern):
        assert isinstance(pattern, six.text_type)
        # Keep making channel names till one isn't present.
        while True:
            random_string = "".join(
                random.choice(string.ascii_letters) for i in range(12))
            assert pattern.endswith("!") or pattern.endswith("?")
            new_name = pattern + random_string
            # Get right connection
            index = self.consistent_hash(new_name)
            connection = self.connection(index)
            # Check to see if it's in the connected Redis.
            # This fails to stop collisions for sharding where the channel is
            # non-single-listener, but that seems very unlikely.
            key = self.prefix + new_name
            if not connection.exists(key):
                return new_name

    ### ASGI Group extension ###

    def group_add(self, group, channel):
        """
        Adds the channel to the named group for at least 'expiry'
        seconds (expiry defaults to message expiry if not provided).
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        group_key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Add to group sorted set with creation time as timestamp
        connection.zadd(group_key, **{channel: time.time()})
        # Set both expiration to be group_expiry, since everything in
        # it at this point is guaranteed to expire before that
        connection.expire(group_key, self.group_expiry)

    def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        self.connection(self.consistent_hash(group)).zrem(
            key,
            channel,
        )

    def group_channels(self, group):
        """
        Returns all channels in the group as an iterable.
        """
        key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Discard old channels based on group_expiry
        connection.zremrangebyscore(key, 0,
                                    int(time.time()) - self.group_expiry)
        # Return current lot
        return [x.decode("utf8") for x in connection.zrange(
            key,
            0,
            -1,
        )]

    def send_group(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # TODO: More efficient implementation (lua script per shard?)
        for channel in self.group_channels(group):
            try:
                self.send(channel, message)
            except self.ChannelFull:
                pass

    def _group_key(self, group):
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    ### Flush extension ###

    def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        for connection in self._connection_list:
            self.delprefix(keys=[],
                           args=[self.prefix + "*"],
                           client=connection)

    ### Twisted extension ###

    @defer.inlineCallbacks
    def receive_many_twisted(self, channels):
        """
        Twisted-native implementation of receive_many.
        """
        # List name get
        indexes = self._receive_many_list_names(channels)
        # Short circuit if no channels
        if indexes is None:
            defer.returnValue((None, None))
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            list_names = indexes[index]
            # Shuffle list_names to avoid the first ones starving others of workers
            random.shuffle(list_names)
            # Get a sync connection for conn details
            sync_connection = self.connection(index)
            twisted_connection = yield txredisapi.ConnectionPool(
                host=sync_connection.connection_pool.connection_kwargs['host'],
                port=sync_connection.connection_pool.connection_kwargs['port'],
                dbid=sync_connection.connection_pool.connection_kwargs['db'],
            )
            try:
                # Pop off any waiting message
                result = yield twisted_connection.blpop(
                    list_names, timeout=self.blpop_timeout)
                if result:
                    content = yield twisted_connection.get(result[1])
                    # If the content key expired, keep going.
                    if content is None:
                        continue
                    # Return the channel it's from and the message
                    defer.returnValue((result[0][len(self.prefix):],
                                       self.deserialize(content)))
                else:
                    defer.returnValue((None, None))
            finally:
                yield twisted_connection.disconnect()

    ### Serialization ###

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, encoding="utf8")

    ### Redis Lua scripts ###

    # Single-command channel send. Returns error if over capacity.
    # Keys: message, channel_list
    # Args: content, expiry, capacity
    lua_chansend = """
        if redis.call('llen', KEYS[2]) >= tonumber(ARGV[3]) then
            return redis.error_reply("full")
        end
        redis.call('set', KEYS[1], ARGV[1])
        redis.call('expire', KEYS[1], ARGV[2])
        redis.call('rpush', KEYS[2], KEYS[1])
        redis.call('expire', KEYS[2], ARGV[2] + 1)
    """

    lua_lpopmany = """
        for keyCount = 1, #KEYS do
            local result = redis.call('LPOP', KEYS[keyCount])
            if result then
                return {KEYS[keyCount], result}
            end
        end
        return {nil, nil}
    """

    lua_delprefix = """
        local keys = redis.call('keys', ARGV[1])
        for i=1,#keys,5000 do
            redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
        end
    """

    ### Internal functions ###

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC32, then down to one of the ring nodes.
        """
        if isinstance(value, six.text_type):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xffffffff
        return (bigval // 0x100000) // self.ring_divisor

    def random_index(self):
        return random.randint(0, len(self.hosts) - 1)

    def connection(self, index):
        """
        Returns the correct connection for the current thread.

        Pass key to use a server based on consistent hashing of the key value;
        pass None to use a random server instead.
        """
        # If index is explicitly None, pick a random server
        if index is None:
            index = self.random_index()
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" %
                             (self.ring_size, index))
        return self._connection_list[index]

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, six.text_type):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)
Exemplo n.º 47
0
from django.conf import settings
from django.utils.encoding import force_bytes

from celery import Celery
from cryptography.fernet import Fernet, MultiFernet, InvalidToken
from django_statsd.clients import statsd
from kombu import serialization
from kombu.utils import json

FERNET = None

if settings.KOMBU_FERNET_KEY:
    FERNET = Fernet(settings.KOMBU_FERNET_KEY)
    if settings.KOMBU_FERNET_KEY_PREVIOUS:
        # this will try both keys. for key rotation.
        FERNET = MultiFernet(
            [FERNET, Fernet(settings.KOMBU_FERNET_KEY_PREVIOUS)])


def fernet_dumps(message):
    statsd.incr('basket.news.celery.fernet_dumps')
    message = json.dumps(message)
    if FERNET:
        statsd.incr('basket.news.celery.fernet_dumps.encrypted')
        return FERNET.encrypt(force_bytes(message))

    statsd.incr('basket.news.celery.fernet_dumps.unencrypted')
    return message


def fernet_loads(encoded_message):
    statsd.incr('basket.news.celery.fernet_loads')