Example #1
def _datasources(request):
    form = TwitterForm(request.form)
    if request.method == 'POST' and form.validate():
        m1 = form.access_token.data
        m2 = form.access_token_secret.data
        m3 = form.consumer_key.data
        m4 = form.consumer_secret.data
        my_list = [
            bytes(m1, 'utf-8'),
            bytes(m2, 'utf-8'),
            bytes(m3, 'utf-8'),
            bytes(m4, 'utf-8')
        ]
        first = Fernet.generate_key()
        second = Fernet.generate_key()
        key1 = Fernet(first)
        key2 = Fernet(second)
        x = MultiFernet([key1, key2])
        token1 = x.encrypt(my_list[0])
        token2 = x.encrypt(my_list[1])
        token3 = x.encrypt(my_list[2])
        token4 = x.encrypt(my_list[3])
        con = sqlite3.connect(settings['MAIN_DB'])
        con.execute("""CREATE TABLE IF NOT EXISTS {tn} (username TEXT,
                               key1 BLOB,
                               key2 BLOB, PRIMARY KEY (username))""".format(
            tn="secret_keys"))
        con.execute("""INSERT INTO secret_keys VALUES (?,?,?)""",
                    (session['username'], first, second))
        con.execute("""CREATE TABLE IF NOT EXISTS {tn} (username TEXT,
                       access_token BLOB,
                       access_token_secret BLOB,
                       consumer_key BLOB,
                       consumer_secret BLOB, PRIMARY KEY (username))""".format(
            tn="secrets"))
        con.execute("""INSERT INTO secrets VALUES (?,?,?,?,?)""",
                    (session['username'], token1, token2, token3, token4))
        con.commit()
        con.close()
        User(session['username']).create_data_source("Twitter")
        session['data_sources'] = User(session['username']).fetch_data_list()
        sources = session['data_sources']
        return render_template('User/datasources.html',
                               form=form,
                               sources=sources)
    else:
        sources = User(session['username']).fetch_data_list()
        return render_template('User/datasources.html',
                               form=form,
                               sources=sources)
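A hypothetical read-side counterpart to the handler above (not part of the original): it loads the per-user Fernet keys and tokens back out of SQLite and decrypts them. Note that the example stores the keys in the same database as the ciphertext, so this protects only against casual inspection, not against a leak of the database itself.

import sqlite3
from cryptography.fernet import Fernet, MultiFernet

def _load_twitter_credentials(db_path, username):
    # Fetch the two Fernet keys and the four encrypted credentials
    # written by _datasources() above.
    con = sqlite3.connect(db_path)
    first, second = con.execute(
        "SELECT key1, key2 FROM secret_keys WHERE username = ?",
        (username,)).fetchone()
    tokens = con.execute(
        """SELECT access_token, access_token_secret,
                  consumer_key, consumer_secret
             FROM secrets WHERE username = ?""",
        (username,)).fetchone()
    con.close()
    x = MultiFernet([Fernet(first), Fernet(second)])
    return [x.decrypt(token).decode('utf-8') for token in tokens]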
Example #2
def setpasswd(c, user, appname="passvault"):
    "Set password for a username and application (optional)"
    config = c.config
    passwd = getpass.getpass(prompt='Set an app password for %s: ' % (user))
    bkey1 = Fernet.generate_key()
    key1 = Fernet(bkey1)
    key2, salt = encode_passwd(c.config["passwd"])
    key2 = Fernet(key2)
    f = MultiFernet([key1, key2])
    token = f.encrypt(passwd.encode())
    with shelve.open(config["conffile"]) as tapp:
        app = {"apps": tapp["apps"], "passwds": tapp["passwds"]}
        if not appname in app["apps"]:
            app["apps"][appname] = []
        if not appname in app["passwds"]:
            app["passwds"][appname] = {}
        #print(app["apps"])
        if not user in app["apps"][appname]:
            app["apps"][appname].append(user)
        #if not user in app["passwds"][appname]:
        #print(bkey1,token,salt)
        app["passwds"][appname][user] = {
            "key": bkey1,
            "token": token,
            "salt": salt
        }
        logging.info("Saving your password to vault")
        tapp["apps"] = app["apps"]
        tapp["passwds"] = app["passwds"]
Example #3
class CryptKeeper:
    def __init__(self, keys: List[Union[str, bytes]] = None):
        self._keys = self.read_keys(keys)

        self.check_available()

        self._fernet = MultiFernet(
            [Fernet(base64.urlsafe_b64encode(key)) for key in self._keys]
        )

    def read_keys(self, keys: List[Union[str, bytes]] = None):
        if keys is not None:
            return [_validate_key(key) for key in keys]
        else:
            return [
                _validate_key(key) for key in KEY_FROM_ENV.split(";") if key.strip()
            ]

    def encrypt(self, data: str):
        self.check_available()
        return self._fernet.encrypt(data.encode("utf8")).decode("utf-8")

    def decrypt(self, data: str):
        self.check_available()
        return self._fernet.decrypt(data.encode("utf-8")).decode("utf-8")

    def check_available(self):
        if Fernet is None or MultiFernet is None:
            raise CryptographyUnavailable()

        if not self._keys:
            raise NoEncryptionKeys()
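A minimal round-trip sketch for CryptKeeper, assuming _validate_key accepts raw 32-byte keys (the constructor base64-encodes them itself before building the Fernet instances):

keeper = CryptKeeper(keys=[b"\x00" * 32, b"\x01" * 32])
token = keeper.encrypt("secret value")
assert keeper.decrypt(token) == "secret value"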
Example #4
File: fern.py  Project: thien/smc
def multiEncryptInput(privateKeys, value):
    initiateKeys = []
    for i in privateKeys:
        initiateKeys.append(Fernet(i))
    crypt = MultiFernet(initiateKeys)
    message = str(value).encode('utf-8')
    encryptedValue = crypt.encrypt(message)
    return encryptedValue
def encrypt(message):
    key1 = Fernet.generate_key()
    key2 = Fernet.generate_key()
    k1 = Fernet(key1)
    k2 = Fernet(key2)
    cryptoKey = MultiFernet([k1, k2])
    encryptedMessage = cryptoKey.encrypt(message.encode('ascii'))
    return [key1, key2, encryptedMessage]
def main(args):
    startTime = time.time()
    obj = args.get("crypt", "crypt test")
    obj = bytes(obj, 'utf-8')
    key1 = Fernet(Fernet.generate_key())
    key2 = Fernet(Fernet.generate_key())
    f = MultiFernet([key1, key2])
    token = f.encrypt(obj)
    return {'token': str(token), 'startTime': int(round(startTime * 1000))}
Example #7
    def test_rotate_decrypt_no_shared_keys(self, backend):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)

        mf1 = MultiFernet([f1])
        mf2 = MultiFernet([f2])

        with pytest.raises(InvalidToken):
            mf2.rotate(mf1.encrypt(b"abc"))
Example #9
def test_cryptography(selenium):
    import base64

    from cryptography.fernet import Fernet, MultiFernet

    f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32))
    f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32))
    f = MultiFernet([f1, f2])

    assert f1.decrypt(f.encrypt(b"abc")) == b"abc"
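A complementary check, not in the original test: MultiFernet.encrypt always uses the first key in the list, which is why f1 alone can decrypt the token while f2 alone cannot.

import pytest
from cryptography.fernet import InvalidToken

with pytest.raises(InvalidToken):
    f2.decrypt(f.encrypt(b"abc"))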
Example #10
def get_fernet_encrypted_text(text_to_encrypt, encryption_key, encryption_key2):
    b64_hash_key1 = generate_b64_32byte_hash_from_string(encryption_key)
    b64_hash_key2 = generate_b64_32byte_hash_from_string(encryption_key2)
    fernet_cipher1 = Fernet(b64_hash_key1)
    fernet_cipher2 = Fernet(b64_hash_key2)
    c = MultiFernet([fernet_cipher1,fernet_cipher2])
    bytes_to_encrypt = text_to_encrypt.encode("utf-8")
    encrypted_bytes = c.encrypt(bytes_to_encrypt)
    encrypted_text = encrypted_bytes.decode("utf-8")
    return [encrypted_text,encryption_key,encryption_key2]
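A hypothetical inverse of the function above, assuming generate_b64_32byte_hash_from_string is deterministic (for example, a urlsafe-base64-encoded SHA-256 digest of the key string):

def get_fernet_decrypted_text(encrypted_text, encryption_key, encryption_key2):
    b64_hash_key1 = generate_b64_32byte_hash_from_string(encryption_key)
    b64_hash_key2 = generate_b64_32byte_hash_from_string(encryption_key2)
    c = MultiFernet([Fernet(b64_hash_key1), Fernet(b64_hash_key2)])
    decrypted_bytes = c.decrypt(encrypted_text.encode("utf-8"))
    return decrypted_bytes.decode("utf-8")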
Example #11
def create_peer_config(valid: int, hostname: str, aes_key: bytes,
                       aes_iv: bytes) -> bytes:
    """Create a fernet token."""
    fernet = MultiFernet([Fernet(key) for key in FERNET_TOKENS])

    return fernet.encrypt(
        json.dumps({
            "valid": valid,
            "hostname": hostname,
            "aes_key": aes_key.hex(),
            "aes_iv": aes_iv.hex(),
        }).encode())
def create_garble_tables(circuit, p_values, keys):
    for gate in circuit.Gates:
        if (gate.type == "NOT"):
            for input in util.create_all_combination(1):
                p_val = input[0] ^ p_values[gate.input[0]]
                input_keys = keys[gate.input[0]][p_val]
                f = MultiFernet([Fernet(input_keys), Fernet(input_keys)])
                output_value = gate.evaluate(input) ^ p_values[gate.id]
                output_value = (output_value, keys[gate.id][output_value])
                output_value = f.encrypt(pickle.dumps(output_value))
                gate.garbled_table.add_entry([p_val], output_value)
        else:
            for input in util.create_all_combination(2):
                ps = list(map(lambda x: input[x] ^ p_values[gate.input[x]],
                              [0,1]))
                input_keys = list(map(lambda x, y: keys[x][y], gate.input, ps))
                f = MultiFernet(list(map(lambda x: Fernet(x), input_keys)))
                output_value = gate.evaluate(input) ^ p_values[gate.id]
                output_value = (output_value, keys[gate.id][output_value])
                output_value = f.encrypt(pickle.dumps(output_value))
                gate.garbled_table.add_entry(ps, output_value)
Example #13
def Algo1_extented(filename, key1, key2):
    f = MultiFernet([Fernet(key1), Fernet(key2)])
    source_filename = 'files/' + filename
    target_filename = 'encrypted/' + filename
    file = open(source_filename, 'rb')
    target_file = open(target_filename, 'wb')
    raw = ""
    for line in file:
        raw = raw + line
    secret_data = f.encrypt(raw)
    target_file.write(secret_data)
    file.close()
    target_file.close()
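A matching decryption helper, not part of the original, mirroring the directory layout used above:

def Algo1_extented_decrypt(filename, key1, key2):
    f = MultiFernet([Fernet(key1), Fernet(key2)])
    with open('encrypted/' + filename, 'rb') as source:
        secret_data = source.read()
    with open('decrypted/' + filename, 'wb') as target:
        target.write(f.decrypt(secret_data))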
Example #14
    def encrypt(self, raw):
        raw = bytes(raw, encoding='utf-8')
        salt1 = os.urandom(16)
        salt2 = Fernet.generate_key()
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=salt1,
            iterations=100000,
            backend=default_backend())
        key1 = Fernet(base64.urlsafe_b64encode(kdf.derive(self.__key)))
        key2 = Fernet(salt2)
        f = MultiFernet([key1, key2])
        return base64.b64encode(salt1) + salt2 + f.encrypt(raw)
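A sketch of the matching decrypt method, based on the byte layout produced above: 24 bytes of standard-base64-encoded salt1, then the 44-byte salt2 (itself a Fernet key), then the token. It assumes it lives in the same class, so the same self.__key attribute is available.

    def decrypt(self, data):
        # base64.b64encode of 16 bytes is always 24 bytes long, and a
        # Fernet key is always 44 bytes long.
        salt1 = base64.b64decode(data[:24])
        salt2 = data[24:68]
        token = data[68:]
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=salt1,
            iterations=100000,
            backend=default_backend())
        key1 = Fernet(base64.urlsafe_b64encode(kdf.derive(self.__key)))
        key2 = Fernet(salt2)
        f = MultiFernet([key1, key2])
        return f.decrypt(token).decode('utf-8')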
Example #15
    def cryptKeyPairs(cls, msg, key1, key2):
        """

    Example

    Documentation

    :param msg:
    :param key1:
    :param key2:
    :return:
    """
        from cryptography.fernet import Fernet, MultiFernet
        f = MultiFernet([Fernet(key1), Fernet(key2)])
        return f.encrypt(bytes(msg.encode('latin1')))
def handle(args):
    startTime = time.time()
    try:
        obj = args.get("array", "crypt test")
    except AttributeError:
        obj = "crypt test"
        pass
    obj = bytes(obj, 'utf-8')
    key1 = Fernet(Fernet.generate_key())
    key2 = Fernet(Fernet.generate_key())
    f = MultiFernet([key1, key2])
    token = f.encrypt(obj)
    return json.dumps({
        'token': str(token),
        'startTime': int(round(startTime * 1000))
    })
Example #17
    def cryptKeyPairs(cls, msg, key1, key2):
        """
    Description:
    ------------

    Usage::

    Attributes:
    ----------
    :param msg:
    :param key1:
    :param key2:
    """
        from cryptography.fernet import Fernet, MultiFernet

        f = MultiFernet([Fernet(key1), Fernet(key2)])
        return f.encrypt(bytes(msg.encode('latin1')))
Example #18
    def encryptFile(self, file, aes=FCRYPTERAES):
        # `ext` is assumed to be a module-level suffix constant (an output
        # file extension) defined elsewhere in the project.
        if self.extend not in file:
            file = self.merge + file
        if aes:
            with open(file, "rb") as _file:
                text = _file.read()
            encrypted = self.aesCrypt(text)
            with open(file + ext, "wb") as encrypted_file:
                encrypted_file.write(encrypted)
            # Remove the plaintext only after the encrypted copy is closed.
            os.remove(file)
        else:
            fernet_cipher = MultiFernet([Fernet(b64encode(self.key)),
                                         Fernet(b64encode(self.key[::-1]))])
            with open(file, "rb") as _file:
                data = _file.read()
            crypted = fernet_cipher.encrypt(data)
            with open(file + ext, "wb") as new:
                new.write(crypted)
            os.remove(file)
Example #19
    def _encrypt_credentials(self, credentials):
        """
        Encrypt credentials json string.

        Returns: Encrypted and decoded string.
        """
        encryption_keys = self._get_encryption_keys_from_file(
            self.encryption_keys_file
        )
        fernet = MultiFernet(encryption_keys)

        try:
            # Ensure creds string is encoded as bytes
            credentials = credentials.encode()
        except Exception:
            pass

        return fernet.encrypt(credentials).decode()
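A hypothetical decrypt counterpart, mirroring the helper above:

    def _decrypt_credentials(self, encrypted_credentials):
        """
        Decrypt a credentials string produced by _encrypt_credentials.

        Returns: Decrypted string.
        """
        encryption_keys = self._get_encryption_keys_from_file(
            self.encryption_keys_file
        )
        fernet = MultiFernet(encryption_keys)
        return fernet.decrypt(encrypted_credentials.encode()).decode()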
Example #20
def generate_client_token(
    tokens: List[str],
    valid_delta: timedelta,
    hostname: str,
    aes_key: bytes,
    aes_iv: bytes,
):
    """Generate a token for client."""
    fernet = MultiFernet([Fernet(key) for key in tokens])
    valid = datetime.utcnow() + valid_delta

    return fernet.encrypt(
        json.dumps({
            "valid": valid.timestamp(),
            "hostname": hostname,
            "aes_key": aes_key.hex(),
            "aes_iv": aes_iv.hex(),
        }).encode())
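A hypothetical validation counterpart: decrypt the client token with the same key list and reject it once the embedded timestamp has passed.

def validate_client_token(tokens: List[str], token: bytes) -> dict:
    """Decrypt and validate a token produced by generate_client_token."""
    fernet = MultiFernet([Fernet(key) for key in tokens])
    config = json.loads(fernet.decrypt(token))
    if config["valid"] < datetime.utcnow().timestamp():
        raise ValueError("Token expired")
    return config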
Example #21
    def test_rotate(self, backend):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)

        mf1 = MultiFernet([f1])
        mf2 = MultiFernet([f2, f1])

        plaintext = b"abc"
        mf1_ciphertext = mf1.encrypt(plaintext)

        assert mf2.decrypt(mf1_ciphertext) == plaintext

        rotated = mf2.rotate(mf1_ciphertext)

        assert rotated != mf1_ciphertext
        assert mf2.decrypt(rotated) == plaintext

        with pytest.raises(InvalidToken):
            mf1.decrypt(rotated)
Example #22
class EncryptedNewlineReader(NewlineReader):
    def __init__(self, fernet_keys=None):
        super().__init__()
        fernet_keys = fernet_keys or settings.LAYERS_MANAGER_BALANCER_FERNET_KEYS
        self.fernet = MultiFernet([Fernet(key) for key in fernet_keys])

    def __iter__(self):
        for token in super().__iter__():
            line = self.fernet.decrypt(token)
            assert b"\n" not in line
            yield line

    def encrypt_line(self, line):
        assert line and b"\n" not in line
        token = self.fernet.encrypt(line)
        # Fernet.encrypt returns a URL-safe base64 token, so it's guaranteed
        # that there will be no newlines.
        assert b"\n" not in token
        return token
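A minimal usage sketch, assuming the NewlineReader superclass feeds tokens in one per line:

reader = EncryptedNewlineReader(fernet_keys=[Fernet.generate_key()])
token = reader.encrypt_line(b"hello world")
assert reader.fernet.decrypt(token) == b"hello world"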
Example #24
def payment_method():
    payment_method = """
    enter the method no. you would like to pay:-
    1) Net Banking
    2) Debit/Credit Card
    3) Cash
    ->"""
    payment_type = int(input(payment_method))
    key1 = Fernet(Fernet.generate_key())
    key2 = Fernet(Fernet.generate_key())
    f = MultiFernet([key1, key2])

    # Fernet.encrypt expects bytes, so encode the integer's string form.
    token = f.encrypt(str(payment_type).encode())

    print(token)

    d = f.decrypt(token)

    print(d.decode())
Example #25
    def test_rotate_preserves_timestamp(self, backend, monkeypatch):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)

        mf1 = MultiFernet([f1])
        mf2 = MultiFernet([f2, f1])

        plaintext = b"abc"
        mf1_ciphertext = mf1.encrypt(plaintext)

        later = datetime.datetime.now() + datetime.timedelta(minutes=5)
        later_time = time.mktime(later.timetuple())
        monkeypatch.setattr(time, "time", lambda: later_time)

        original_time, _ = Fernet._get_unverified_token_data(mf1_ciphertext)
        rotated_time, _ = Fernet._get_unverified_token_data(
            mf2.rotate(mf1_ciphertext))

        assert later_time != rotated_time
        assert original_time == rotated_time
Example #27
class EncryptingPacker(object):
    """Implement conversion of Python objects to/from encrypted bytestrings.

    :param str key: a `Fernet`_ key to use for encryption and decryption
    :param list old_keys: additional `Fernet`_ keys to use for decryption

    .. note::

        Encrypted messages contain the timestamp at which they were generated
        *in plaintext*. See `our audit`_ for discussion of this and other
        considerations with `Fernet`_.

    .. _Fernet: https://cryptography.io/en/latest/fernet/
    .. _our audit: https://github.com/gratipay/gratipay.com/pull/3998#issuecomment-216227070

    """

    def __init__(self, key, *old_keys):
        keys = [key] + list(old_keys)
        self.fernet = MultiFernet([Fernet(k) for k in keys])

    def pack(self, obj):
        """Given a JSON-serializable object, return a `Fernet`_ token.
        """
        obj = json.dumps(obj)           # serialize to unicode
        obj = obj.encode('utf8')        # convert to bytes
        obj = self.fernet.encrypt(obj)  # encrypt
        return obj

    def unpack(self, token):
        """Given a `Fernet`_ token with JSON in the ciphertext, return a Python object.
        """
        obj = token
        if not type(obj) is bytes:
            raise TypeError("need bytes, got {}".format(type(obj)))
        obj = self.fernet.decrypt(obj)  # decrypt
        obj = obj.decode('utf8')        # convert to unicode
        obj = json.loads(obj)           # deserialize from unicode
        return obj
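A round-trip usage sketch for EncryptingPacker:

packer = EncryptingPacker(Fernet.generate_key())
token = packer.pack({"user": "alice", "id": 42})
assert packer.unpack(token) == {"user": "alice", "id": 42}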
Example #29
class SecretEngine(BaseSecretEngine):
    kd_iterations = 100000

    def __init__(self, config_d):
        super().__init__(config_d)
        salt = settings.SECRET_KEY.encode("utf-8")
        fernets = []
        for password in config_d["passwords"]:
            kdf = PBKDF2HMAC(algorithm=hashes.SHA256(),
                             length=32,
                             salt=salt,
                             iterations=self.kd_iterations)
            fernets.append(
                Fernet(
                    base64.urlsafe_b64encode(
                        kdf.derive(password.encode("utf-8")))))
        self._multifernet = MultiFernet(fernets)

    def encrypt(self, data, **context):
        return self._multifernet.encrypt(data).decode("utf-8")

    def decrypt(self, data, **context):
        return self._multifernet.decrypt(data.encode("utf-8"))
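A usage sketch, assuming BaseSecretEngine.__init__ accepts the config dict unchanged and Django settings provide SECRET_KEY. The first password in the list is the one used for encryption; the rest are only tried for decryption.

engine = SecretEngine({"passwords": ["current-password", "old-password"]})
token = engine.encrypt(b"top secret")
assert engine.decrypt(token) == b"top secret"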
Example #30
class SecretStore:
    def __init__(self, *master_keys, encrypted_store: dict = None):
        if not len(master_keys):
            raise ValueError('at least one master key must be passed')
        self.crypt = MultiFernet([Fernet(key) for key in master_keys])
        if not encrypted_store:
            self.encrypted_store = dict()
        else:
            self.encrypted_store = encrypted_store

    @staticmethod
    def generate_master_key():
        return Fernet.generate_key()

    @staticmethod
    def add_master_key(key_yaml_path):
        master_key = SecretStore.generate_master_key()
        try:
            master_keys = SecretStore._load_keys(key_yaml_path)
        except OSError:
            master_keys = []
        master_keys = [master_key] + master_keys
        SecretStore._save_as_yaml(key_yaml_path, 'keys', master_keys)
        return master_keys

    @staticmethod
    def _load_keys(key_yaml_path):
        with open(key_yaml_path, 'r') as key_file:
            master_keys = yaml.safe_load(key_file)['keys']
            return master_keys

    @classmethod
    def load_from_yaml(cls, key_yaml_path, store_yaml_path=None, encrypted=True):
        master_keys = SecretStore._load_keys(key_yaml_path)
        secret_store = cls(*master_keys)
        if store_yaml_path:
            secret_store.load_as_yaml(store_yaml_path, encrypted=encrypted)
        return secret_store

    def encrypt_copy(self, plain_store, *path):
        for key in plain_store:
            value = plain_store[key]
            if isinstance(value, bytes) or isinstance(value, str):
                self.set_secret(value, *path, key)
            else:
                self.encrypt_copy(value, *(list(path) + [key]))

    def set_secret(self, secret, *path):
        if not len(path):
            raise ValueError('path to secret must not be empty')
        if not (isinstance(secret, bytes) or isinstance(secret, str)):
            raise ValueError(
                'secret must be bytes or str, but {0} is passed'.format(
                    type(secret)))
        if isinstance(secret, str):
            secret = secret.encode('utf-8')
        encrypted_secret = self.crypt.encrypt(secret)
        store = self.encrypted_store
        for key in path[:-1]:
            store = store.setdefault(key, dict())
        store[path[-1]] = encrypted_secret

    def get_secret(self, *path):
        encrypted_secret = self.get_encrypted_secret(*path)
        return self.crypt.decrypt(encrypted_secret)

    def delete_secret(self, *path):
        if not len(path):
            raise ValueError('path to secret must not be empty')
        store = self.encrypted_store
        for key in path[:-1]:
            store = store[key]
        del store[path[-1]]

    def get_encrypted_secret(self, *path):
        if not len(path):
            raise ValueError('path to secret must not be empty')
        store = self.encrypted_store
        for key in path[:-1]:
            store = store[key]
        encrypted_secret = store[path[-1]]
        return encrypted_secret

    def load_as_yaml(self, yaml_path, encrypted=True):
        with open(yaml_path, 'r') as secret_file:
            secret_storage = yaml.safe_load(secret_file)
            if encrypted:
                self.encrypted_store = secret_storage['encrypted_store']
            else:
                self.encrypt_copy(secret_storage['encrypted_store'])

    def save_as_yaml(self, yaml_path):
        SecretStore._save_as_yaml(yaml_path, 'encrypted_store', self.encrypted_store)

    def print_as_yaml(self):
        print(yaml.dump(self.encrypted_store, default_flow_style=False))

    @staticmethod
    def _wrap_payload(payload_key, payload):
        now = datetime.now()
        timestamp = now.replace(tzinfo=timezone.utc).timestamp()
        wrapper = {
            'meta': {
                'method': 'fernet',
                'timestamp': timestamp,
                'timezone': 'utc'
            },
            payload_key: payload
        }
        return wrapper

    @staticmethod
    def _save_as_yaml(yaml_path, payload_key, payload):
        content = SecretStore._wrap_payload(payload_key, payload)
        with open(yaml_path, 'w') as yaml_file:
            yaml.dump(content, yaml_file, default_flow_style=False)
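A round-trip usage sketch for SecretStore:

store = SecretStore(SecretStore.generate_master_key())
store.set_secret('hunter2', 'db', 'password')
assert store.get_secret('db', 'password') == b'hunter2'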
Example #32
class AutopushConfig(object):
    """Main Autopush Settings Object"""

    debug = attrib(default=False)  # type: bool

    fernet = attrib(init=False)  # type: MultiFernet
    _crypto_key = attrib(
        converter=_init_crypto_key, default=None)  # type: List[str]

    bear_hash_key = attrib(default=Factory(list))  # type: List[str]
    human_logs = attrib(default=True)  # type: bool

    hostname = attrib(default=None)  # type: Optional[str]
    port = attrib(default=None)  # type: Optional[int]
    _resolve_hostname = attrib(default=False)  # type: bool

    router_scheme = attrib(default=None)  # type: Optional[str]
    router_hostname = attrib(default=None)  # type: Optional[str]
    router_port = attrib(default=None)  # type: Optional[int]

    endpoint_scheme = attrib(default=None)  # type: Optional[str]
    endpoint_hostname = attrib(default=None)  # type: Optional[str]
    endpoint_port = attrib(default=None)  # type: Optional[int]

    proxy_protocol_port = attrib(default=None)  # type: Optional[int]
    memusage_port = attrib(default=None)  # type: Optional[int]

    statsd_host = attrib(default="localhost")  # type: str
    statsd_port = attrib(default=8125)  # type: int
    megaphone_api_url = attrib(default=None)  # type: Optional[str]
    megaphone_api_token = attrib(default=None)  # type: Optional[str]
    megaphone_poll_interval = attrib(default=30)  # type: int

    datadog_api_key = attrib(default=None)  # type: Optional[str]
    datadog_app_key = attrib(default=None)  # type: Optional[str]
    datadog_flush_interval = attrib(default=None)  # type: Optional[int]

    router_table = _nested(
        DDBTableConfig,
        default=dict(tablename="router")
    )  # type: DDBTableConfig
    message_table = _nested(
        DDBTableConfig,
        default=dict(tablename="message")
    )  # type: DDBTableConfig

    preflight_uaid = attrib(
        default="deadbeef00000000deadbeef00000000")  # type: str

    ssl = _nested(SSLConfig, default=Factory(SSLConfig))  # type: SSLConfig
    router_ssl = _nested(
        SSLConfig, default=Factory(SSLConfig))  # type: SSLConfig
    client_certs = attrib(default=None)  # type: Optional[Dict[str, str]]

    router_url = attrib(init=False)  # type: str
    endpoint_url = attrib(init=False)  # type: str
    ws_url = attrib(init=False)  # type: str

    router_conf = attrib(default=Factory(dict))  # type: JSONDict

    # twisted Agent's connectTimeout
    connect_timeout = attrib(default=0.5)  # type: float
    max_data = attrib(default=4096)  # type: int
    env = attrib(default='development')  # type: str
    ami_id = attrib(default=None)  # type: Optional[str]
    cors = attrib(default=False)  # type: bool

    hello_timeout = attrib(default=0)  # type: int
    # Force timeout in idle seconds
    msg_limit = attrib(default=100)  # type: int
    auto_ping_interval = attrib(default=None)  # type: Optional[int]
    auto_ping_timeout = attrib(default=None)  # type: Optional[int]
    max_connections = attrib(default=None)  # type: Optional[int]
    close_handshake_timeout = attrib(default=None)  # type: Optional[int]

    # Generate messages per legacy rules, only used for testing to
    # generate legacy data.
    _notification_legacy = attrib(default=False)  # type: bool

    # Use the cryptography library
    use_cryptography = attrib(default=False)  # type: bool

    # Strict-Transport-Security max age (Default 1 year in secs)
    sts_max_age = attrib(default=31536000)  # type: int

    # Don't cache ssl.wrap_socket's SSLContexts
    no_sslcontext_cache = attrib(default=False)  # type: bool

    # DynamoDB endpoint override
    aws_ddb_endpoint = attrib(default=None)  # type: str

    allow_table_rotation = attrib(default=True)  # type: bool

    def __attrs_post_init__(self):
        """Initialize the Settings object"""
        # Setup hosts/ports/urls
        if not self.hostname:
            self.hostname = socket.gethostname()
        if self._resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        if not self.endpoint_hostname:
            self.endpoint_hostname = self.hostname
        if not self.router_hostname:
            self.router_hostname = self.hostname

        self.router_url = canonical_url(
            self.router_scheme or 'http',
            self.router_hostname,
            self.router_port
        )
        self.endpoint_url = canonical_url(
            self.endpoint_scheme or 'http',
            self.endpoint_hostname,
            self.endpoint_port
        )
        # not accurate under autoendpoint (like router_url)
        self.ws_url = "{}://{}:{}/".format(
            'wss' if self.ssl.key else 'ws',
            self.hostname,
            self.port
        )

        self.fernet = MultiFernet([Fernet(key) for key in self._crypto_key])

    @property
    def enable_tls_auth(self):
        """Whether TLS authentication w/ client certs is enabled"""
        return self.client_certs is not None

    @classmethod
    def from_argparse(cls, ns, **kwargs):
        # type: (Namespace, **Any) -> AutopushConfig
        """Create an instance from argparse/additional kwargs"""
        router_conf = {}
        if ns.key_hash:
            db.key_hash = ns.key_hash
        if ns.apns_creds:
            # if you have the critical elements for each external
            # router, create it
            try:
                router_conf["apns"] = json.loads(ns.apns_creds)
            except (ValueError, TypeError):
                raise InvalidConfig(
                    "Invalid JSON specified for APNS config options")
        if ns.senderid_list:
            # Create a common gcmclient
            try:
                sender_ids = json.loads(ns.senderid_list)
            except (ValueError, TypeError):
                raise InvalidConfig("Invalid JSON specified for senderid_list")
            try:
                # This is an init check to verify that things are
                # configured correctly. Otherwise errors may creep in
                # later that go unaccounted.
                sender_ids[sender_ids.keys()[0]]
            except (IndexError, TypeError):
                raise InvalidConfig("No GCM SenderIDs specified or found.")
            router_conf["gcm"] = {"ttl": ns.gcm_ttl,
                                  "dryrun": ns.gcm_dryrun,
                                  "max_data": ns.max_data,
                                  "collapsekey": ns.gcm_collapsekey,
                                  "senderIDs": sender_ids,
                                  "endpoint": ns.gcm_endpoint}
        client_certs = None
        # endpoint only
        if getattr(ns, 'client_certs', None):
            try:
                client_certs_arg = json.loads(ns.client_certs)
            except (ValueError, TypeError):
                raise InvalidConfig("Invalid JSON specified for client_certs")
            if client_certs_arg:
                if not ns.ssl_key:
                    raise InvalidConfig("client_certs specified without SSL "
                                        "enabled (no ssl_key specified)")
                client_certs = {}
                for name, sigs in client_certs_arg.iteritems():
                    if not isinstance(sigs, list):
                        raise InvalidConfig(
                            "Invalid JSON specified for client_certs")
                    for sig in sigs:
                        sig = sig.upper()
                        if (not name or not CLIENT_SHA256_RE.match(sig) or
                                sig in client_certs):
                            raise InvalidConfig(
                                "Invalid client_certs argument")
                        client_certs[sig] = name

        if ns.fcm_creds:
            try:
                router_conf["fcm"] = {
                    "version": ns.fcm_version,
                    "ttl": ns.fcm_ttl,
                    "dryrun": ns.fcm_dryrun,
                    "max_data": ns.max_data,
                    "collapsekey": ns.fcm_collapsekey,
                    "creds": json.loads(ns.fcm_creds)
                }
                if not router_conf["fcm"]["creds"]:
                    raise InvalidConfig(
                        "Empty credentials for FCM config options"
                    )
                for creds in router_conf["fcm"]["creds"].values():
                    if "auth" not in creds:
                        raise InvalidConfig(
                            "Missing auth for FCM config options"
                        )
            except (ValueError, TypeError):
                raise InvalidConfig(
                    "Invalid JSON specified for FCM config options"
                )

        if ns.adm_creds:
            # Create a common admclient
            try:
                router_conf["adm"] = json.loads(ns.adm_creds)
            except (ValueError, TypeError):
                raise InvalidConfig(
                    "Invalid JSON specified for ADM config options")

        ami_id = None
        # Not a fan of double negatives, but this makes more
        # understandable args
        if not ns.no_aws:
            ami_id = get_amid() or "Unknown"

        allow_table_rotation = not ns.no_table_rotation
        return cls(
            crypto_key=ns.crypto_key,
            datadog_api_key=ns.datadog_api_key,
            datadog_app_key=ns.datadog_app_key,
            datadog_flush_interval=ns.datadog_flush_interval,
            hostname=ns.hostname,
            statsd_host=ns.statsd_host,
            statsd_port=ns.statsd_port,
            router_conf=router_conf,
            resolve_hostname=ns.resolve_hostname,
            ami_id=ami_id,
            client_certs=client_certs,
            msg_limit=ns.msg_limit,
            connect_timeout=ns.connection_timeout,
            memusage_port=ns.memusage_port,
            use_cryptography=ns.use_cryptography,
            no_sslcontext_cache=ns._no_sslcontext_cache,
            router_table=dict(
                tablename=ns.router_tablename,
                read_throughput=ns.router_read_throughput,
                write_throughput=ns.router_write_throughput
            ),
            message_table=dict(
                tablename=ns.message_tablename,
                read_throughput=ns.message_read_throughput,
                write_throughput=ns.message_write_throughput
            ),
            ssl=dict(
                key=ns.ssl_key,
                cert=ns.ssl_cert,
                dh_param=ns.ssl_dh_param
            ),
            sts_max_age=ns.sts_max_age,
            allow_table_rotation=allow_table_rotation,
            **kwargs
        )

    def make_endpoint(self, uaid, chid, key=None):
        """Create an v1 or v2 WebPush endpoint from the identifiers.

        Both endpoints use bytes instead of hex to reduce ID length.
        v1 is the uaid + chid
        v2 is the uaid + chid + sha256(key).bytes

        :param uaid: User Agent Identifier
        :param chid: Channel or Subscription ID
        :param key: Optional Base64 URL-encoded application server key
        :returns: Push endpoint

        """
        root = self.endpoint_url + '/wpush/'
        base = (uaid.replace('-', '').decode("hex") +
                chid.replace('-', '').decode("hex"))

        if key is None:
            return root + 'v1/' + self.fernet.encrypt(base).strip('=')

        raw_key = base64url_decode(key.encode('utf8'))
        ep = self.fernet.encrypt(base + sha256(raw_key).digest()).strip('=')
        return root + 'v2/' + ep

    def parse_endpoint(self, metrics, token, version="v1", ckey_header=None,
                       auth_header=None):
        """Parse an endpoint into component elements of UAID, CHID and optional
        key hash if v2

        :param token: The obscured subscription data.
        :param version: This is the API version of the token.
        :param ckey_header: the Crypto-Key header bearing the public key
            (from Crypto-Key: p256ecdsa=)
        :param auth_header: The Authorization header bearing the VAPID info

        :raises ValueError: In the case of a malformed endpoint.

        :returns: a dict containing (uaid=UAID, chid=CHID, public_key=KEY)

        """
        token = self.fernet.decrypt(repad(token).encode('utf8'))
        public_key = None
        if ckey_header:
            try:
                crypto_key = CryptoKey(ckey_header)
            except CryptoKeyException:
                raise InvalidTokenException("Invalid key data")
            public_key = crypto_key.get_label('p256ecdsa')
        if auth_header:
            vapid_auth = parse_auth_header(auth_header)
            if not vapid_auth:
                raise VapidAuthException("Invalid Auth token")
            metrics.increment("notification.auth",
                              tags="vapid:{version},scheme:{scheme}".format(
                                  **vapid_auth
                              ).split(","))
            # pull the public key from the VAPID auth header if needed
            try:
                if vapid_auth['version'] != 1:
                    public_key = vapid_auth['k']
            except KeyError:
                raise VapidAuthException("Missing Public Key")
        if version == 'v1' and len(token) != 32:
            raise InvalidTokenException("Corrupted push token")
        if version == 'v2':
            if not auth_header:
                raise VapidAuthException("Missing Authorization Header")
            if len(token) != 64:
                raise InvalidTokenException("Corrupted push token")
            if not public_key:
                raise VapidAuthException("Invalid key data")
            try:
                decoded_key = base64url_decode(public_key)
            except TypeError:
                raise VapidAuthException("Invalid key data")
            if not constant_time.bytes_eq(sha256(decoded_key).digest(),
                                          token[32:]):
                raise VapidAuthException("Key mismatch")
        return dict(uaid=token[:16].encode('hex'),
                    chid=token[16:32].encode('hex'),
                    version=version,
                    public_key=public_key)
Example #33
    def test_encrypt(self, backend):
        f1 = Fernet(base64.urlsafe_b64encode(b"\x00" * 32), backend=backend)
        f2 = Fernet(base64.urlsafe_b64encode(b"\x01" * 32), backend=backend)
        f = MultiFernet([f1, f2])

        assert f1.decrypt(f.encrypt(b"abc")) == b"abc"
Example #34
File: core.py  Project: MisaGu/chess
class RedisChannelLayer(BaseChannelLayer):
    """
    ORM-backed channel environment. For development use only; it will span
    multiple processes fine, but it's going to be pretty bad at throughput.
    """

    blpop_timeout = 5

    def __init__(self, expiry=60, hosts=None, prefix="asgi:", group_expiry=86400, capacity=100, channel_capacity=None,
                 symmetric_encryption_keys=None):
        super(RedisChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,
            capacity=capacity,
            channel_capacity=channel_capacity,
        )
        # Make sure they provided some hosts, or provide a default
        if not hosts:
            hosts = [("localhost", 6379)]
        self.hosts = []
        for entry in hosts:
            if isinstance(entry, six.string_types):
                self.hosts.append(entry)
            else:
                self.hosts.append("redis://%s:%d/0" % (entry[0],entry[1]))
        self.prefix = prefix
        assert isinstance(self.prefix, six.text_type), "Prefix must be unicode"
        # Precalculate some values for ring selection
        self.ring_size = len(self.hosts)
        self.ring_divisor = int(math.ceil(4096 / float(self.ring_size)))
        # Create connections ahead of time (they won't call out just yet, but
        # we want to connection-pool them later)
        self._connection_list = [
            redis.Redis.from_url(host)
            for host in self.hosts
        ]
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(random.choice(string.ascii_letters) for i in range(8))
        # Register scripts
        connection = self.connection(None)
        self.chansend = connection.register_script(self.lua_chansend)
        self.lpopmany = connection.register_script(self.lua_lpopmany)
        self.delprefix = connection.register_script(self.lua_delprefix)
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, six.string_types):
                raise ValueError("symmetric_encryption_keys must be a list of possible keys")
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError("Cannot run with encryption without 'cryptography' installed.")
            sub_fernets = [self.make_fernet(key) for key in symmetric_encryption_keys]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    ### ASGI API ###

    extensions = ["groups", "flush"]

    def send(self, channel, message):
        # Typecheck
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Write out message into expiring key (avoids big items in list)
        # TODO: Use extended set, drop support for older redis?
        message_key = self.prefix + uuid.uuid4().hex
        channel_key = self.prefix + channel
        # Pick a connection to the right server - consistent for response
        # channels, random for normal channels
        if "!" in channel or "?" in channel:
            index = self.consistent_hash(channel)
            connection = self.connection(index)
        else:
            connection = self.connection(None)
        # Use the Lua function to do the set-and-push
        try:
            self.chansend(
                keys=[message_key, channel_key],
                args=[self.serialize(message), self.expiry, self.get_capacity(channel)],
            )
        except redis.exceptions.ResponseError as e:
            # The Lua script handles capacity checking and sends the "full" error back
            if e.args[0] == "full":
                raise self.ChannelFull

    def receive_many(self, channels, block=False):
        if not channels:
            return None, None
        channels = list(channels)
        assert all(self.valid_channel_name(channel) for channel in channels), "One or more channel names invalid"
        # Work out what servers to listen on for the given channels
        indexes = {}
        random_index = self.random_index()
        for channel in channels:
            if "!" in channel or "?" in channel:
                indexes.setdefault(self.consistent_hash(channel), []).append(channel)
            else:
                indexes.setdefault(random_index, []).append(channel)
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            connection = self.connection(index)
            channels = indexes[index]
            # Shuffle channels to avoid the first ones starving others of workers
            random.shuffle(channels)
            # Pop off any waiting message
            list_names = [self.prefix + channel for channel in channels]
            if block:
                result = connection.blpop(list_names, timeout=self.blpop_timeout)
            else:
                result = self.lpopmany(keys=list_names, client=connection)
            if result:
                content = connection.get(result[1])
                # If the content key expired, keep going.
                if content is None:
                    continue
                # Return the channel it's from and the message
                return result[0][len(self.prefix):].decode("utf8"), self.deserialize(content)
            else:
                return None, None

    def new_channel(self, pattern):
        assert isinstance(pattern, six.text_type)
        # Keep making channel names till one isn't present.
        while True:
            random_string = "".join(random.choice(string.ascii_letters) for i in range(12))
            assert pattern.endswith("!") or pattern.endswith("?")
            new_name = pattern + random_string
            # Get right connection
            index = self.consistent_hash(new_name)
            connection = self.connection(index)
            # Check to see if it's in the connected Redis.
            # This fails to stop collisions for sharding where the channel is
            # non-single-listener, but that seems very unlikely.
            key = self.prefix + new_name
            if not connection.exists(key):
                return new_name

    ### ASGI Group extension ###

    def group_add(self, group, channel):
        """
        Adds the channel to the named group for at least 'expiry'
        seconds (expiry defaults to message expiry if not provided).
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        group_key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Add to group sorted set with creation time as timestamp
        connection.zadd(
            group_key,
            **{channel: time.time()}
        )
        # Set both expiration to be group_expiry, since everything in
        # it at this point is guaranteed to expire before that
        connection.expire(group_key, self.group_expiry)
        # Also add to a normal set that contains all the groups a channel is in
        # (as yet unused)
        channel_key = self._channel_groups_key(channel)
        connection = self.connection(self.consistent_hash(channel))
        connection.sadd(channel_key, group)
        connection.expire(channel_key, self.group_expiry)

    def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        self.connection(self.consistent_hash(group)).zrem(
            key,
            channel,
        )

    def group_channels(self, group):
        """
        Returns all channels in the group as an iterable.
        """
        key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Discard old channels based on group_expiry
        connection.zremrangebyscore(key, 0, int(time.time()) - self.group_expiry)
        # Return current lot
        return [x.decode("utf8") for x in connection.zrange(
            key,
            0,
            -1,
        )]

    def send_group(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # TODO: More efficient implementation (lua script per shard?)
        for channel in self.group_channels(group):
            try:
                self.send(channel, message)
            except self.ChannelFull:
                pass

    def _group_key(self, group):
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    def _channel_groups_key(self, group):
        return ("%s:chgroups:%s" % (self.prefix, group)).encode("utf8")

    ### Flush extension ###

    def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        for connection in self._connection_list:
            self.delprefix(keys=[], args=[self.prefix+"*"], client=connection)

    ### Serialization ###

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, encoding="utf8")

    ### Redis Lua scripts ###

    # Single-command channel send. Returns error if over capacity.
    # Keys: message, channel_list
    # Args: content, expiry, capacity
    lua_chansend = """
        if redis.call('llen', KEYS[2]) >= tonumber(ARGV[3]) then
            return redis.error_reply("full")
        end
        redis.call('set', KEYS[1], ARGV[1])
        redis.call('expire', KEYS[1], ARGV[2])
        redis.call('rpush', KEYS[2], KEYS[1])
        redis.call('expire', KEYS[2], ARGV[2] + 1)
    """

    lua_lpopmany = """
        for keyCount = 1, #KEYS do
            local result = redis.call('LPOP', KEYS[keyCount])
            if result then
                return {KEYS[keyCount], result}
            end
        end
        return {nil, nil}
    """

    lua_delprefix = """
        local keys = redis.call('keys', ARGV[1])
        for i=1,#keys,5000 do
            redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
        end
    """

    ### Internal functions ###

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using MD5, then down to one of the ring nodes.
        """
        if isinstance(value, six.text_type):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xffffffff
        return (bigval // 0x100000) // self.ring_divisor

    def random_index(self):
        return random.randint(0, len(self.hosts) - 1)

    def connection(self, index):
        """
        Returns the correct connection for the current thread.

        Pass key to use a server based on consistent hashing of the key value;
        pass None to use a random server instead.
        """
        # If index is explicitly None, pick a random server
        if index is None:
            index = self.random_index()
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" % (self.ring_size, index))
        return self._connection_list[index]

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, six.text_type):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)
Example #35
class Cryptograph(object):
    """Symmetric encryption and decryption for the storage of sensitive data.

    We currently rely on Fernet, which was the algorithm adopted by Gratipay:
    https://github.com/gratipay/gratipay.com/pull/3998#issuecomment-216227070

    For encryption Fernet uses the AES cipher in CBC mode with PKCS7 padding and
    a 128 bits key. For authentication it uses HMAC-SHA256 with another 128 bits
    key.

    Fernet messages contain the timestamp at which they were generated *in plain
    text*. This isn't a problem for us since we want to store the time at which
    the data was encrypted in order to facilitate key rotation.

    We use CBOR (Concise Binary Object Representation) to serialize objects
    before encryption. Compared to JSON, CBOR is faster to parse and serialize,
    more compact, and extensible (it can represent any data type using "tags").
    More info on CBOR: http://cbor.io/ https://tools.ietf.org/html/rfc7049
    """

    KEY_ROTATION_DELAY = timedelta(weeks=1)

    def __init__(self):
        if website.env.aws_secret_access_key:
            sm = self.secrets_manager = boto3.client('secretsmanager', region_name='eu-west-1')
            secret = sm.get_secret_value(SecretId='Fernet')
            rotation_start = secret['CreatedDate'].date()
            keys = secret['SecretString'].split()
        else:
            self.secrets_manager = None
            parts = os.environ['SECRET_FERNET_KEYS'].split()
            rotation_start = date(*map(int, parts[0].split('-')))
            keys = parts[1:]
        self.fernet_rotation_start = rotation_start
        self.fernet_keys = [k.encode('ascii') for k in keys]
        self.fernet = MultiFernet([Fernet(k) for k in self.fernet_keys])

    def encrypt_dict(self, dic, allow_single_key=False):
        """Serialize and encrypt a dictionary for storage in the database.

        Encrypting partially predictable data may help an attacker break the
        encryption key, so to make our data less predictable we randomize the
        order of the dict's items before serializing it.

        For this to be effective the CBOR serializer must not sort the items
        again in an attempt to produce Canonical CBOR, so we explicitly pass
        `canonical=False` to the `cbor.dumps` function.

        In addition, the dict must not contain only one key if that key is
        predictable, so a `CryptoWarning` is emitted when `dic` only contains
        one key, unless `allow_single_key` is set to `True`.
        """
        dic = self.randomize_dict(dic, allow_single_key=allow_single_key)
        serialized = cbor.dumps(dic, canonical=False)
        encrypted = self.fernet.encrypt(serialized)
        return Encrypted(dict(scheme='fernet', payload=encrypted, ts=utcnow()))

    def decrypt(self, scheme, payload):
        """Decrypt and reconstruct an object stored in the database.
        """
        if scheme == 'fernet':
            decrypted = self.fernet.decrypt(payload)
        else:
            raise ValueError('unknown encryption scheme %r' % scheme)
        return cbor.loads(decrypted)

    @staticmethod
    def randomize_dict(dic, allow_single_key=False):
        """Randomize the order of a dictionary's items.

        Emits a `CryptoWarning` if `dic` only contains one key, unless
        `allow_single_key` is set to `True`.
        """
        if not isinstance(dic, dict):
            raise TypeError("expected a dict, got %s" % type(dic))
        # Compute the number of random bytes needed based on the size of the dict
        n = len(dic)
        if n < 2:
            # Can't randomize the order if the dict contains less than 2 items
            if n == 1 and not allow_single_key:
                warnings.warn("dict only contains one key", CryptoWarning)
            return dic
        n = int(log(n, 2) // 8) + 2
        # Return a new ordered dict sorted randomly
        return OrderedDict(
            t[1] for t in sorted((urandom(n), item) for item in dic.items())
        )

    def rotate_key(self):
        """Generate a new key and send it to the secrets manager.
        """
        keys = b' '.join([Fernet.generate_key()] + self.fernet_keys).decode()
        if self.secrets_manager:
            self.secrets_manager.update_secret(SecretId='Fernet', SecretString=keys)
        else:
            keys = utcnow().date().isoformat() + ' ' + keys
            print("No secrets manager, updating the key storage is up to you.")
        return keys

    def rotate_message(self, msg, force=False):
        """Re-encrypt a single message using the current primary key.

        The original timestamp included in the message is always preserved.
        Moreover, the entire message is returned unchanged if it was already
        encrypted with the latest key and `force` is `False` (the default).

        `InvalidToken` is raised if decryption fails.
        """
        timestamp, data = Fernet._get_unverified_token_data(msg)
        for i, fernet in enumerate(self.fernet._fernets):
            try:
                p = fernet._decrypt_data(data, timestamp, None)
            except InvalidToken:
                continue
            if i == 0 and not force:
                # This message was encrypted using the latest key, return it
                return msg
            break
        else:
            raise InvalidToken

        iv = os.urandom(16)
        return self.fernet._fernets[0]._encrypt_from_parts(p, timestamp, iv)

    def rotate_stored_data(self, wait=True):
        """Re-encrypt all the sensitive information stored in our database.

        This function is a special kind of "cron job" that returns one of two
        constants from the `liberapay.cron` module: `CRON_ENCORE`, indicating
        that the function needs to be run again to continue its work, or
        `CRON_STOP`, indicating that all the ciphertexts are up-to-date (or that
        it isn't time to rotate yet).

        Rows are processed in batches of 50. Timestamps are used to keep track of
        progress and to avoid overwriting new data with re-encrypted old data.

        The update only starts one week after the new key was generated, unless
        `wait` is set to `False`. This delay is to "ensure" that the previous
        key is no longer being used to encrypt new data.
        """
        update_start = self.fernet_rotation_start + self.KEY_ROTATION_DELAY
        if wait:
            if utcnow().date() < update_start:
                return CRON_STOP

        with website.db.get_cursor() as cursor:
            batch = cursor.all("""
                SELECT id, info
                  FROM identities
                 WHERE (info).ts <= %s
              ORDER BY (info).ts ASC
                 LIMIT 50
            """, (update_start,))
            if not batch:
                return CRON_STOP

            sql = """
                UPDATE identities
                   SET info = ('fernet', %s, current_timestamp)::encrypted
                 WHERE id = %s
                   AND (info).ts = %s;
            """
            args_list = [
                (self.rotate_message(r.info.payload), r.id, r.info.ts)
                for r in batch
            ]
            execute_batch(cursor, sql, args_list)

        return CRON_ENCORE
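
The `rotate_message` method above reaches into private Fernet internals
(`_get_unverified_token_data`, `_decrypt_data`, `_encrypt_from_parts`) in
order to preserve the token's original timestamp. A minimal sketch of the
same flow using only the public API (this assumes cryptography >= 2.2,
where `MultiFernet.rotate` provides the same timestamp-preserving
re-encryption):

from cryptography.fernet import Fernet, MultiFernet

old_key = Fernet.generate_key()
f = MultiFernet([Fernet(old_key)])
token = f.encrypt(b"sensitive payload")

# Prepend a freshly generated primary key, then re-encrypt the old token.
new_key = Fernet.generate_key()
f = MultiFernet([Fernet(new_key), Fernet(old_key)])
rotated = f.rotate(token)

# The rotated token now decrypts with the new key alone.
assert Fernet(new_key).decrypt(rotated) == b"sensitive payload"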
Example #36
class RedisChannelLayer(BaseChannelLayer):
    """
    Redis channel layer.

    It routes all messages through a remote Redis server.  Support for
    sharding across multiple Redis installations and for message
    encryption is provided.  Both synchronous and asynchronous (via
    Twisted) approaches are implemented.
    """

    blpop_timeout = 5
    global_statistics_expiry = 86400
    channel_statistics_expiry = 3600
    global_stats_key = '#global#'  # needs to be invalid as a channel name

    def __init__(
        self,
        expiry=60,
        hosts=None,
        prefix="asgi:",
        group_expiry=86400,
        capacity=100,
        channel_capacity=None,
        symmetric_encryption_keys=None,
        stats_prefix="asgi-meta:",
        connection_kwargs=None,
    ):
        super(RedisChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,
            capacity=capacity,
            channel_capacity=channel_capacity,
        )
        self.hosts = self._setup_hosts(hosts)

        self.prefix = prefix
        assert isinstance(self.prefix, six.text_type), "Prefix must be unicode"
        # Precalculate some values for ring selection
        self.ring_size = len(self.hosts)
        # Create connections ahead of time (they won't call out just yet, but
        # we want to connection-pool them later)
        socket_timeout = connection_kwargs and connection_kwargs.get(
            "socket_timeout", None)
        if socket_timeout and socket_timeout < self.blpop_timeout:
            raise ValueError("The socket timeout must be at least %s seconds" %
                             self.blpop_timeout)
        self._connection_list = self._generate_connections(
            redis_kwargs=connection_kwargs or {}, )
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(
            random.choice(string.ascii_letters) for i in range(8))
        self._register_scripts()
        self._setup_encryption(symmetric_encryption_keys)
        self.stats_prefix = stats_prefix

    def _setup_hosts(self, hosts):
        # Make sure they provided some hosts, or provide a default
        final_hosts = list()
        if not hosts:
            hosts = [("localhost", 6379)]

        if isinstance(hosts, six.string_types):
            # user accidentally used one host string instead of providing a list of hosts
            raise ValueError(
                'ASGI Redis hosts must be specified as an iterable list of hosts.'
            )

        for entry in hosts:
            if isinstance(entry, six.string_types):
                final_hosts.append(entry)
            else:
                final_hosts.append("redis://%s:%d/0" % (entry[0], entry[1]))
        return final_hosts

    def _register_scripts(self):
        connection = self.connection(None)
        self.chansend = connection.register_script(self.lua_chansend)
        self.lpopmany = connection.register_script(self.lua_lpopmany)
        self.delprefix = connection.register_script(self.lua_delprefix)
        self.incrstatcounters = connection.register_script(
            self.lua_incrstatcounters)

    def _setup_encryption(self, symmetric_encryption_keys):
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, six.string_types):
                raise ValueError(
                    "symmetric_encryption_keys must be a list of possible keys"
                )
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError(
                    "Cannot run with encryption without 'cryptography' installed."
                )
            sub_fernets = [
                self.make_fernet(key) for key in symmetric_encryption_keys
            ]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    def _generate_connections(self, redis_kwargs):
        return [
            redis.Redis.from_url(host, **redis_kwargs) for host in self.hosts
        ]

    ### ASGI API ###

    extensions = ["groups", "flush", "statistics"]
    try:
        import txredisapi
    except ImportError:
        pass
    else:
        extensions.append("twisted")

    def send(self, channel, message):
        # Typecheck
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Make sure the message does not contain reserved keys
        assert "__asgi_channel__" not in message
        # If it's a process-local channel, strip off local part and stick full name in message
        if "!" in channel:
            message = dict(message.items())
            message['__asgi_channel__'] = channel
            channel = self.non_local_name(channel)
        # Write out message into expiring key (avoids big items in list)
        # TODO: Use extended set, drop support for older redis?
        message_key = self.prefix + uuid.uuid4().hex
        channel_key = self.prefix + channel
        # Pick a connection to the right server - consistent for response
        # channels, random for normal channels
        if "!" in channel or "?" in channel:
            index = self.consistent_hash(channel)
            connection = self.connection(index)
        else:
            connection = self.connection(None)
        # Use the Lua function to do the set-and-push
        try:
            self.chansend(
                keys=[message_key, channel_key],
                args=[
                    self.serialize(message), self.expiry,
                    self.get_capacity(channel)
                ],
                client=connection,
            )
            self._incr_statistics_counter(
                stat_name=self.STAT_MESSAGES_COUNT,
                channel=channel,
                connection=connection,
            )
        except redis.exceptions.ResponseError as e:
            # The Lua script handles capacity checking and sends the "full" error back
            if e.args[0] == "full":
                self._incr_statistics_counter(
                    stat_name=self.STAT_CHANNEL_FULL,
                    channel=channel,
                    connection=connection,
                )
                raise self.ChannelFull
            elif "unknown command" in e.args[0]:
                raise UnsupportedRedis(
                    "Redis returned an error (%s). Please ensure you're running a "
                    " version of redis that is supported by asgi_redis." %
                    e.args[0])
            else:
                # Let any other exception bubble up
                raise

    def receive(self, channels, block=False):
        # List name get
        indexes = self._receive_list_names(channels)
        # Short circuit if no channels
        if indexes is None:
            return None, None
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            list_names = indexes[index]
            # Shuffle list_names to avoid the first ones starving others of workers
            random.shuffle(list_names)
            # Open a connection
            connection = self.connection(index)
            # Pop off any waiting message
            if block:
                try:
                    result = connection.blpop(list_names,
                                              timeout=self.blpop_timeout)
                except redis.exceptions.TimeoutError:
                    continue
            else:
                result = self.lpopmany(keys=list_names, client=connection)
            if result:
                content = connection.get(result[1])
                # If the content key expired, keep going.
                if content is None:
                    continue
                # Return the channel it's from and the message
                channel = result[0][len(self.prefix):].decode("utf8")
                message = self.deserialize(content)
                # If there is a full channel name stored in the message, unpack it.
                if "__asgi_channel__" in message:
                    channel = message['__asgi_channel__']
                    del message['__asgi_channel__']
                return channel, message
            else:
                return None, None

    def _receive_list_names(self, channels):
        """
        Inner logic of receive; takes channels, groups by shard, and
        returns {connection_index: list_names ...} if a query is needed or
        None for a vacuously empty response.
        """
        # Short circuit if no channels
        if not channels:
            return None
        # Check channel names are valid
        channels = list(channels)
        assert all(
            self.valid_channel_name(channel, receive=True)
            for channel in channels), "One or more channel names invalid"
        # Work out what servers to listen on for the given channels
        indexes = {}
        random_index = self.random_index()
        for channel in channels:
            if "!" in channel or "?" in channel:
                indexes.setdefault(self.consistent_hash(channel),
                                   []).append(self.prefix + channel, )
            else:
                indexes.setdefault(random_index,
                                   []).append(self.prefix + channel, )
        return indexes

    def new_channel(self, pattern):
        assert isinstance(pattern, six.text_type)
        # Keep making channel names till one isn't present.
        while True:
            random_string = "".join(
                random.choice(string.ascii_letters) for i in range(12))
            assert pattern.endswith("?")
            new_name = pattern + random_string
            # Get right connection
            index = self.consistent_hash(new_name)
            connection = self.connection(index)
            # Check to see if it's in the connected Redis.
            # This fails to stop collisions for sharding where the channel is
            # non-single-listener, but that seems very unlikely.
            key = self.prefix + new_name
            if not connection.exists(key):
                return new_name

    ### ASGI Group extension ###

    def group_add(self, group, channel):
        """
        Adds the channel to the named group for at least 'expiry'
        seconds (expiry defaults to message expiry if not provided).
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        group_key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Add to group sorted set with creation time as timestamp
        connection.zadd(group_key, **{channel: time.time()})
        # Set the group key's expiration to group_expiry, since everything
        # in it at this point is guaranteed to expire before then
        connection.expire(group_key, self.group_expiry)

    def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        self.connection(self.consistent_hash(group)).zrem(
            key,
            channel,
        )

    def group_channels(self, group):
        """
        Returns all channels in the group as an iterable.
        """
        key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Discard old channels based on group_expiry
        connection.zremrangebyscore(key, 0,
                                    int(time.time()) - self.group_expiry)
        # Return current lot
        return [x.decode("utf8") for x in connection.zrange(
            key,
            0,
            -1,
        )]

    def send_group(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # TODO: More efficient implementation (lua script per shard?)
        for channel in self.group_channels(group):
            try:
                self.send(channel, message)
            except self.ChannelFull:
                pass

    def _group_key(self, group):
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    ### Flush extension ###

    def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        for connection in self._connection_list:
            self.delprefix(keys=[],
                           args=[self.prefix + "*"],
                           client=connection)
            self.delprefix(keys=[],
                           args=[self.stats_prefix + "*"],
                           client=connection)

    ### Twisted extension ###

    @defer.inlineCallbacks
    def receive_twisted(self, channels):
        """
        Twisted-native implementation of receive.
        """
        # List name get
        indexes = self._receive_list_names(channels)
        # Short circuit if no channels
        if indexes is None:
            defer.returnValue((None, None))
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            list_names = indexes[index]
            # Shuffle list_names to avoid the first ones starving others of workers
            random.shuffle(list_names)
            # Get a sync connection for conn details
            sync_connection = self.connection(index)
            twisted_connection = yield txredisapi.ConnectionPool(
                host=sync_connection.connection_pool.connection_kwargs['host'],
                port=sync_connection.connection_pool.connection_kwargs['port'],
                dbid=sync_connection.connection_pool.connection_kwargs['db'],
                password=sync_connection.connection_pool.
                connection_kwargs['password'],
            )
            try:
                # Pop off any waiting message
                result = yield twisted_connection.blpop(
                    list_names, timeout=self.blpop_timeout)
                if result:
                    content = yield twisted_connection.get(result[1])
                    # If the content key expired, keep going.
                    if content is None:
                        continue
                    # Return the channel it's from and the message
                    channel = result[0][len(self.prefix):]
                    message = self.deserialize(content)
                    # If there is a full channel name stored in the message, unpack it.
                    if "__asgi_channel__" in message:
                        channel = message['__asgi_channel__']
                        del message['__asgi_channel__']
                    defer.returnValue((channel, message))
                else:
                    defer.returnValue((None, None))
            finally:
                yield twisted_connection.disconnect()

    ### statistics extension ###

    STAT_MESSAGES_COUNT = 'messages_count'
    STAT_MESSAGES_PENDING = 'messages_pending'
    STAT_MESSAGES_MAX_AGE = 'messages_max_age'
    STAT_CHANNEL_FULL = 'channel_full_count'

    def global_statistics(self):
        """
        Returns a dictionary of statistics across all channels on all shards.
        The return value is a dictionary with the following fields:
            * messages_count, the number of messages processed since server start
            * channel_full_count, the number of times the ChannelFull exception has been raised since server start

        This implementation does not provide calculated per-second values.
        Due to performance concerns, it does not provide aggregated
        messages_pending and messages_max_age; these are only available
        per channel.

        """
        return self._count_global_stats(self._connection_list)

    def _count_global_stats(self, connection_list):
        statistics = {
            self.STAT_MESSAGES_COUNT: 0,
            self.STAT_CHANNEL_FULL: 0,
        }
        prefix = self.stats_prefix + self.global_stats_key
        for connection in connection_list:
            messages_count, channel_full_count = connection.mget(
                ':'.join((prefix, self.STAT_MESSAGES_COUNT)),
                ':'.join((prefix, self.STAT_CHANNEL_FULL)),
            )
            statistics[self.STAT_MESSAGES_COUNT] += int(messages_count or 0)
            statistics[self.STAT_CHANNEL_FULL] += int(channel_full_count or 0)

        return statistics

    def channel_statistics(self, channel):
        """
        Returns a dictionary of statistics for the specified channel.
        The return value is a dictionary with the following fields:
            * messages_count, the number of messages processed since server start
            * messages_pending, the current number of messages waiting
            * messages_max_age, how long the oldest message has been waiting, in seconds
            * channel_full_count, the number of times the ChannelFull exception has been raised since server start

        This implementation does not provide calculated per-second values.
        """
        if "!" in channel or "?" in channel:
            connections = [self.connection(self.consistent_hash(channel))]
        else:
            # if we don't know where it is, we have to check in all shards
            connections = self._connection_list
        return self._count_channel_stats(channel, connections)

    def _count_channel_stats(self, channel, connections):
        statistics = {
            self.STAT_MESSAGES_COUNT: 0,
            self.STAT_MESSAGES_PENDING: 0,
            self.STAT_MESSAGES_MAX_AGE: 0,
            self.STAT_CHANNEL_FULL: 0,
        }
        prefix = self.stats_prefix + channel

        channel_key = self.prefix + channel
        for connection in connections:
            messages_count, channel_full_count = connection.mget(
                ':'.join((prefix, self.STAT_MESSAGES_COUNT)),
                ':'.join((prefix, self.STAT_CHANNEL_FULL)),
            )
            statistics[self.STAT_MESSAGES_COUNT] += int(messages_count or 0)
            statistics[self.STAT_CHANNEL_FULL] += int(channel_full_count or 0)
            statistics[self.STAT_MESSAGES_PENDING] += connection.llen(
                channel_key)
            oldest_message = connection.lindex(channel_key, 0)
            if oldest_message:
                messages_age = self.expiry - connection.ttl(oldest_message)
                statistics[self.STAT_MESSAGES_MAX_AGE] = max(
                    statistics[self.STAT_MESSAGES_MAX_AGE], messages_age)
        return statistics

    def _incr_statistics_counter(self, stat_name, channel, connection):
        """ helper function to intrement counter stats in one go """
        self.incrstatcounters(
            keys=[
                "{prefix}{channel}:{stat_name}".format(
                    prefix=self.stats_prefix,
                    channel=channel,
                    stat_name=stat_name,
                ), "{prefix}{global_key}:{stat_name}".format(
                    prefix=self.stats_prefix,
                    global_key=self.global_stats_key,
                    stat_name=stat_name,
                )
            ],
            args=[
                self.channel_statistics_expiry, self.global_statistics_expiry
            ],
            client=connection,
        )

    ### Serialization ###

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, encoding="utf8")

    ### Redis Lua scripts ###

    # Single-command channel send. Returns error if over capacity.
    # Keys: message, channel_list
    # Args: content, expiry, capacity
    lua_chansend = """
        if redis.call('llen', KEYS[2]) >= tonumber(ARGV[3]) then
            return redis.error_reply("full")
        end
        redis.call('set', KEYS[1], ARGV[1])
        redis.call('expire', KEYS[1], ARGV[2])
        redis.call('rpush', KEYS[2], KEYS[1])
        redis.call('expire', KEYS[2], ARGV[2] + 1)
    """

    # Single-command to increment counter stats.
    # Keys: channel_stat, global_stat
    # Args: channel_stat_expiry, global_stat_expiry
    lua_incrstatcounters = """
        redis.call('incr', KEYS[1])
        redis.call('expire', KEYS[1], ARGV[1])
        redis.call('incr', KEYS[2])
        redis.call('expire', KEYS[2], ARGV[2])

    """

    lua_lpopmany = """
        for keyCount = 1, #KEYS do
            local result = redis.call('LPOP', KEYS[keyCount])
            if result then
                return {KEYS[keyCount], result}
            end
        end
        return {nil, nil}
    """

    lua_delprefix = """
        local keys = redis.call('keys', ARGV[1])
        for i=1,#keys,5000 do
            redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
        end
    """

    ### Internal functions ###

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC, then down to one of the ring nodes.
        """
        if isinstance(value, six.text_type):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xfff
        ring_divisor = 4096 / float(self.ring_size)
        return int(bigval / ring_divisor)

    def random_index(self):
        return random.randint(0, len(self.hosts) - 1)

    def connection(self, index):
        """
        Returns the correct connection for the current thread.

        Pass an index (e.g. one derived from consistent_hash) to select a
        specific server; pass None to use a random server instead.
        """
        # If index is explicitly None, pick a random server
        if index is None:
            index = self.random_index()
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" %
                             (self.ring_size, index))
        return self._connection_list[index]

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, six.text_type):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)
Example #37
class AutopushSettings(object):
    """Main Autopush Settings Object"""
    options = ["crypto_key", "hostname", "min_ping_interval",
               "max_data"]

    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 message_tablename="message",
                 message_read_throughput=5,
                 message_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 # Reflected up from UDP Router
                 wake_timeout=0,
                 env='development',
                 enable_cors=False,
                 s3_bucket=DEFAULT_BUCKET,
                 senderid_expry=SENDERID_EXPRY,
                 senderid_list={},
                 hello_timeout=0,
                 auth_key=None,
                 ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be set up and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if auth_key is None:
            auth_key = []
        if not isinstance(auth_key, list):
            auth_key = [auth_key]
        self.auth_key = auth_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(storage_tablename,
                                               storage_read_throughput,
                                               storage_write_throughput)
        self.message_table = get_message_table(message_tablename,
                                               message_read_throughput,
                                               message_write_throughput)
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)
        self.message = Message(self.message_table, self.metrics)

        # Run preflight check
        preflight_check(self.storage, self.router)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self,
            router_conf.get("simplepush")
        )
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

    def update(self, **kwargs):
        """Update the arguments, if a ``crypto_key`` is in kwargs then the
        ``self.fernet`` attribute will be initialized"""
        for key, val in kwargs.items():
            if key == "crypto_key":
                fkeys = []
                if not isinstance(val, list):
                    val = [val]
                for v in val:
                    fkeys.append(Fernet(v))
                self.fernet = MultiFernet(fkeys)
            else:
                setattr(self, key, val)

    def make_endpoint(self, uaid, chid):
        """ Create an endpoint from the identifiers"""
        return self.endpoint_url + '/push/' + \
            self.fernet.encrypt((uaid + ':' + chid).encode('utf8'))
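
The `make_endpoint` method above hides the user agent and channel
identifiers inside a Fernet token appended to the endpoint URL. A hedged
sketch of the implied round trip (the host name and identifier values are
illustrative, not autopush's actual API):

from cryptography.fernet import Fernet, MultiFernet

fernet = MultiFernet([Fernet(Fernet.generate_key())])

uaid, chid = "user-agent-id", "channel-id"  # illustrative values
token = fernet.encrypt((uaid + ':' + chid).encode('utf8'))
endpoint = "https://updates.example.com/push/" + token.decode("ascii")

# The endpoint handler reverses the transformation.
recovered = fernet.decrypt(token).decode('utf8').split(':', 1)
assert recovered == [uaid, chid]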
Example #38
class AutopushSettings(object):
    """Main Autopush Settings Object"""
    options = ["crypto_key", "hostname", "min_ping_interval",
               "max_data"]

    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 message_tablename="message",
                 message_read_throughput=5,
                 message_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 # Reflected up from UDP Router
                 wake_timeout=0,
                 env='development',
                 enable_cors=False,
                 s3_bucket=DEFAULT_BUCKET,
                 senderid_expry=SENDERID_EXPRY,
                 senderid_list={},
                 hello_timeout=0,
                 bear_hash_key=None,
                 preflight_uaid="deadbeef00000000deadbeef000000000",
                 ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be set up and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if bear_hash_key is None:
            bear_hash_key = []
        if not isinstance(bear_hash_key, list):
            bear_hash_key = [bear_hash_key]
        self.bear_hash_key = bear_hash_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(
            storage_tablename,
            storage_read_throughput,
            storage_write_throughput)
        self.message_table = get_rotating_message_table(
            message_tablename)
        self._message_prefix = message_tablename
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)

        # Used to determine whether a connection is out of date with current
        # db objects. There are three noteworthy cases:
        # 1. "Last Month": the table requires a rollover.
        # 2. "This Month": the most common case.
        # 3. "Next Month": the system will soon be rolling over, but with
        #    timing, some nodes may roll over sooner. Ensuring the next month's
        #    table is present before the switchover is the main reason for this,
        #    just in case some nodes do switch sooner.
        self.create_initial_message_tables()

        # Run preflight check
        preflight_check(self.storage, self.router, preflight_uaid)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self,
            router_conf.get("simplepush")
        )
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

    @property
    def message(self):
        """Property that access the current message table"""
        return self.message_tables[self.current_msg_month]

    @message.setter
    def message(self, value):
        """Setter to set the current message table"""
        self.message_tables[self.current_msg_month] = value

    def _tomorrow(self):
        return datetime.date.today() + datetime.timedelta(days=1)

    def create_initial_message_tables(self):
        """Initializes a dict of the initial rotating messages tables.

        An entry for last months table, an entry for this months table,
        an entry for tomorrow, if tomorrow is a new month.

        """
        today = datetime.date.today()
        last_month = get_rotating_message_table(self._message_prefix, -1)
        this_month = get_rotating_message_table(self._message_prefix)
        self.current_month = today.month
        self.current_msg_month = this_month.table_name
        self.message_tables = {
            last_month.table_name: Message(last_month, self.metrics),
            this_month.table_name: Message(this_month, self.metrics)
        }
        if self._tomorrow().month != today.month:
            next_month = get_rotating_message_table(delta=1)
            self.message_tables[next_month.table_name] = Message(
                next_month, self.metrics)

    @inlineCallbacks
    def update_rotating_tables(self):
        """This method is intended to be tasked to run periodically off the
        twisted event hub to rotate tables.

        When today is in a new month compared to yesterday, we swap out all
        the table objects on the settings object.

        """
        today = datetime.date.today()
        tomorrow = self._tomorrow()
        if ((tomorrow.month != today.month) and
                sorted(self.message_tables.keys())[-1] !=
                tomorrow.month):
            next_month = get_rotating_message_table(
                self._message_prefix, 0, tomorrow)
            self.message_tables[next_month.table_name] = Message(
                next_month, self.metrics)

        if today.month == self.current_month:
            # No change in month, we're fine.
            returnValue(False)

        # Get tables for the new month, and verify they exist before we try to
        # switch over
        message_table = yield deferToThread(get_rotating_message_table,
                                            self._message_prefix)

        # Both tables found, safe to switch-over
        self.current_month = today.month
        self.current_msg_month = message_table.table_name
        self.message_tables[self.current_msg_month] = \
            Message(message_table, self.metrics)
        returnValue(True)

    def update(self, **kwargs):
        """Update the arguments, if a ``crypto_key`` is in kwargs then the
        ``self.fernet`` attribute will be initialized"""
        for key, val in kwargs.items():
            if key == "crypto_key":
                fkeys = []
                if not isinstance(val, list):
                    val = [val]
                for v in val:
                    fkeys.append(Fernet(v))
                self.fernet = MultiFernet(fkeys)
            else:
                setattr(self, key, val)

    def make_simplepush_endpoint(self, uaid, chid):
        """Create a simplepush endpoint"""
        root = self.endpoint_url + "/spush/"
        base = (uaid.replace('-', '').decode("hex") +
                chid.replace('-', '').decode("hex"))
        return root + 'v1/' + self.fernet.encrypt(base).strip('=')

    def make_endpoint(self, uaid, chid, key=None):
        """Create an v1 or v2 WebPush endpoint from the identifiers.

        Both endpoints use bytes instead of hex to reduce ID length.
        v0 is uaid.hex + ':' + chid.hex and is deprecated.
        v1 is the uaid + chid
        v2 is the uaid + chid + sha256(key).bytes

        :param uaid: User Agent Identifier
        :param chid: Channel or Subscription ID
        :param key: Optional Base64 URL-encoded application server key
        :returns: Push endpoint

        """
        root = self.endpoint_url + '/push/'
        base = (uaid.replace('-', '').decode("hex") +
                chid.replace('-', '').decode("hex"))

        if key is None:
            return root + 'v1/' + self.fernet.encrypt(base).strip('=')

        raw_key = base64url_decode(key.encode('utf8'))
        ep = self.fernet.encrypt(base + sha256(raw_key).digest()).strip('=')
        return root + 'v2/' + ep

    def parse_endpoint(self, token, version="v0", ckey_header=None):
        """Parse an endpoint into component elements of UAID, CHID and optional
        key hash if v2

        :param token: The obscured subscription data.
        :param version: This is the API version of the token.
        :param ckey_header: the Crypto-Key header bearing the public key
        (from Crypto-Key: p256ecdsa=)

        :raises ValueError: In the case of a malformed endpoint.

        :returns: a dict containing (uaid=UAID, chid=CHID, public_key=KEY)

        """
        token = self.fernet.decrypt(repad(token).encode('utf8'))
        public_key = None
        if ckey_header:
            try:
                crypto_key = CryptoKey(ckey_header)
            except CryptoKeyException:
                raise InvalidTokenException("Invalid key data")
            label = crypto_key.get_label('p256ecdsa')
            try:
                public_key = base64url_decode(label)
            except:
                # Ignore missing and malformed app server keys.
                pass

        if version == 'v0':
            if not VALID_V0_TOKEN.match(token):
                raise InvalidTokenException("Corrupted push token")
            items = token.split(':')
            return dict(uaid=items[0], chid=items[1], public_key=public_key)
        if version == 'v1' and len(token) != 32:
            raise InvalidTokenException("Corrupted push token")
        if version == 'v2':
            if len(token) != 64:
                raise InvalidTokenException("Corrupted push token")
            if not public_key:
                raise InvalidTokenException("Invalid key data")
            if not constant_time.bytes_eq(sha256(public_key).digest(),
                                          token[32:]):
                raise InvalidTokenException("Key mismatch")
        return dict(uaid=token[:16].encode('hex'),
                    chid=token[16:32].encode('hex'),
                    public_key=public_key)
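
The v2 branch of `parse_endpoint` above expects a decrypted token of
exactly 64 bytes: 16 bytes of UAID, 16 bytes of CHID, and the 32-byte
SHA-256 digest of the application server key. A sketch of that layout and
the constant-time comparison (all values are illustrative):

import os
from hashlib import sha256

from cryptography.hazmat.primitives import constant_time

uaid_bytes = os.urandom(16)
chid_bytes = os.urandom(16)
raw_key = os.urandom(65)  # stands in for an uncompressed P-256 public key

token = uaid_bytes + chid_bytes + sha256(raw_key).digest()
assert len(token) == 64

# The v2 check: the trailing 32 bytes must equal the hash of the key
# presented in the Crypto-Key header, compared in constant time.
assert constant_time.bytes_eq(sha256(raw_key).digest(), token[32:])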
Example #39
class AutopushSettings(object):
    """Main Autopush Settings Object"""
    options = ["crypto_key", "hostname", "min_ping_interval",
               "max_data"]

    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 message_tablename="message",
                 message_read_throughput=5,
                 message_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 # Reflected up from UDP Router
                 wake_timeout=0,
                 env='development',
                 enable_cors=False,
                 s3_bucket=DEFAULT_BUCKET,
                 senderid_expry=SENDERID_EXPRY,
                 senderid_list={},
                 hello_timeout=0,
                 auth_key=None,
                 ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be set up and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if auth_key is None:
            auth_key = []
        if not isinstance(auth_key, list):
            auth_key = [auth_key]
        self.auth_key = auth_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(
            storage_tablename,
            storage_read_throughput,
            storage_write_throughput)
        self.message_table = get_rotating_message_table(
            message_tablename)
        self._message_prefix = message_tablename
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)

        # Used to determine whether a connection is out of date with current
        # db objects
        self.current_msg_month = make_rotating_tablename(self._message_prefix)
        self.current_month = datetime.date.today().month
        self.create_initial_message_tables()

        # Run preflight check
        preflight_check(self.storage, self.router)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self,
            router_conf.get("simplepush")
        )
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

    @property
    def message(self):
        """Property that access the current message table"""
        return self.message_tables[self.current_msg_month]

    @message.setter
    def message(self, value):
        """Setter to set the current message table"""
        self.message_tables[self.current_msg_month] = value

    def create_initial_message_tables(self):
        """Initializes a dict of the initial rotating messages tables.

        An entry for last months table, and an entry for this months table.

        """
        last_month = get_rotating_message_table(self._message_prefix, -1)
        this_month = get_rotating_message_table(self._message_prefix)
        self.message_tables = {
            last_month.table_name: Message(last_month, self.metrics),
            this_month.table_name: Message(this_month, self.metrics),
        }

    @inlineCallbacks
    def update_rotating_tables(self):
        """This method is intended to be tasked to run periodically off the
        twisted event hub to rotate tables.

        When today is in a new month compared to yesterday, we swap out all
        the table objects on the settings object.

        """
        today = datetime.date.today()
        if today.month == self.current_month:
            # No change in month, we're fine.
            returnValue(False)

        # Get tables for the new month, and verify they exist before we try to
        # switch over
        message_table = yield deferToThread(get_rotating_message_table,
                                            self._message_prefix)

        # Both tables found, safe to switch-over
        self.current_month = today.month
        self.current_msg_month = message_table.table_name
        self.message_tables[self.current_msg_month] = \
            Message(message_table, self.metrics)
        returnValue(True)

    def update(self, **kwargs):
        """Update the arguments, if a ``crypto_key`` is in kwargs then the
        ``self.fernet`` attribute will be initialized"""
        for key, val in kwargs.items():
            if key == "crypto_key":
                fkeys = []
                if not isinstance(val, list):
                    val = [val]
                for v in val:
                    fkeys.append(Fernet(v))
                self.fernet = MultiFernet(fkeys)
            else:
                setattr(self, key, val)

    def make_endpoint(self, uaid, chid):
        """ Create an endpoint from the identifiers"""
        return self.endpoint_url + '/push/' + \
            self.fernet.encrypt((uaid + ':' + chid).encode('utf8'))
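
The `update` method above rebuilds `self.fernet` as a MultiFernet whose
first key is the one used for encryption; the remaining keys are accepted
only for decryption. This ordering is what lets a new `crypto_key` be
prepended without invalidating endpoints issued under the old key, as the
following minimal sketch shows:

from cryptography.fernet import Fernet, MultiFernet

old = Fernet.generate_key()
fernet = MultiFernet([Fernet(old)])
endpoint_token = fernet.encrypt(b"uaid:chid")

# Prepend a fresh primary key; previously issued endpoints still decrypt.
new = Fernet.generate_key()
fernet = MultiFernet([Fernet(new), Fernet(old)])
assert fernet.decrypt(endpoint_token) == b"uaid:chid"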
Example #40
class RedisChannelLayer(BaseChannelLayer):
    """
    Redis-backed channel layer. It routes messages through one or more Redis
    servers, with optional sharding and symmetric message encryption.
    """

    blpop_timeout = 5

    def __init__(self,
                 expiry=60,
                 hosts=None,
                 prefix="asgi:",
                 group_expiry=86400,
                 capacity=100,
                 channel_capacity=None,
                 symmetric_encryption_keys=None):
        super(RedisChannelLayer, self).__init__(
            expiry=expiry,
            group_expiry=group_expiry,
            capacity=capacity,
            channel_capacity=channel_capacity,
        )
        # Make sure they provided some hosts, or provide a default
        if not hosts:
            hosts = [("localhost", 6379)]
        self.hosts = []
        for entry in hosts:
            if isinstance(entry, six.string_types):
                self.hosts.append(entry)
            else:
                self.hosts.append("redis://%s:%d/0" % (entry[0], entry[1]))
        self.prefix = prefix
        assert isinstance(self.prefix, six.text_type), "Prefix must be unicode"
        # Precalculate some values for ring selection
        self.ring_size = len(self.hosts)
        self.ring_divisor = int(math.ceil(4096 / float(self.ring_size)))
        # Create connections ahead of time (they won't call out just yet, but
        # we want to connection-pool them later)
        self._connection_list = [
            redis.Redis.from_url(host) for host in self.hosts
        ]
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(
            random.choice(string.ascii_letters) for i in range(8))
        # Register scripts
        connection = self.connection(None)
        self.chansend = connection.register_script(self.lua_chansend)
        self.lpopmany = connection.register_script(self.lua_lpopmany)
        self.delprefix = connection.register_script(self.lua_delprefix)
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, six.string_types):
                raise ValueError(
                    "symmetric_encryption_keys must be a list of possible keys"
                )
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError(
                    "Cannot run with encryption without 'cryptography' installed."
                )
            sub_fernets = [
                self.make_fernet(key) for key in symmetric_encryption_keys
            ]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    ### ASGI API ###

    extensions = ["groups", "flush", "twisted"]

    def send(self, channel, message):
        # Typecheck
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Write out message into expiring key (avoids big items in list)
        # TODO: Use extended set, drop support for older redis?
        message_key = self.prefix + uuid.uuid4().hex
        channel_key = self.prefix + channel
        # Pick a connection to the right server - consistent for response
        # channels, random for normal channels
        if "!" in channel or "?" in channel:
            index = self.consistent_hash(channel)
            connection = self.connection(index)
        else:
            connection = self.connection(None)
        # Use the Lua function to do the set-and-push
        try:
            self.chansend(
                keys=[message_key, channel_key],
                args=[
                    self.serialize(message), self.expiry,
                    self.get_capacity(channel)
                ],
            )
        except redis.exceptions.ResponseError as e:
            # The Lua script handles capacity checking and sends the "full" error back
            if e.args[0] == "full":
                raise self.ChannelFull

    def receive_many(self, channels, block=False):
        # List name get
        indexes = self._receive_many_list_names(channels)
        # Short circuit if no channels
        if indexes is None:
            return None, None
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            list_names = indexes[index]
            # Shuffle list_names to avoid the first ones starving others of workers
            random.shuffle(list_names)
            # Open a connection
            connection = self.connection(index)
            # Pop off any waiting message
            if block:
                result = connection.blpop(list_names,
                                          timeout=self.blpop_timeout)
            else:
                result = self.lpopmany(keys=list_names, client=connection)
            if result:
                content = connection.get(result[1])
                # If the content key expired, keep going.
                if content is None:
                    continue
                # Return the channel it's from and the message
                return result[0][len(self.prefix):].decode(
                    "utf8"), self.deserialize(content)
            else:
                return None, None

    def _receive_many_list_names(self, channels):
        """
        Inner logic of receive_many; takes channels, groups by shard, and
        returns {connection_index: list_names ...} if a query is needed or
        None for a vacuously empty response.
        """
        # Short circuit if no channels
        if not channels:
            return None
        # Check channel names are valid
        channels = list(channels)
        assert all(
            self.valid_channel_name(channel)
            for channel in channels), "One or more channel names invalid"
        # Work out what servers to listen on for the given channels
        indexes = {}
        random_index = self.random_index()
        for channel in channels:
            if "!" in channel or "?" in channel:
                indexes.setdefault(self.consistent_hash(channel),
                                   []).append(self.prefix + channel, )
            else:
                indexes.setdefault(random_index,
                                   []).append(self.prefix + channel, )
        return indexes

    def new_channel(self, pattern):
        assert isinstance(pattern, six.text_type)
        # Keep making channel names till one isn't present.
        while True:
            random_string = "".join(
                random.choice(string.ascii_letters) for i in range(12))
            assert pattern.endswith("!") or pattern.endswith("?")
            new_name = pattern + random_string
            # Get right connection
            index = self.consistent_hash(new_name)
            connection = self.connection(index)
            # Check to see if it's in the connected Redis.
            # This fails to stop collisions for sharding where the channel is
            # non-single-listener, but that seems very unlikely.
            key = self.prefix + new_name
            if not connection.exists(key):
                return new_name

    ### ASGI Group extension ###

    def group_add(self, group, channel):
        """
        Adds the channel to the named group for at least 'expiry'
        seconds (expiry defaults to message expiry if not provided).
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        group_key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Add to group sorted set with creation time as timestamp
        connection.zadd(group_key, **{channel: time.time()})
        # Set expiration to be group_expiry, since everything in
        # it at this point is guaranteed to expire before that
        connection.expire(group_key, self.group_expiry)

    def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        self.connection(self.consistent_hash(group)).zrem(
            key,
            channel,
        )

    def group_channels(self, group):
        """
        Returns all channels in the group as an iterable.
        """
        key = self._group_key(group)
        connection = self.connection(self.consistent_hash(group))
        # Discard old channels based on group_expiry
        connection.zremrangebyscore(key, 0,
                                    int(time.time()) - self.group_expiry)
        # Return current lot
        return [x.decode("utf8") for x in connection.zrange(
            key,
            0,
            -1,
        )]

    def send_group(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # TODO: More efficient implementation (lua script per shard?)
        for channel in self.group_channels(group):
            try:
                self.send(channel, message)
            except self.ChannelFull:
                pass

    def _group_key(self, group):
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    ### Flush extension ###

    def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        for connection in self._connection_list:
            self.delprefix(keys=[],
                           args=[self.prefix + "*"],
                           client=connection)

    ### Twisted extension ###

    @defer.inlineCallbacks
    def receive_many_twisted(self, channels):
        """
        Twisted-native implementation of receive_many.
        """
        # List name get
        indexes = self._receive_many_list_names(channels)
        # Short circuit if no channels
        if indexes is None:
            defer.returnValue((None, None))
        # Get a message from one of our channels
        while True:
            # Select a random connection to use
            index = random.choice(list(indexes.keys()))
            list_names = indexes[index]
            # Shuffle list_names to avoid the first ones starving others of workers
            random.shuffle(list_names)
            # Get a sync connection for conn details
            sync_connection = self.connection(index)
            twisted_connection = yield txredisapi.ConnectionPool(
                host=sync_connection.connection_pool.connection_kwargs['host'],
                port=sync_connection.connection_pool.connection_kwargs['port'],
                dbid=sync_connection.connection_pool.connection_kwargs['db'],
            )
            try:
                # Pop off any waiting message
                result = yield twisted_connection.blpop(
                    list_names, timeout=self.blpop_timeout)
                if result:
                    content = yield twisted_connection.get(result[1])
                    # If the content key expired, keep going.
                    if content is None:
                        continue
                    # Return the channel it's from and the message
                    defer.returnValue((result[0][len(self.prefix):],
                                       self.deserialize(content)))
                else:
                    defer.returnValue((None, None))
            finally:
                yield twisted_connection.disconnect()

    ### Serialization ###

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, encoding="utf8")

    ### Redis Lua scripts ###

    # Single-command channel send. Returns error if over capacity.
    # Keys: message, channel_list
    # Args: content, expiry, capacity
    lua_chansend = """
        if redis.call('llen', KEYS[2]) >= tonumber(ARGV[3]) then
            return redis.error_reply("full")
        end
        redis.call('set', KEYS[1], ARGV[1])
        redis.call('expire', KEYS[1], ARGV[2])
        redis.call('rpush', KEYS[2], KEYS[1])
        redis.call('expire', KEYS[2], ARGV[2] + 1)
    """

    lua_lpopmany = """
        for keyCount = 1, #KEYS do
            local result = redis.call('LPOP', KEYS[keyCount])
            if result then
                return {KEYS[keyCount], result}
            end
        end
        return {nil, nil}
    """

    lua_delprefix = """
        local keys = redis.call('keys', ARGV[1])
        for i=1,#keys,5000 do
            redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
        end
    """

    ### Internal functions ###

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC32, then down to one of the ring nodes.
        """
        if isinstance(value, six.text_type):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xffffffff
        return (bigval // 0x100000) // self.ring_divisor

    def random_index(self):
        return random.randint(0, len(self.hosts) - 1)

    def connection(self, index):
        """
        Returns the correct connection for the given index.

        Pass an index (e.g. from consistent_hash) to target a specific server;
        pass None to use a random server instead.
        """
        # If index is explicitly None, pick a random server
        if index is None:
            index = self.random_index()
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" %
                             (self.ring_size, index))
        return self._connection_list[index]

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, six.text_type):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)
Example #41
class ChannelLayer(BaseChannelLayer):
    """
    Redis channel layer.
    It routes all messages into a remote Redis server. Support for
    sharding among multiple Redis installations and for message
    encryption is provided.
    """

    blpop_timeout = 5
    queue_get_timeout = 10

    def __init__(
        self,
        hosts=None,
        prefix="anthill",
        expiry=60,
        group_expiry=86400,
        capacity=100,
        channel_capacity=None,
        symmetric_encryption_keys=None,
    ):
        # Store basic information
        self.expiry = expiry
        self.group_expiry = group_expiry
        self.capacity = capacity
        self.channel_capacity = self.compile_capacities(channel_capacity or {})
        self.prefix = prefix
        assert isinstance(self.prefix, str), "Prefix must be unicode"
        # Cached redis connection pools and the event loop they are from
        self.pools = {}
        self.pools_loop = None
        # Configure the host objects
        self.hosts = self.decode_hosts(hosts)
        self.ring_size = len(self.hosts)
        # Normal channels choose a host index by cycling through the available hosts
        self._receive_index_generator = itertools.cycle(range(len(self.hosts)))
        self._send_index_generator = itertools.cycle(range(len(self.hosts)))
        # Decide on a unique client prefix to use in ! sections
        # TODO: ensure uniqueness better, e.g. Redis keys with SETNX
        self.client_prefix = "".join(
            random.choice(string.ascii_letters) for i in range(8))
        # Set up any encryption objects
        self._setup_encryption(symmetric_encryption_keys)
        # Number of coroutines trying to receive right now
        self.receive_count = 0
        # Event loop they are trying to receive on
        self.receive_event_loop = None
        # Main receive loop running
        self.receive_loop_task = None
        # Buffered messages by process-local channel name
        self.receive_buffer = collections.defaultdict(asyncio.Queue)

    def decode_hosts(self, hosts):
        """
        Takes the value of the "hosts" argument passed to the class and returns
        a list of kwargs to use for the Redis connection constructor.
        """
        # If no hosts were provided, return a default value
        if not hosts:
            return [{"address": ("localhost", 6379)}]
        # If they provided just a string, scold them.
        if isinstance(hosts, (str, bytes)):
            raise ValueError(
                "You must pass a list of Redis hosts, even if there is only one."
            )
        # Decode each hosts entry into a kwargs dict
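        # (In aioredis 1.x the "address" value may be a (host, port) tuple or,
        # depending on version, a "redis://..." URL string.)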
        result = []
        for entry in hosts:
            result.append({"address": entry})
        return result

    def _setup_encryption(self, symmetric_encryption_keys):
        # See if we can do encryption if they asked
        if symmetric_encryption_keys:
            if isinstance(symmetric_encryption_keys, (str, bytes)):
                raise ValueError(
                    "symmetric_encryption_keys must be a list of possible keys"
                )
            try:
                from cryptography.fernet import MultiFernet
            except ImportError:
                raise ValueError(
                    "Cannot run with encryption without 'cryptography' installed."
                )
            sub_fernets = [
                self.make_fernet(key) for key in symmetric_encryption_keys
            ]
            self.crypter = MultiFernet(sub_fernets)
        else:
            self.crypter = None

    # Channel layer API #

    extensions = ["groups", "flush"]

    async def send(self, channel, message):
        """
        Send a message onto a (general or specific) channel.
        """
        # Type check
        assert isinstance(message, dict), "message is not a dict"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Make sure the message does not contain reserved keys
        assert "__anthill_channel__" not in message
        # If it's a process-local channel, strip off local part and stick full name in message
        channel_non_local_name = channel
        if "!" in channel:
            message = dict(message.items())
            message["__anthill_channel__"] = channel
            channel_non_local_name = self.non_local_name(channel)
        # Write out message into expiring key (avoids big items in list)
        channel_key = self.prefix + channel_non_local_name
        # Pick a connection to the right server - consistent for specific
        # channels, random for general channels
        if "!" in channel:
            index = self.consistent_hash(channel)
        else:
            index = next(self._send_index_generator)
        async with self.connection(index) as connection:
            # Check the length of the list before send
            # This can allow the list to leak slightly over capacity, but that's fine.
            if await connection.llen(channel_key) >= self.get_capacity(
                    channel):
                raise ChannelFull()
            # Push onto the list then set it to expire in case it's not consumed
            await connection.rpush(channel_key, self.serialize(message))
            await connection.expire(channel_key, int(self.expiry))

    async def receive(self, channel):
        """
        Receive the first message that arrives on the channel.
        If more than one coroutine waits on the same channel, the first waiter
        will be given the message when it arrives.
        """
        # Make sure the channel name is valid then get the non-local part
        # and thus its index
        assert self.valid_channel_name(channel)
        if "!" in channel:
            real_channel = self.non_local_name(channel)
            assert real_channel.endswith(self.client_prefix +
                                         "!"), "Wrong client prefix"
            # Enter receiving section
            loop = asyncio.get_event_loop()
            self.receive_count += 1
            try:
                if self.receive_count == 1:
                    # If we're the first coroutine in, make a receive loop!
                    general_channel = self.non_local_name(channel)
                    self.receive_loop_task = loop.create_task(
                        self.receive_loop(general_channel))
                    self.receive_event_loop = loop
                else:
                    # Otherwise, check our event loop matches
                    if self.receive_event_loop != loop:
                        raise RuntimeError(
                            "Two event loops are trying to receive() on one channel layer at once!"
                        )
                    if self.receive_loop_task.done():
                        # Maybe raise an exception from the task
                        self.receive_loop_task.result()
                        # Raise our own exception if that failed
                        raise RuntimeError("Redis receive loop exited early")

                # Wait for our message to appear
                while True:
                    try:
                        message = await asyncio.wait_for(
                            self.receive_buffer[channel].get(),
                            self.queue_get_timeout)
                        if self.receive_buffer[channel].empty():
                            del self.receive_buffer[channel]
                        return message
                    except asyncio.TimeoutError:
                        # See if we need to propagate a dead receiver exception
                        if self.receive_loop_task.done():
                            self.receive_loop_task.result()

            finally:
                self.receive_count -= 1
                # If we were the last out, stop the receive loop
                if self.receive_count == 0:
                    self.receive_loop_task.cancel()
        else:
            # Do a plain direct receive
            return (await self.receive_single(channel))[1]

    async def receive_loop(self, general_channel):
        """
        Continuous-receiving loop that makes sure something is fetching results
        for the channel passed in.
        """
        assert general_channel.endswith(
            "!"
        ), "receive_loop not called on general queue of process-local channel"
        while True:
            real_channel, message = await self.receive_single(general_channel)
            await self.receive_buffer[real_channel].put(message)

    async def receive_single(self, channel):
        """
        Receives a single message off of the channel and returns it.
        """
        # Check channel name
        assert self.valid_channel_name(channel,
                                       receive=True), "Channel name invalid"
        # Work out the connection to use
        if "!" in channel:
            assert channel.endswith("!")
            index = self.consistent_hash(channel)
        else:
            index = next(self._receive_index_generator)
        # Get that connection and receive off of it
        async with self.connection(index) as connection:
            channel_key = self.prefix + channel
            content = None
            while content is None:
                content = await connection.blpop(channel_key,
                                                 timeout=self.blpop_timeout)
            # Message decode
            message = self.deserialize(content[1])
            # TODO: message expiry?
            # If there is a full channel name stored in the message, unpack it.
            if "__anthill_channel__" in message:
                channel = message["__anthill_channel__"]
                del message["__anthill_channel__"]
            return channel, message

    async def new_channel(self, prefix="specific"):
        """
        Returns a new channel name that can be used by something in our
        process as a specific channel.
        """
        # TODO: Guarantee uniqueness better?
        return "%s.%s!%s" % (
            prefix,
            self.client_prefix,
            "".join(random.choice(string.ascii_letters) for i in range(12)),
        )

    # Flush extension #

    async def flush(self):
        """
        Deletes all messages and groups on all shards.
        """
        # Lua deletion script
        delete_prefix = """
            local keys = redis.call('keys', ARGV[1])
            for i=1,#keys,5000 do
                redis.call('del', unpack(keys, i, math.min(i+4999, #keys)))
            end
        """
        # Go through each connection and remove all with prefix
        for i in range(self.ring_size):
            async with self.connection(i) as connection:
                await connection.eval(delete_prefix,
                                      keys=[],
                                      args=[self.prefix + "*"])

    # Groups extension #

    async def group_add(self, group, channel):
        """
        Adds the channel name to a group.
        """
        # Check the inputs
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        # Get a connection to the right shard
        group_key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            # Add to group sorted set with creation time as timestamp
            await connection.zadd(
                group_key,
                time.time(),
                channel,
            )
            # Set expiration to be group_expiry, since everything in
            # it at this point is guaranteed to expire before that
            await connection.expire(group_key, self.group_expiry)

    async def group_discard(self, group, channel):
        """
        Removes the channel from the named group if it is in the group;
        does nothing otherwise (does not error)
        """
        assert self.valid_group_name(group), "Group name not valid"
        assert self.valid_channel_name(channel), "Channel name not valid"
        key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            await connection.zrem(key, channel)

    async def group_send(self, group, message):
        """
        Sends a message to the entire group.
        """
        assert self.valid_group_name(group), "Group name not valid"
        # Retrieve list of all channel names
        key = self._group_key(group)
        async with self.connection(self.consistent_hash(group)) as connection:
            # Discard old channels based on group_expiry
            await connection.zremrangebyscore(key,
                                              min=0,
                                              max=int(time.time()) -
                                              self.group_expiry)

            # Return current lot
            channel_names = [
                x.decode("utf8") for x in await connection.zrange(key, 0, -1)
            ]

        connection_to_channels, channel_to_message, channel_to_capacity, channel_to_key = \
            self._map_channel_to_connection(channel_names, message)

        for connection_index, channel_redis_keys in connection_to_channels.items():
            # Create a Lua script specific to this connection.
            # Use the message specific to each channel: it is stored in the
            # channel_to_message dict and already contains the
            # __anthill_channel__ key.
            group_send_lua = """
                for i=1,#KEYS do
                    if redis.call('LLEN', KEYS[i]) < tonumber(ARGV[i + #KEYS]) then
                        redis.call('RPUSH', KEYS[i], ARGV[i])
                        redis.call('EXPIRE', KEYS[i], %d)
                    end
                end
            """ % self.expiry

            # We need to filter the messages to keep those related to the connection
            args = [
                channel_to_message[channel_name]
                for channel_name in channel_names
                if channel_to_key[channel_name] in channel_redis_keys
            ]

            # We need to send the capacity for each channel
            args += [
                channel_to_capacity[channel_name]
                for channel_name in channel_names
                if channel_to_key[channel_name] in channel_redis_keys
            ]

            async with self.connection(connection_index) as connection:
                await connection.eval(group_send_lua,
                                      keys=channel_redis_keys,
                                      args=args)

    def _map_channel_to_connection(self, channel_names, message):
        """
        For a list of channel names, bucket each one to a dict keyed by the
        connection index.
        Also for each channel create a message specific to that channel, adding
        the __anthill_channel__ key to the message.
        We also return a mapping from channel names to their corresponding Redis
        keys, and a mapping of channels to their capacity.
        """
        connection_to_channels = collections.defaultdict(list)
        channel_to_message = dict()
        channel_to_capacity = dict()
        channel_to_key = dict()

        for channel in channel_names:
            channel_non_local_name = channel
            channel_message = message
            if "!" in channel:
                # Copy before tagging so one channel's tag never leaks into
                # another channel's serialized message.
                channel_message = dict(message.items())
                channel_message["__anthill_channel__"] = channel
                channel_non_local_name = self.non_local_name(channel)
            channel_key = self.prefix + channel_non_local_name
            idx = self.consistent_hash(channel_non_local_name)
            connection_to_channels[idx].append(channel_key)
            channel_to_capacity[channel] = self.get_capacity(channel)
            channel_to_message[channel] = self.serialize(channel_message)
            # Remember the Redis key used for this channel
            channel_to_key[channel] = channel_key

        return connection_to_channels, channel_to_message, channel_to_capacity, channel_to_key

    def _group_key(self, group):
        """
        Common function to make the storage key for the group.
        """
        return ("%s:group:%s" % (self.prefix, group)).encode("utf8")

    # Serialization #

    def serialize(self, message):
        """
        Serializes message to a byte string.
        """
        value = msgpack.packb(message, use_bin_type=True)
        if self.crypter:
            value = self.crypter.encrypt(value)
        return value

    def deserialize(self, message):
        """
        Deserializes from a byte string.
        """
        if self.crypter:
            message = self.crypter.decrypt(message, self.expiry + 10)
        return msgpack.unpackb(message, raw=False)

    # Internal functions #

    def consistent_hash(self, value):
        """
        Maps the value to a node value between 0 and 4095
        using CRC, then down to one of the ring nodes.
        """
        if isinstance(value, str):
            value = value.encode("utf8")
        bigval = binascii.crc32(value) & 0xfff
        ring_divisor = 4096 / float(self.ring_size)
        return int(bigval / ring_divisor)

    def make_fernet(self, key):
        """
        Given a single encryption key, returns a Fernet instance using it.
        """
        from cryptography.fernet import Fernet
        if isinstance(key, str):
            key = key.encode("utf8")
        formatted_key = base64.urlsafe_b64encode(hashlib.sha256(key).digest())
        return Fernet(formatted_key)

    def __str__(self):
        return "%s(hosts=%s)" % (self.__class__.__name__, self.hosts)

    # Connection handling #

    def connection(self, index):
        """
        Returns the correct connection for the index given.
        Lazily instantiates pools.
        """
        # Catch bad indexes
        if not 0 <= index < self.ring_size:
            raise ValueError("There are only %s hosts - you asked for %s!" %
                             (self.ring_size, index))
        # Make a context manager
        return self.ConnectionContextManager(self.hosts[index])

    class ConnectionContextManager:
        """
        Async context manager for connections
        """
        def __init__(self, kwargs):
            self.kwargs = kwargs

        async def __aenter__(self):
            self.conn = await aioredis.create_redis(**self.kwargs)
            return self.conn

        async def __aexit__(self, exc_type, exc, tb):
            self.conn.close()
            # Wait for the connection to actually close before returning.
            await self.conn.wait_closed()
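
# --- Usage sketch (not part of the original example) ---
# A minimal, hedged demo of the async layer above; it assumes a Redis server
# on localhost:6379 and uses an illustrative channel name.
import asyncio

async def _demo():
    layer = ChannelLayer(hosts=[("localhost", 6379)])
    await layer.send("chat", {"text": "hello"})
    message = await layer.receive("chat")
    assert message == {"text": "hello"}

asyncio.get_event_loop().run_until_complete(_demo())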