import json


def gen_keys(key_size):
    try:
        from jwcrypto.jwk import JWK, JWKSet
    except ImportError as e:
        msg = "You have to install jwcrypto to use this function"
        print(msg)
        raise ImportError(msg) from e

    jwk = JWK()
    jwk.generate_key(generate="RSA", size=key_size)

    contents = jwk.export_to_pem(private_key=True, password=None)
    with open("private.pem", "w") as priv_pem_file:
        priv_pem_file.write(contents.decode("utf8"))

    contents = jwk.export_to_pem(private_key=False, password=None)
    with open("public.pem", "w") as pub_pem_file:
        pub_pem_file.write(contents.decode("utf8"))

    jwks = JWKSet()
    jwks.add(jwk)

    raw = jwks.export(private_keys=True)
    formatted = json.dumps(json.loads(raw), indent=2)
    with open("private.json", "w") as priv_jwks_file:
        priv_jwks_file.write(formatted)

    raw = jwks.export(private_keys=False)
    formatted = json.dumps(json.loads(raw), indent=2)
    with open("public.json", "w") as public_jwks_file:
        public_jwks_file.write(formatted)
import json


def dump_pem_to_jwks(in_private):
    try:
        from jwcrypto.jwk import JWK, JWKSet
    except ImportError as e:
        msg = "You have to install jwcrypto to use this function"
        print(msg)
        raise ImportError(msg) from e

    with open(in_private, "rb") as privfile:
        data = privfile.read()

    jwk = JWK()
    jwk.import_from_pem(data)

    jwks = JWKSet()
    jwks.add(jwk)

    raw = jwks.export(private_keys=True)
    formatted = json.dumps(json.loads(raw), indent=2)
    with open("private.json", "w") as priv_jwks_file:
        priv_jwks_file.write(formatted)

    raw = jwks.export(private_keys=False)
    formatted = json.dumps(json.loads(raw), indent=2)
    with open("public.json", "w") as public_jwks_file:
        public_jwks_file.write(formatted)
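A minimal invocation sketch for the two helpers above; the 2048-bit key size and the private.pem path are illustrative assumptions (both functions hard-code their output filenames in the current directory).

# Hypothetical invocation; 2048 and "private.pem" are placeholder choices.
gen_keys(2048)                   # writes private.pem/public.pem and private.json/public.json
dump_pem_to_jwks("private.pem")  # rebuilds the JWKS files from an existing PEM private key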
import base64

from jwcrypto.jwk import JWK, JWKSet


def listAll():
    repo = KeyRepository(getDb())
    docList = repo.fetchAll()
    jwkset = JWKSet()
    for keyDoc in docList:
        jwk = JWK.from_pem(base64.b64decode(keyDoc.publicKey))
        # wrong way, no idea how to make it proper :|
        jwk._params['kid'] = str(keyDoc.id)
        jwk._params['alg'] = keyDoc.algorithm
        jwkset.add(jwk)
    return jwkset.export(private_keys=False, as_dict=True)
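The _params assignment above pokes a private attribute; a hedged alternative, staying on documented jwcrypto calls, is to export the imported key as plain JSON, attach kid/alg to the resulting dict, and rebuild the JWK from it. keyDoc below stands in for the same database document used in listAll().

import base64
import json

from jwcrypto.jwk import JWK


def jwk_with_metadata(keyDoc):
    # Round-trip through the public JSON form instead of touching JWK internals.
    imported = JWK.from_pem(base64.b64decode(keyDoc.publicKey))
    params = json.loads(imported.export_public())
    params['kid'] = str(keyDoc.id)
    params['alg'] = keyDoc.algorithm
    return JWK(**params)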
def configure(self, opts, changes):
    if opts["openidc"] != "yes":
        return

    path = os.path.join(opts["data_dir"], "openidc")
    if not os.path.exists(path):
        os.makedirs(path, 0o700)

    keyfile = os.path.join(path, "openidc.key")
    keyid = int(time.time())
    keyset = JWKSet()
    # We generate one RSA2048 signing key
    rsasig = JWK(generate="RSA", size=2048, use="sig", kid="%s-sig" % keyid)
    keyset.add(rsasig)
    # We generate one RSA2048 encryption key
    rsasig = JWK(generate="RSA", size=2048, use="enc", kid="%s-enc" % keyid)
    keyset.add(rsasig)
    with open(keyfile, "w") as m:
        m.write(keyset.export())

    proto = "https"
    url = "%s://%s/%s/openidc/" % (proto, opts["hostname"], opts["instance"])

    subject_salt = uuid.uuid4().hex
    if opts["openidc_subject_salt"]:
        subject_salt = opts["openidc_subject_salt"]

    # Add configuration data to database
    po = PluginObject(*self.pargs)
    po.name = "openidc"
    po.wipe_data()
    po.wipe_config_values()
    config = {
        "endpoint url": url,
        "database url": opts["openidc_dburi"] or
        opts["database_url"] % {"datadir": opts["data_dir"],
                                "dbname": "openidc"},
        "enabled extensions": opts["openidc_extensions"],
        "idp key file": keyfile,
        "idp sig key id": "%s-sig" % keyid,
        "idp subject salt": subject_salt,
    }
    po.save_plugin_config(config)

    # Update global config to add login plugin
    po.is_enabled = True
    po.save_enabled_state()
def init(workdir):
    # Initialize SAML2, since this is quite tricky to get right
    cert = Certificate(os.path.join(workdir, 'saml2'))
    cert.generate('certificate', 'ipsilon-quickrun')
    url = 'http://localhost:8080/'
    validity = 365 * 5
    meta = IdpMetadataGenerator(url, cert, timedelta(validity))
    meta.output(os.path.join(workdir, 'saml2', 'metadata.xml'))

    # Also initialize OpenID Connect
    keyfile = os.path.join(workdir, 'openidc.key')
    keyset = JWKSet()
    # We generate one RSA2048 signing key
    rsasig = JWK(generate='RSA', size=2048, use='sig', kid='quickstart')
    keyset.add(rsasig)
    with open(keyfile, 'w') as m:
        m.write(keyset.export())
def init(workdir):
    # Initialize SAML2, since this is quite tricky to get right
    cert = Certificate(os.path.join(workdir, 'saml2'))
    cert.generate('certificate', 'ipsilon-quickrun')
    url = 'http://localhost:8080/idp'
    validity = 365 * 5
    meta = IdpMetadataGenerator(url, cert, timedelta(validity))
    meta.output(os.path.join(workdir, 'saml2', 'metadata.xml'))

    # Also initialize OpenID Connect
    keyfile = os.path.join(workdir, 'openidc.key')
    keyset = JWKSet()
    # We generate one RSA2048 signing key
    rsasig = JWK(generate='RSA', size=2048, use='sig', kid='quickstart')
    keyset.add(rsasig)
    with open(keyfile, 'w') as m:
        m.write(keyset.export())
#!/usr/bin/python
import time
import os.path

from jwcrypto.jwk import JWK, JWKSet

keyid = int(time.time())

keyset = JWKSet()
rsasig = JWK(generate='RSA', size=2048, use='sig', kid='%s-sig' % keyid)
keyset.add(rsasig)
rsasig = JWK(generate='RSA', size=2048, use='enc', kid='%s-enc' % keyid)
keyset.add(rsasig)

if not os.path.exists('/var/lib/ipsilon/idp/openidc'):
    os.makedirs('/var/lib/ipsilon/idp/openidc')

with open('/var/lib/ipsilon/idp/openidc/openidc.key', 'w') as m:
    m.write(keyset.export())
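A hedged readback sketch for the file written by the script above: load it with JWKSet.import_keyset() and look up the signing key by kid. keyid is assumed to be the same timestamp recorded when the keys were generated.

from jwcrypto.jwk import JWKSet

# keyid must match the timestamp used when the key file was written.
with open('/var/lib/ipsilon/idp/openidc/openidc.key') as f:
    keyset = JWKSet()
    keyset.import_keyset(f.read())

sig_key = keyset.get_key('%s-sig' % keyid)  # None if no such kid exists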
def jwks(self) -> str:
    jwks = JWKSet()
    jwks.add(self._jwk)
    return jwks.export(private_keys=False)
def configure(self, opts, changes):
    if opts['openidc'] != 'yes':
        return

    path = os.path.join(opts['data_dir'], 'openidc')
    if not os.path.exists(path):
        os.makedirs(path, 0o700)

    keyfile = os.path.join(path, 'openidc.key')
    keyid = int(time.time())
    keyset = JWKSet()
    # We generate one RSA2048 signing key
    rsasig = JWK(generate='RSA', size=2048, use='sig', kid='%s-sig' % keyid)
    keyset.add(rsasig)
    # We generate one RSA2048 encryption key
    rsasig = JWK(generate='RSA', size=2048, use='enc', kid='%s-enc' % keyid)
    keyset.add(rsasig)
    with open(keyfile, 'w') as m:
        m.write(keyset.export())

    proto = 'https'
    url = '%s://%s%s/openidc/' % (proto, opts['hostname'], opts['instanceurl'])

    subject_salt = uuid.uuid4().hex
    if opts['openidc_subject_salt']:
        subject_salt = opts['openidc_subject_salt']

    # Add configuration data to database
    po = PluginObject(*self.pargs)
    po.name = 'openidc'
    po.wipe_data()
    po.wipe_config_values()
    config = {
        'endpoint url': url,
        'database url': opts['openidc_dburi'] or
        opts['database_url'] % {'datadir': opts['data_dir'],
                                'dbname': 'openidc'},
        'static database url': opts['openidc_static_dburi'] or
        opts['database_url'] % {'datadir': opts['data_dir'],
                                'dbname': 'openidc.static'},
        'enabled extensions': opts['openidc_extensions'],
        'idp key file': keyfile,
        'idp sig key id': '%s-sig' % keyid,
        'idp subject salt': subject_salt
    }
    po.save_plugin_config(config)

    # Update global config to add login plugin
    po.is_enabled = True
    po.save_enabled_state()
import json
import logging
from asyncio import shield, sleep
from datetime import timedelta
from typing import Collection, Optional, Union

from aiohttp import ClientSession

LOG = logging.getLogger(__name__)
# Assumed default; the original module defines its own poll interval.
DEFAULT_POLL_INTERVAL = timedelta(minutes=5)


class JWTValidator:
    def __init__(self, jwks_urls: "Optional[Union[str, Collection[str]]]" = None):
        from jwcrypto.jwk import JWKSet

        self.jwks_urls = jwks_urls
        self.keys = JWKSet()
        self.session = None  # type: Optional[ClientSession]

    async def poll(self, poll_interval: timedelta = DEFAULT_POLL_INTERVAL):
        """
        Periodically check for new keys.

        This coroutine will NEVER terminate naturally, so it should not be awaited.
        """
        while True:
            await sleep(poll_interval.total_seconds())
            LOG.debug("JWKS poller polling for new keys...")
            await shield(self._load_new_keys())
            LOG.debug("...JWKS poll complete.")

    async def decode_claims(self, token: str) -> "JWTClaims":
        from jwcrypto.jwt import JWT

        LOG.debug("Verifying token: %r", token)
        jwt = JWT(jwt=token)
        kid = jwt.token.jose_header["kid"]
        await self.get_key(kid)
        jwt = JWT(jwt=token, key=self.keys)
        return json.loads(jwt.claims)

    async def get_key(self, kid: str) -> "Optional[JWK]":
        """
        Retrieve a key for a given ``kid``.

        If the key could not be found, the keystore is refreshed, and if a key
        is discovered through that process, it is returned.

        May return ``None`` if, even after refreshing keys from the remote JWKS
        source, a key for the given ``kid`` could not be found.
        """
        key = self.keys.get_key(kid)
        if key is None:
            await shield(self._load_new_keys())
            key = self.keys.get_key(kid)
        return key

    def export_all_keys(self) -> "str":
        """
        Return a JSON string that formats all public keys currently in the
        store as JWKS.
        """
        return self.keys.export(private_keys=False)

    async def _load_new_keys(self):
        from jwcrypto.jwk import JWK

        for url in self.jwks_urls:
            try:
                if self.session is None:
                    self.session = ClientSession()
                async with self.session.get(url, allow_redirects=False) as response:
                    jwks_json = await response.json()

                # ``JWKSet.import_keyset`` suffers from a few critical flaws that make
                # it unusable for us:
                # 1) ``import_keyset`` internally adds keys to a set, which is
                #    semantically correct. However, because JWK has no __eq__ or
                #    __hash__ implementation, EVERY key is repeatedly appended to the
                #    set rather than duplicates getting filtered out.
                # 2) The previous point necessitates that we pre-process the data to
                #    filter out keys that we do not wish to add, thereby requiring us to
                #    parse the JSON and read the payload. ``import_keyset`` expects its
                #    argument as a serialized JSON string, which it promptly parses back
                #    into a data structure.
                #
                # We process JWKS endpoints ourselves and selectively add keys directly
                # to the implementation. We are NOT reimplementing ``import_keyset``'s
                # functionality of carrying additional non-``keys`` fields into the
                # ``JWKSet`` object. We are generating JWKS data ourselves, and always
                # generate data that contains only the single top-level property of
                # ``keys``, so this has no impact on us.
                jwks_keys = jwks_json.get("keys")
                if jwks_keys is None:
                    LOG.warning(
                        'The JWKS endpoint did not return a "keys" property, so no new '
                        "keys were added. This will be retried.")
                    continue

                existing_kids = {k.key_id for k in self.keys}
                for jwk_dict in jwks_keys:
                    kid = jwk_dict.get("kid")
                    if kid is None:
                        LOG.warning(
                            'The JWKS endpoint contained a key without a "kid" field. '
                            "It will be dropped.")
                    elif kid in existing_kids:
                        LOG.debug(
                            "We already know about kid %s, so the new value will be "
                            "ignored.", kid,
                        )
                    else:
                        jwk = None
                        try:
                            jwk = JWK(**jwk_dict)
                        except Exception:  # noqa
                            LOG.exception(
                                f"The JWK identified by {kid} could not be parsed."
                            )
                        if jwk is not None:
                            try:
                                self.keys.add(jwk)
                            except Exception:  # noqa
                                LOG.exception(
                                    f"The JWK identified by {kid} could not be added."
                                )
            except Exception as ex:  # noqa
                # Do NOT log these with full stack traces because they're actually
                # fairly common, particularly at startup when user-service has yet to
                # start. Merely logging the text of the exception without a scary stack
                # trace is sufficient.
                LOG.warning("Error when checking url %r for new keys: %s", url, ex)

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if self.session is not None:
            await self.session.close()
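A minimal usage sketch for the validator above; the JWKS URL and the token argument are placeholders, and error handling is omitted.

# Hypothetical caller; the URL is a placeholder for a real JWKS endpoint.
async def verify(token: str) -> dict:
    async with JWTValidator(jwks_urls=['https://idp.example.com/jwks.json']) as validator:
        return await validator.decode_claims(token)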