def ValidateCertificateSignature(self, signed_cert, signing_cert):
    """Given a cert signed by another cert, validates the signature."""
    # First the naive way -- note this does not check expiry / use etc.
    signed = x509.load_der_x509_certificate(
        der_encoder.encode(signed_cert), default_backend())
    signing = x509.load_der_x509_certificate(
        der_encoder.encode(signing_cert), default_backend())
    try:
        # public_key().verify() replaces the legacy verifier()/update()/verify()
        # API that was removed from recent versions of cryptography.
        signing.public_key().verify(
            signed.signature,
            signed.tbs_certificate_bytes,
            padding.PKCS1v15(),
            signed.signature_hash_algorithm)
    except Exception as e:
        raise Asn1Error('1: Validation of cert signature failed: {}'.format(e))
def get_certificate(self, certificate):
    cert = requests.get(certificate)
    if cert.status_code != 200:
        raise ClientError('Certificate fetch failed: {}'.format(
            cert.json()['detail']))
    return x509.load_der_x509_certificate(cert.content, backend)
def findCertificateFor( self, subject ):
    # Defer loading the Certificates from disk until they are requested.
    if self._trustedCertificates is None:
        self._trustedCertificates = []
        paths = glob.glob( os.path.join( self._directoryPath, '*.{0}'.format( self._fileExtension ) ) )
        for path in paths:
            with open( path, 'rb' ) as inputFile:
                data = inputFile.read()
            if self._fileExtension == '.der':
                certificate = x509.load_der_x509_certificate( data, backends.default_backend() )
            elif self._fileExtension == '.pem':
                certificate = x509.load_pem_x509_certificate( data, backends.default_backend() )
            else:
                # logging.ERROR is a level constant, not a function; use logging.error().
                logging.error( 'Unsupported Certificate file extension: {0}.'.format( self._fileExtension ) )
                # Skip files we could not parse instead of appending a stale certificate.
                continue
            # logging.WARNING( 'The file {0} does not have the specified extension: {1}.'.format( path, self._fileExtension ) )
            self._trustedCertificates.append( certificate )

    for certificate in self._trustedCertificates:
        # Is this doing a value comparison? -rds
        if certificate.subject == subject:
            return certificate
def main():
    tls_name = '77fa5113ab6a532ce2e6901f3bd3351c0db5845e0b1b5fb09907808d._smimecert.getdnsapi.org'
    if len(sys.argv) == 2:
        tls_name = sys.argv[1]
    c = getdns.Context()
    extensions = { 'dnssec_return_status' : getdns.EXTENSION_TRUE }
    results = c.general(tls_name, request_type=getdns.RRTYPE_TLSA,
                        extensions=extensions)
    if results.replies_full['status'] != getdns.RESPSTATUS_GOOD:
        print 'query status is {0}'.format(results.status)
        sys.exit(1)
    else:
        record = get_first_secure_response(results)
        cert_record = record['rdata']['certificate_association_data']
        try:
            cert = x509.load_der_x509_certificate(bytes(cert_record), default_backend())
            rsakey = cert.public_key()
            encrypted = rsakey.encrypt("A chunk of text",
                                       padding.OAEP(
                                           mgf=padding.MGF1(algorithm=hashes.SHA1()),
                                           algorithm=hashes.SHA1(),
                                           label=None))
            print encrypted.encode('base64')
        except Exception as e:
            print ('Error: {0}'.format(e.message))
            sys.exit(1)
def get_cert(email, dump=False):
    """ Get E-cert from HKPost LDAP server """
    # Connect to server
    server = Server('ldap1.hongkongpost.gov.hk', get_info=ALL)
    conn = Connection(server, auto_bind=True)
    conn.start_tls()

    # Get RSA cert
    conn.search('o=hongkong post e-cert (personal),c=hk', '(sn='+email+'*)')
    a = json.loads(conn.entries[-1].entry_to_json())['dn']
    OU = a[a.find('OU=')+3:a.find('OU=')+13]
    conn.search('EMAIL='+email+',OU='+str(OU)+',o=hongkong post e-cert (personal),c=hk',
                '(objectclass=*)',
                search_scope=LEVEL,
                dereference_aliases=DEREF_BASE,
                attributes=[ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES])
    cert = conn.entries[0].entry_get_raw_attribute("userCertificate;binary")[0]

    # Cert info
    if dump:
        print(conn.entries[0].entry_get_dn())
        print(base64.b64encode(cert))

    # get x509 der public
    pub_key = x509.load_der_x509_certificate(cert, default_backend()).public_key()
    return pub_key
def test_transports_from_cert(self):
    provider = MetadataProvider(EMPTY_RESOLVER)
    cert = x509.load_der_x509_certificate(ATTESTATION_CERT_WITH_TRANSPORT,
                                          default_backend())
    attestation = provider.get_attestation(cert)

    assert attestation.transports == Transport.USB | Transport.NFC
def scan(self, offset=0, maxlen=None):
    for hit in super(CertScanner, self).scan(offset=offset, maxlen=maxlen):
        signature = self.address_space.read(hit + 4, 3)
        size = self.profile.Object(
            "unsigned be short", offset=hit+2, vm=self.address_space)
        description = None

        if signature.startswith(b"\x30\x82"):
            data = self.address_space.read(hit, size + 4)
            if x509:
                try:
                    cert = x509.load_der_x509_certificate(data, default_backend())
                    description = dict((
                        attr.oid._name, attr.value) for attr in cert.subject)
                except Exception:
                    pass

            yield hit, "X509", data, description

        elif signature.startswith(b"\x02\x01\x00"):
            data = self.address_space.read(hit, size + 4)
            if x509:
                try:
                    pem = (b"-----BEGIN RSA PRIVATE KEY-----\n" +
                           base64.b64encode(data) +
                           b"-----END RSA PRIVATE KEY-----")
                    key = serialization.load_pem_private_key(
                        pem, password=None, backend=default_backend())
                    description = ""
                except Exception:
                    pass

            yield hit, "RSA", data, description
def verify_hit(self, hit, address_space):
    signature = address_space.read(hit + 4, 3)
    size = self.profile.Object(
        "unsigned be short", offset=hit+2, vm=address_space)
    description = None

    if signature.startswith(b"\x30\x82"):
        data = address_space.read(hit, size + 4)
        if x509:
            try:
                cert = x509.load_der_x509_certificate(data, default_backend())
                description = dict((
                    attr.oid._name, attr.value) for attr in cert.subject)
            except Exception:
                pass

        return "X509", data, description

    elif signature.startswith(b"\x02\x01\x00"):
        data = address_space.read(hit, size + 4)
        if x509:
            try:
                pem = (b"-----BEGIN RSA PRIVATE KEY-----\n" +
                       base64.b64encode(data) +
                       b"-----END RSA PRIVATE KEY-----")
                key = serialization.load_pem_private_key(
                    pem, password=None, backend=default_backend())
                description = ""
            except Exception:
                pass

        return "RSA", data, description

    return None, None, None
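# Illustrative sketch (not from the original sources): the two memory scanners
# above rely on a DER-encoded certificate starting with a SEQUENCE tag (0x30)
# followed by 0x82 and a 2-byte big-endian length, which is why they read an
# "unsigned be short" at offset hit+2 and then copy size + 4 bytes. A minimal
# standalone version of that length parsing, assuming `blob` holds raw memory:
import struct

def der_certificate_length(blob, offset=0):
    """Return the total DER length (header + body) if blob[offset:] looks like a cert."""
    if blob[offset:offset + 2] != b"\x30\x82":
        return None  # not a SEQUENCE with a 2-byte length field
    body_len = struct.unpack(">H", blob[offset + 2:offset + 4])[0]
    return body_len + 4  # 1 tag byte + 1 length-of-length byte + 2 length bytes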
def token_signature_key(self):
    cache_alias = self.get_setting("token_signature_key_cache_alias", DEFAULT_CACHE_ALIAS)
    cache = caches[cache_alias]

    cache_key = ":".join([
        "allauth_adfs",
        "ADFSOAuth2Adapter",
        md5(self.federation_metadata_url).hexdigest(),
        "token_signature_key",
    ])

    pub = cache.get(cache_key)

    if pub is None:
        xml = self.federation_metadata_xml
        signature = xml.getElementsByTagName("ds:Signature")[0]
        cert_b64 = signature.getElementsByTagName("X509Certificate")[0].firstChild.nodeValue
        cert_str = decode_payload_segment(cert_b64)
        cert_obj = load_der_x509_certificate(cert_str, default_backend())
        pub = cert_obj.public_key().public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo,
        )

        timeout = self.get_setting("token_signature_key_cache_timeout", 0, required=False)
        cache.set(cache_key, pub, timeout)

    return pub
def complete_login(self, request, app, token, **kwargs):
    cert_der = base64.b64decode(self.cert)
    x509_cert = x509.load_der_x509_certificate(cert_der, backend=x509_backend)
    jwt_token = jwt.decode(token.token,
                           key=x509_cert.public_key(),
                           leeway=10,
                           options={'verify_aud': False})
    data = self.clean_attributes(jwt_token)
    return self.get_provider().sociallogin_from_response(request, data)
def on_get(self, req, resp):
    ids = req.get_param("ids")
    if isinstance(ids, str):
        ids = ids.split(",")
    codes = set([j for j in ids if re.match(r"[3-6]\d{10}", j)])
    if not codes:
        raise falcon.HTTPBadRequest("No id codes specified")
    users = dict()
    for serial, esteid, digiid in self.get_certificates(ids):
        cert = x509.load_der_x509_certificate(esteid, default_backend())
        common_name, = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)
        given_name, = cert.subject.get_attributes_for_oid(NameOID.GIVEN_NAME)
        surname, = cert.subject.get_attributes_for_oid(NameOID.SURNAME)
        ext = cert.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
        mail, = ext.value.get_values_for_type(x509.RFC822Name)
        slug = (given_name.value[0] + surname.value).replace("-", "").replace(" ", "")
        username = unicodedata.normalize("NFKD", slug.lower()).encode("ascii", "ignore")
        century = str((int(serial[0])-1) // 2 + 18)
        users[serial] = dict(
            gender="M" if serial[0] in "13579" else "F",
            birthday=datetime.strptime(century + serial[1:7], "%Y%m%d").date(),
            cn=common_name.value,
            gn=given_name.value.title(),
            sn=surname.value.title(),
            mail=mail,
            name=username,
            certificates=[b64encode(j) for j in (esteid, digiid) if j],
        )
    return users
def read_certificate(self, slot):
    data = _parse_tlv_dict(self.get_data(OBJ.from_slot(slot)))
    if TAG.CERT_INFO in data:  # Not available in attestation slot
        if data[TAG.CERT_INFO] != b'\0':
            raise ValueError('Compressed certificates are not supported!')
    return x509.load_der_x509_certificate(data[TAG.CERTIFICATE],
                                          default_backend())
def got_cert(certr):
    objects.append(
        Certificate(
            x509.load_der_x509_certificate(
                certr.body, default_backend())
            .public_bytes(serialization.Encoding.PEM)))
    return certr
def bind(user, version, registration_data, client_data, description=''):
    security_user = SecurityUser.from_user(user, current_app.private_userdb)
    enrollment_data = session.pop('_u2f_enroll_', None)
    if not enrollment_data:
        current_app.logger.error('Found no U2F enrollment data in session.')
        return {'_error': True, 'message': 'security.u2f.missing_enrollment_data'}
    data = {
        'version': version,
        'registrationData': registration_data,
        'clientData': client_data
    }
    device, der_cert = complete_registration(enrollment_data, data,
                                             current_app.config['U2F_FACETS'])

    cert = x509.load_der_x509_certificate(der_cert, default_backend())
    pem_cert = crypto.dump_certificate(crypto.FILETYPE_PEM, cert)
    if not isinstance(pem_cert, six.string_types):
        pem_cert = pem_cert.decode('utf-8')

    u2f_token = U2F(version=device['version'],
                    keyhandle=device['keyHandle'],
                    app_id=device['appId'],
                    public_key=device['publicKey'],
                    attest_cert=pem_cert,
                    description=description,
                    application='eduid_security',
                    created_ts=True)
    security_user.credentials.add(u2f_token)
    save_and_sync_user(security_user)
    current_app.stats.count(name='u2f_token_bind')
    return {
        'message': 'security.u2f_register_success',
        'credentials': compile_credential_list(security_user)
    }
def getThumbprintFromx509():
    ###
    # cryptography library
    ###
    filename = "/home/dougie/onedrive/orion/security/jwt/cert.der"
    cert = x509.load_der_x509_certificate(data=open(filename, "rb").read(),
                                          backend=default_backend())

    # Issuer
    orgName = cert.issuer.get_attributes_for_oid(NameOID.ORGANIZATION_NAME)  # organizationName
    print("Issuer: {}".format(orgName[0].value))

    # SHA1 fingerprint (byte array)
    fp = cert.fingerprint(hashes.SHA1())
    # Convert from binary to hex string
    raw_fp = binascii.hexlify(fp)
    # Convert hex to string
    fingerprint = raw_fp.decode('utf-8')

    # WSO2 method of encoding the fingerprint
    print("SHA1 -crypto thumbprint: {}".format(fingerprint))
    # hex encoded
    fingerprint_enc = base64.b64encode(fingerprint.encode())
    print("SHA1 -crypto (encoded wso2): {}".format(fingerprint_enc.decode('utf-8')))

    # Base64 URL-encoded fingerprint (x5t)
    # Could use one of the following: WSO2 style or base64 URL-encoded DER
    #   base64.b64encode(fingerprint.encode())
    #   fingerprint_bytes_enc.decode('utf-8').rstrip('=')
    fingerprint_bytes_enc = base64.urlsafe_b64encode(fp)
    x5t = fingerprint_bytes_enc.decode('utf-8').rstrip('=')
    print("SHA1 -crypto (encoded)", x5t)
def load_certificate(path):
    _, ext = os.path.splitext(path)
    with open(path, "rb") as f:
        if ext == ".pem":
            return x509.load_pem_x509_certificate(f.read(), default_backend())
        else:
            return x509.load_der_x509_certificate(f.read(), default_backend())
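# Illustrative usage sketch (not from the original source); the file names are
# hypothetical. The helper dispatches on the extension, treating anything that
# is not ".pem" as DER:
ca_cert = load_certificate("certs/root-ca.pem")   # parsed as PEM
leaf_cert = load_certificate("certs/leaf.der")    # falls back to DER
print(ca_cert.subject.rfc4514_string(), leaf_cert.serial_number)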
def test_provider(self):
    provider = MetadataProvider(YUBICO_RESOLVER)
    cert = x509.load_der_x509_certificate(ATTESTATION_CERT, default_backend())
    attestation = provider.get_attestation(cert)

    assert attestation.trusted
def test_sct_embedding():
    if not os.environ.get('BOULDER_CONFIG_DIR', '').startswith("test/config-next"):
        return
    certr, authzs = auth_and_issue([random_domain()])
    certBytes = urllib2.urlopen(certr.uri).read()
    cert = x509.load_der_x509_certificate(certBytes, default_backend())

    # make sure there is no poison extension
    try:
        cert.extensions.get_extension_for_oid(x509.ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3"))
        raise Exception("certificate contains CT poison extension")
    except x509.ExtensionNotFound:
        # do nothing
        pass

    # make sure there is a SCT list extension
    try:
        sctList = cert.extensions.get_extension_for_oid(x509.ObjectIdentifier("1.3.6.1.4.1.11129.2.4.2"))
    except x509.ExtensionNotFound:
        raise Exception("certificate doesn't contain SCT list extension")
    if len(sctList.value) != 2:
        raise Exception("SCT list contains wrong number of SCTs")
    for sct in sctList.value:
        if sct.version != x509.certificate_transparency.Version.v1:
            raise Exception("SCT contains wrong version")
        if sct.entry_type != x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE:
            raise Exception("SCT contains wrong entry type")
        delta = sct.timestamp - datetime.datetime.now()
        if abs(delta) > datetime.timedelta(hours=1):
            raise Exception("Delta between SCT timestamp and now was too great "
                            "%s vs %s (%s)" % (sct.timestamp, datetime.datetime.now(), delta))
def verify_sig(self, encoded_cert):
    cert = x509.load_der_x509_certificate(encoded_cert, default_backend())
    crypto.verify(
        self.ca.cert, cert.signature, cert.tbs_certificate_bytes, 'sha256')
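# Illustrative alternative (not from the original source): the same check done
# entirely with `cryptography` instead of pyOpenSSL, assuming `ca_cert` is the
# issuer as a cryptography Certificate whose key is RSA with PKCS#1 v1.5 padding:
from cryptography.hazmat.primitives.asymmetric import padding as _padding

def verify_sig_cryptography(ca_cert, encoded_cert):
    cert = x509.load_der_x509_certificate(encoded_cert, default_backend())
    ca_cert.public_key().verify(
        cert.signature,
        cert.tbs_certificate_bytes,
        _padding.PKCS1v15(),
        cert.signature_hash_algorithm,  # raises InvalidSignature on mismatch
    )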
def b642cert(data):
    """
    Return cert_dict similar to old rsa_x509_pem backend. Shouldn't be used by new code.

    @param data  The certificate as a base64 string (i.e. PEM without header/footer)
    """
    cert = load_der_x509_certificate(standard_b64decode(data), backend=default_backend())
    return _cert2dict(cert)
def add_device(self, bind_data, cert_der, transports=0):
    cert = x509.load_der_x509_certificate(cert_der, default_backend())
    certificate = db.session.query(Certificate) \
        .filter(Certificate.fingerprint == _calculate_fingerprint(cert)) \
        .first()
    if certificate is None:
        certificate = Certificate(cert)
    return Device(self, bind_data, certificate, transports)
def _cert_fingerprint(cert_pem):
    if "-----BEGIN CERTIFICATE" in cert_pem:
        cert = load_pem_x509_certificate(cert_pem, backend=default_backend())
    else:
        cert = load_der_x509_certificate(base64.standard_b64decode(cert_pem),
                                         backend=default_backend())
    fingerprint = hexlify(cert.fingerprint(hashes.SHA1())).lower().decode('ascii')
    fingerprint = ":".join([fingerprint[x:x + 2] for x in xrange(0, len(fingerprint), 2)])
    return fingerprint, cert
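# Illustrative Python 3 variant (not from the original source): the snippet
# above is Python 2 (`xrange`); the same colon-separated SHA-1 fingerprint,
# e.g. "ab:cd:ef:...", can be produced under Python 3 like this:
def _cert_fingerprint_py3(cert):
    digest = cert.fingerprint(hashes.SHA1()).hex()
    return ":".join(digest[i:i + 2] for i in range(0, len(digest), 2))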
def resolve(self, cert):
    if isinstance(cert, bytes):
        cert = x509.load_der_x509_certificate(cert, default_backend())
    issuer = cert.issuer \
        .get_attributes_for_oid(NameOID.COMMON_NAME)[0].value
    for issuer in self._certs.get(issuer, []):
        if self._verify_cert(cert, issuer.public_key()):
            return self._metadata[issuer]
    return None
def get_user_details(self, response):
    leeway = self.setting('LEEWAY', self.LEEWAY)
    cert_der = base64.b64decode(self.cert)
    x509_cert = x509.load_der_x509_certificate(cert_der, backend=x509_backend)
    jwt_token = jwt.decode(response['access_token'],
                           key=x509_cert.public_key(),
                           leeway=leeway,
                           options={'verify_aud': False})
    return self.clean_attributes(jwt_token)
def load_der_x509_certificate(data):
    """
    Load an X.509 certificate in DER format.

    :returns: a ``IPACertificate`` object.
    :raises: ``ValueError`` if unable to load the certificate.
    """
    return IPACertificate(
        crypto_x509.load_der_x509_certificate(data, backend=default_backend())
    )
def get_attestation(self, device_or_cert):
    if isinstance(device_or_cert, Device):
        device = device_or_cert
        if device.certificate_id not in self._cache:
            cert = x509.load_der_x509_certificate(device.certificate.der,
                                                  default_backend())
            attestation = self._provider.get_attestation(cert)
            self._cache[device.certificate_id] = attestation
        return self._cache[device.certificate_id]
    else:
        return self._provider.get_attestation(device_or_cert)
def _get_client_identity(self): certificate_data = self._connection.getpeercert(binary_form=True) try: cert = x509.load_der_x509_certificate( certificate_data, backends.default_backend() ) except Exception: # This should never get raised "in theory," as the ssl socket # should fail to connect non-TLS connections before the session # gets created. This is a failsafe in case that protection fails. raise exceptions.PermissionDenied( "Failure loading the client certificate from the session " "connection. Could not retrieve client identity." ) if self._enable_tls_client_auth: try: extended_key_usage = cert.extensions.get_extension_for_oid( x509.oid.ExtensionOID.EXTENDED_KEY_USAGE ).value except x509.ExtensionNotFound: raise exceptions.PermissionDenied( "The extended key usage extension is missing from the " "client certificate. Session client identity unavailable." ) if x509.oid.ExtendedKeyUsageOID.CLIENT_AUTH not in \ extended_key_usage: raise exceptions.PermissionDenied( "The extended key usage extension is not marked for " "client authentication in the client certificate. Session " "client identity unavailable." ) client_identities = cert.subject.get_attributes_for_oid( x509.oid.NameOID.COMMON_NAME ) if len(client_identities) > 0: if len(client_identities) > 1: self._logger.warning( "Multiple client identities found. Using the first " "one processed." ) client_identity = client_identities[0].value self._logger.info( "Session client identity: {0}".format(client_identity) ) return client_identity else: raise exceptions.PermissionDenied( "The client certificate does not define a subject common " "name. Session client identity unavailable." )
def test_versioning_older(self):
    resolver = create_resolver(YUBICO)
    newer = json.loads(json.dumps(YUBICO))
    newer['trustedCertificates'] = []
    resolver.add_metadata(newer)

    cert = x509.load_der_x509_certificate(ATTESTATION_CERT, default_backend())
    metadata = resolver.resolve(cert)

    assert metadata.identifier == '2fb54029-7613-4f1d-94f1-fb876c14a6fe'
def _data_from_options(self, **options):
    issuer = None
    subject = None

    if not self.certmapdata_options & set(options):
        raise RequirementError(name=u'certmapdata')

    if ({u'issuer', u'subject'} & set(options) and
            {u'ipacertmapdata', u'certificate'} & set(options)):
        raise MutuallyExclusiveError(reason=u'Mutually exclusive options '
                                            u'provided at the same time.')

    if u'issuer' in options and u'subject' not in options:
        raise RequirementError(name=u'subject')

    if u'subject' in options and u'issuer' not in options:
        raise RequirementError(name=u'issuer')

    if {u'ipacertmapdata', u'certificate'} & set(options):
        try:
            data = options[u'ipacertmapdata']
        except KeyError:
            data = []
        else:
            if not isinstance(data, list):
                data = [data]

        try:
            certs = options[u'certificate']
        except KeyError:
            certs = []
        else:
            if not isinstance(certs, list):
                certs = [certs]

        for cert in certs:
            cert = x509.load_der_x509_certificate(
                base64.b64decode(cert), backend=default_backend()
            )
            issuer = DN(cert.issuer).x500_text()
            subject = DN(cert.subject).x500_text()

            data.append(
                u'X509:<I>{i}<S>{s}'.format(i=issuer, s=subject)
            )
    else:
        issuer = DN(options[u'issuer']).x500_text()
        subject = DN(options[u'subject']).x500_text()

        data = [u'X509:<I>{i}<S>{s}'.format(i=issuer, s=subject)]

    return set(data)
def test_versioning_newer(self):
    resolver = create_resolver(YUBICO)
    newer = json.loads(json.dumps(YUBICO))
    newer['version'] = newer['version'] + 1
    newer['trustedCertificates'] = []
    resolver.add_metadata(newer)

    cert = x509.load_der_x509_certificate(ATTESTATION_CERT, default_backend())
    metadata = resolver.resolve(cert)

    assert metadata is None
def read_tshark(self, path): """Read TCP and UDP packets from file given by path. Parameters ---------- path : string Path to .pcap file to read. Returns ------- result : np.array of shape=(n_packets, n_features) Where features consist of: [0]: Filename of capture [1]: Protocol TCP/UDP [2]: TCP/UDP stream identifier [3]: Timestamp of packet [4]: Length of packet [5]: IP packet source [6]: IP packet destination [7]: TCP/UDP packet source port [8]: TCP/UDP packet destination port [9]: SSL/TLS certificate if exists, else None """ # Create Tshark command command = ["tshark", "-r", path, "-Tfields", "-e", "frame.time_epoch", "-e", "tcp.stream", "-e", "udp.stream", "-e", "ip.proto", "-e", "ip.src", "-e", "tcp.srcport", "-e", "udp.srcport", "-e", "ip.dst", "-e", "tcp.dstport", "-e", "udp.dstport", "-e", "ip.len", "-e", "ssl.handshake.certificate"] # Initialise result result = list() # Call Tshark on packets process = Popen(command, stdout=PIPE, stderr=PIPE) # Get output out, err = process.communicate() # Read each packet for packet in filter(None, out.decode('utf-8').split('\n')): # Get all data from packets packet = packet.split() # Perform check on packets if len(packet) < 8: continue # Perform check on multiple ip addresses packet[3] = packet[3].split(',')[0] packet[5] = packet[5].split(',')[0] packet[7] = packet[7].replace(',', '') # Parse certificate if len(packet) > 8: # Get first certificate cert = packet[8].split(',')[0] # Transform to hex cert = bytes.fromhex(cert.replace(':', '')) # Read as certificate cert = x509.load_der_x509_certificate(cert, default_backend()) # Set packet as serial number packet[8] = cert.serial_number else: packet.append(None) # Add packet to result result.append([path] + packet) # Get result as numpy array result = np.asarray(result) # Check if any items exist if not result.shape[0]: return np.zeros((0, 8), dtype=object) # Change protocol number to text protocols = {'17': 'udp', '6': 'tcp'} result[:, 3] = [protocols.get(x, 'unknown') for x in result[:, 3]] # Return in original order return result[:, [0, 3, 2, 1, 8, 4, 6, 5, 7, 9]]
def read_pyshark(self, path): """Read TCP and UDP packets from file given by path. Parameters ---------- path : string Path to .pcap file to read. Returns ------- result : np.array of shape=(n_packets, n_features) Where features consist of: [0]: Filename of capture [1]: Protocol TCP/UDP [2]: TCP/UDP stream identifier [3]: Timestamp of packet [4]: Length of packet [5]: IP packet source [6]: IP packet destination [7]: TCP/UDP packet source port [8]: TCP/UDP packet destination port [9]: SSL/TLS certificate if exists, else None """ # If verbose, print which file is currently being read if self.verbose: counter_a = 0 counter_b = 0 # Read pcap file pcap = iter(pyshark.FileCapture(path)) # Initialise result result = list() # Loop over packets while True: try: packet = next(pcap) except: break if not ("TCP" in packet or "UDP" in packet): counter_b += 1 continue if self.verbose: counter_a += 1 counter_b += 1 print("Reading {}... {}/{} packets".format(path, counter_a, counter_b), end='\r') # Get required packet data d = [path, packet.layers[2].layer_name, # Get packet.layers[2].stream, # Get stream ID packet.sniff_timestamp, # Get packet timestamp packet.length, # Get packet length packet.layers[1].src, # Get source IP or IPv6 (fixed) packet.layers[1].dst, # Get destination IP or IPv6 (fixed) packet.layers[2].srcport, # Get source port packet.layers[2].dstport, # Get destination port None] # Check whether SSL/TLS certificate is in packet if "SSL" in packet and\ packet.ssl.get("handshake_certificate") is not None: # Get certificate cert = packet.ssl.get('handshake_certificate') # Parse cert to bytes cert = bytes.fromhex(cert.replace(':', '')) # Parse x509 certificate as DER cert = x509.load_der_x509_certificate(cert, default_backend()) # Get serial number - TODO extend with other features? d[-1] = cert.serial_number # Append data item to result result.append(d) # Close capture pcap.close() if self.verbose: print() # Return result as numpy array return np.array(result)
def handle(self, name, key, pem, **options): if not os.path.exists(ca_settings.CA_DIR): try: with wrap_file_exceptions(): os.makedirs(ca_settings.CA_DIR) except PermissionError: pem.close() key.close() raise CommandError( '%s: Could not create CA_DIR: Permission denied.' % ca_settings.CA_DIR) # FileNotFoundError shouldn't happen, whole point of this block is to create it password = options['password'] import_password = options['import_password'] parent = options['parent'] pem_data = pem.read() key_data = key.read() crl_url = '\n'.join(options['crl_url']) # close reader objects (otherwise we get a ResourceWarning) key.close() pem.close() ca = CertificateAuthority(name=name, parent=parent, issuer_url=options['issuer_url'], issuer_alt_name=options['issuer_alt_name'], crl_url=crl_url) # load public key try: pem_loaded = x509.load_pem_x509_certificate( pem_data, default_backend()) except Exception: try: pem_loaded = x509.load_der_x509_certificate( pem_data, default_backend()) except Exception: raise CommandError('Unable to load public key.') ca.x509 = pem_loaded ca.private_key_path = ca_storage.generate_filename( '%s.key' % ca.serial.replace(':', '')) # load private key try: key_loaded = serialization.load_pem_private_key( key_data, import_password, default_backend()) except Exception: try: key_loaded = serialization.load_der_private_key( key_data, import_password, default_backend()) except Exception: raise CommandError('Unable to load private key.') if password is None: encryption = serialization.NoEncryption() else: encryption = serialization.BestAvailableEncryption(password) # write private key to file pem = key_loaded.private_bytes(encoding=Encoding.PEM, format=PrivateFormat.TraditionalOpenSSL, encryption_algorithm=encryption) perm_denied = '%s: Permission denied: Could not open file for writing' % ca.private_key_path try: with wrap_file_exceptions(): ca_storage.save(ca.private_key_path, ContentFile(pem)) except PermissionError: raise CommandError(perm_denied) # Only save CA to database if we loaded all data and copied private key ca.save()
def load_certificate(cert_data):
    cert = x509.load_der_x509_certificate(cert_data, default_backend())
    return cert
def verify_android_key( *, attestation_statement: AttestationStatement, attestation_object: bytes, client_data_json: bytes, credential_public_key: bytes, pem_root_certs_bytes: List[bytes], ) -> bool: """Verify an "android-key" attestation statement See https://www.w3.org/TR/webauthn-2/#sctn-android-key-attestation Also referenced: https://source.android.com/security/keystore/attestation """ if not attestation_statement.sig: raise InvalidRegistrationResponse( "Attestation statement was missing signature (Android Key)") if not attestation_statement.alg: raise InvalidRegistrationResponse( "Attestation statement was missing algorithm (Android Key)") if not attestation_statement.x5c: raise InvalidRegistrationResponse( "Attestation statement was missing x5c (Android Key)") # Validate certificate chain try: # Include known root certificates for this attestation format pem_root_certs_bytes.append(google_hardware_attestation_root_1) pem_root_certs_bytes.append(google_hardware_attestation_root_2) validate_certificate_chain( x5c=attestation_statement.x5c, pem_root_certs_bytes=pem_root_certs_bytes, ) except InvalidCertificateChain as err: raise InvalidRegistrationResponse(f"{err} (Android Key)") # Extract attStmt bytes from attestation_object attestation_dict = cbor2.loads(attestation_object) authenticator_data_bytes = attestation_dict["authData"] # Generate a hash of client_data_json client_data_hash = hashlib.sha256() client_data_hash.update(client_data_json) client_data_hash_bytes = client_data_hash.digest() verification_data = b"".join([ authenticator_data_bytes, client_data_hash_bytes, ]) # Verify that sig is a valid signature over the concatenation of authenticatorData # and clientDataHash using the public key in the first certificate in x5c with the # algorithm specified in alg. attestation_cert_bytes = attestation_statement.x5c[0] attestation_cert = x509.load_der_x509_certificate(attestation_cert_bytes, default_backend()) attestation_cert_pub_key = attestation_cert.public_key() try: verify_signature( public_key=attestation_cert_pub_key, signature_alg=attestation_statement.alg, signature=attestation_statement.sig, data=verification_data, ) except InvalidSignature: raise InvalidRegistrationResponse( "Could not verify attestation statement signature (Android Key)") # Verify that the public key in the first certificate in x5c matches the # credentialPublicKey in the attestedCredentialData in authenticatorData. attestation_cert_pub_key_bytes = attestation_cert_pub_key.public_bytes( Encoding.DER, PublicFormat.SubjectPublicKeyInfo, ) # Convert our raw public key bytes into the same format cryptography generates for # the cert subject key decoded_pub_key = decode_credential_public_key(credential_public_key) pub_key_crypto = decoded_public_key_to_cryptography(decoded_pub_key) pub_key_crypto_bytes = pub_key_crypto.public_bytes( Encoding.DER, PublicFormat.SubjectPublicKeyInfo, ) if attestation_cert_pub_key_bytes != pub_key_crypto_bytes: raise InvalidRegistrationResponse( "Certificate public key did not match credential public key (Android Key)" ) # Verify that the attestationChallenge field in the attestation certificate # extension data is identical to clientDataHash. 
ext_key_description_oid = "1.3.6.1.4.1.11129.2.1.17" try: cert_extensions = attestation_cert.extensions ext_key_description: Extension = cert_extensions.get_extension_for_oid( ObjectIdentifier(ext_key_description_oid)) except ExtensionNotFound: raise InvalidRegistrationResponse( f"Certificate missing extension {ext_key_description_oid} (Android Key)" ) # Peel apart the Extension into an UnrecognizedExtension, then the bytes we actually # want ext_value_wrapper: UnrecognizedExtension = ext_key_description.value ext_value: bytes = ext_value_wrapper.value parsed_ext = KeyDescription.load(ext_value) # Verify the following using the appropriate authorization list from the attestation # certificate extension data: software_enforced: AuthorizationList = parsed_ext["softwareEnforced"] tee_enforced: AuthorizationList = parsed_ext["teeEnforced"] # The AuthorizationList.allApplications field is not present on either authorization # list (softwareEnforced nor teeEnforced), since PublicKeyCredential MUST be scoped # to the RP ID. if software_enforced["allApplications"].native is not None: raise InvalidRegistrationResponse( "allApplications field was present in softwareEnforced (Android Key)" ) if tee_enforced["allApplications"].native is not None: raise InvalidRegistrationResponse( "allApplications field was present in teeEnforced (Android Key)") # The value in the AuthorizationList.origin field is equal to KM_ORIGIN_GENERATED. origin = tee_enforced["origin"].native if origin != KeyOrigin.GENERATED: raise InvalidRegistrationResponse( f"teeEnforced.origin {origin} was not {KeyOrigin.GENERATED}") # The value in the AuthorizationList.purpose field is equal to KM_PURPOSE_SIGN. purpose = tee_enforced["purpose"].native if purpose != [KeyPurpose.SIGN]: raise InvalidRegistrationResponse( f"teeEnforced.purpose {purpose} was not [{KeyPurpose.SIGN}]") return True
def parse_leaf_cert_crypto(leaf_cert):
    # print(leaf_cert)
    crypto_x509_cert = x509.load_der_x509_certificate(leaf_cert, default_backend())
    print(crypto_x509_cert.serial_number)
    # public_key is a method; call it so the key object (not the bound method) is printed
    print(crypto_x509_cert.public_key())
    print('Label:', attr['CKA_LABEL'])

private_key = session.findObjects([
    (PyKCS11.CKA_CLASS, PyKCS11.CKO_PRIVATE_KEY),
    (PyKCS11.CKA_LABEL, 'CITIZEN AUTHENTICATION KEY')
])[0]

mechanism = PyKCS11.Mechanism(PyKCS11.CKM_SHA1_RSA_PKCS, None)
text = b'text to sign'
signature = bytes(session.sign(private_key, text, mechanism))
print(signature)

# cert = session.findObjects([
#     (PyKCS11.CKA_CLASS, PyKCS11.CKO_PRIVATE_KEY),
#     (PyKCS11.CKA_LABEL, 'CITIZEN AUTHENTICATION CERTIFICATE')]
# )[0]

cert = x509.load_der_x509_certificate(bytes(attr['CKA_VALUE']), default_backend())
print(cert)
print(cert.public_bytes(encoding=serialization.Encoding.PEM))

with open('cc_cert.pem', 'wb') as f:
    f.write(cert.public_bytes(encoding=serialization.Encoding.PEM))

print()
print(cert.public_key().public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.SubjectPublicKeyInfo))
def setUp(self):
    der, msg = pem_to_der(os.getenv('CERT_FILE', 'crt.pem'))
    if der:
        self.cert = x509.load_der_x509_certificate(der)
    else:
        self.fail(msg)
def attest(self, slot):
    return x509.load_der_x509_certificate(self.send_cmd(INS.ATTEST, slot),
                                          default_backend())
def local_cert_loader(cert):
    backend = default_backend()
    if b'-----BEGIN CERTIFICATE-----' in cert:
        return load_pem_x509_certificate(cert, backend)
    return load_der_x509_certificate(cert, backend)
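# Illustrative usage sketch (not from the original source); the file paths are
# hypothetical. The loader keys purely off the PEM header, so the same call
# works for either encoding:
with open("server.pem", "rb") as f:
    pem_cert = local_cert_loader(f.read())   # dispatched to load_pem_x509_certificate
with open("server.der", "rb") as f:
    der_cert = local_cert_loader(f.read())   # falls through to load_der_x509_certificate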
def __init__(self, allow_rooted=False, ca=_GSR2_DER):
    self.allow_rooted = allow_rooted
    self._ca = x509.load_der_x509_certificate(ca, default_backend())
def main(uuid_c): print("|--------------------------------------|") print("| SECURE MEDIA CLIENT |") print("|--------------------------------------|\n") # Get a list of media files print("Contacting Server") CLIENT_CIPHERSUITS = ["AES256_CBC_SHA256", "AES256_CBC_SHA512", "AES256_GCM_SHA256", "AES256_GCM_SHA512", "ChaCha20_None_SHA256", "ChaCha20_None_SHA512"] lib ='/usr/local/lib/libpteidpkcs11.so' pkcs11 = PyKCS11.PyKCS11Lib() pkcs11.load(lib) slots = pkcs11.getSlotList() slot = slots[0] session = pkcs11.openSession(slot) obj = session.findObjects([(PyKCS11.CKA_CLASS, PyKCS11.CKO_CERTIFICATE), (PyKCS11.CKA_LABEL, 'CITIZEN AUTHENTICATION CERTIFICATE')])[0] all_atributes = [PyKCS11.CKA_VALUE] attributes = session.getAttributeValue(obj, all_atributes)[0] cert = x509.load_der_x509_certificate(bytes(attributes)) cc_cert_pem = cert.public_bytes(encoding=serialization.Encoding.PEM) cc_private_key = session.findObjects([( PyKCS11.CKA_CLASS, PyKCS11.CKO_PRIVATE_KEY), (PyKCS11.CKA_LABEL,'CITIZEN AUTHENTICATION KEY')])[0] mechanism = PyKCS11.Mechanism(PyKCS11.CKM_SHA1_RSA_PKCS, None) data = {"uuid": uuid_c, "client_ciphersuits": CLIENT_CIPHERSUITS, "cc_cert": cc_cert_pem.decode('latin')} data = json.dumps(data) signature = bytes(session.sign(cc_private_key, data, mechanism)) payload = {"data": data, "signature": base64.b64encode(signature).decode('latin')} req = requests.get(f'{SERVER_URL}/api/protocols', data= json.dumps(payload)) req = req.json() data_signed = json.loads(req["data"]) algorithms_modes_digests = data_signed["ciphersuit"].split("_") algorithm = algorithms_modes_digests[0] mode = algorithms_modes_digests[1] digest_c = algorithms_modes_digests[2] signature = base64.b64decode(req["signature"].encode()) with open("Certification_Authority.crt", "rb") as CA_cert_file: CA_cert = x509.load_pem_x509_certificate(CA_cert_file.read()) CA_public_key = CA_cert.public_key() server_cert = x509.load_pem_x509_certificate(data_signed["server_cert"].encode()) server_public_key_rsa = server_cert.public_key() #Verificar o certificado CA_public_key.verify( server_cert.signature, server_cert.tbs_certificate_bytes, paddingAsymetric.PKCS1v15(), server_cert.signature_hash_algorithm, ) #verificar assinatura if digest_c == "SHA256": server_public_key_rsa.verify( signature, req["data"].encode(), paddingAsymetric.PSS( mgf=paddingAsymetric.MGF1(hashes.SHA256()), salt_length=paddingAsymetric.PSS.MAX_LENGTH ), hashes.SHA256() ) elif digest_c == "SHA512": server_public_key_rsa.verify( signature, req["data"].encode(), paddingAsymetric.PSS( mgf=paddingAsymetric.MGF1(hashes.SHA512()), salt_length=paddingAsymetric.PSS.MAX_LENGTH ), hashes.SHA512() ) else: print("Erro") sys.exit(0) #Certificados do client e private key with open("Client_Certificate.pem", "rb") as key_file: client_cert_private_key = serialization.load_pem_private_key( key_file.read(), password=None, ) with open("Client_Certificate.crt", "rb") as cert_file: client_cert = cert_file.read() data = json.dumps({ "uuid_c": uuid_c, "client_cert": client_cert.decode('latin') }) signature = client_sign(digest_c, data) payload = { "data": data, "signature": base64.b64encode(signature).decode('latin')} req = requests.get(f'{SERVER_URL}/api/key', data=json.dumps(payload)) req = req.json() signature = base64.b64decode(req["signature"].encode()) message = req["message"].encode() req = json.loads(message) #Verificar o certificado CA_public_key.verify( server_cert.signature, server_cert.tbs_certificate_bytes, paddingAsymetric.PKCS1v15(), 
server_cert.signature_hash_algorithm, ) #Verificar a assinatura if digest_c == "SHA256": server_public_key_rsa.verify( signature, message, paddingAsymetric.PSS( mgf=paddingAsymetric.MGF1(hashes.SHA256()), salt_length=paddingAsymetric.PSS.MAX_LENGTH ), hashes.SHA256() ) elif digest_c == "SHA512": server_public_key_rsa.verify( signature, message, paddingAsymetric.PSS( mgf=paddingAsymetric.MGF1(hashes.SHA512()), salt_length=paddingAsymetric.PSS.MAX_LENGTH ), hashes.SHA512() ) else: print("Erro") sys.exit(0) parameters_pem = req["parameters"].encode() server_pub_key_pem = req["server_pub_key"].encode() server_pub_key = load_pem_public_key(server_pub_key_pem) parameters = load_pem_parameters(parameters_pem) client_private_key = parameters.generate_private_key() client_pub_key_pem = client_private_key.public_key().public_bytes( encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo ) client_shared_key = client_private_key.exchange(server_pub_key) if digest_c == "SHA256": shared_key_derived = HKDF( algorithm=hashes.SHA256(), length=32, salt=None, info=b'handshake data', ).derive(client_shared_key) elif digest_c == "SHA512": shared_key_derived = HKDF( algorithm=hashes.SHA512(), length=32, salt=None, info=b'handshake data', ).derive(client_shared_key) else: print("Erro ao derivar a shared key") sys.exit(0) data = {"uuid": uuid_c, "client_pub_key": client_pub_key_pem.decode('utf-8')} data = json.dumps(data) signature = client_sign(digest_c, data) payload = {"data": data, "signature": base64.b64encode(signature).decode('latin') } req = requests.post(url=f'{SERVER_URL}/api/shared_key', data=json.dumps(payload)) req = requests.get(f'{SERVER_URL}/api/list') if req.status_code == 200: print("Got Server List") media_list = req.json() # Present a simple selection menu idx = 0 print("MEDIA CATALOG\n") for item in media_list: print(f'{idx} - {media_list[idx]["name"]}') print("----") while True: selection = input("Select a media file number (q to quit): ") if selection.strip() == 'q': sys.exit(0) if not selection.isdigit(): continue selection = int(selection) if 0 <= selection < len(media_list): break # Example: Download first file media_item = media_list[selection] print(f"Playing {media_item['name']}") # Detect if we are running on Windows or Linux # You need to have ffplay or ffplay.exe in the current folder # In alternative, provide the full path to the executable if os.name == 'nt': proc = subprocess.Popen(['ffplay.exe', '-i', '-'], stdin=subprocess.PIPE) else: proc = subprocess.Popen(['ffplay', '-i', '-'], stdin=subprocess.PIPE) # Get data from server and send it to the ffplay stdin through a pipe for chunk in range(media_item['chunks'] + 1): if algorithm == "AES256": if mode == "CBC": media_id, iv = encrypt_AES(shared_key_derived, media_item["id"].encode(), "CBC") iv = base64.b64encode(iv).decode('latin') chunk, iv2 = encrypt_AES(shared_key_derived, str(chunk).encode(), "CBC") iv2 = base64.b64encode(iv2).decode('latin') info = json.dumps({"uuid": uuid_c, "iv": iv, "iv2": iv2}) elif mode == "GCM": media_id, iv, tag1 = encrypt_AES(shared_key_derived, media_item["id"].encode(), "GCM") iv = base64.b64encode(iv).decode('latin') tag1 = base64.b64encode(tag1).decode('latin') chunk, iv2, tag2 = encrypt_AES(shared_key_derived, str(chunk).encode(), "GCM") iv2 = base64.b64encode(iv2).decode('latin') tag2 = base64.b64encode(tag2).decode('latin') info = json.dumps({"uuid": uuid_c, "iv": iv, "iv2": iv2, "tag1": tag1, "tag2": tag2}) elif algorithm == "ChaCha20": media_id, nonce = 
encrypt_ChaCha20(shared_key_derived, media_item["id"].encode()) nonce = base64.b64encode(nonce).decode('latin') chunk, nonce2 = encrypt_ChaCha20(shared_key_derived, str(chunk).encode()) nonce2 = base64.b64encode(nonce2).decode('latin') info = json.dumps({"uuid": uuid_c, "nonce": nonce, "nonce_chunk": nonce2}) else: print("erro") sys.exit(0) media_id = base64.urlsafe_b64encode(media_id).decode('latin') chunk = base64.urlsafe_b64encode(chunk).decode('latin') signature = client_sign(digest_c, info) payload = { "data": info, "signature": base64.b64encode(signature).decode('latin') } req = requests.get(f'{SERVER_URL}/api/download?id={media_id}&chunk={chunk}', data=json.dumps(payload)) req = req.json() signature = base64.b64decode(req["signature"].encode()) #verificar assinatura if digest_c == "SHA256": server_public_key_rsa.verify( signature, req["data"].encode(), paddingAsymetric.PSS( mgf=paddingAsymetric.MGF1(hashes.SHA256()), salt_length=paddingAsymetric.PSS.MAX_LENGTH ), hashes.SHA256() ) elif digest_c == "SHA512": server_public_key_rsa.verify( signature, req["data"].encode(), paddingAsymetric.PSS( mgf=paddingAsymetric.MGF1(hashes.SHA512()), salt_length=paddingAsymetric.PSS.MAX_LENGTH ), hashes.SHA512() ) else: print("Erro") sys.exit(0) req = json.loads(req["data"]) if algorithm == "AES256": try: data_encrypted = req["data"].encode() data_encrypted = base64.b64decode(data_encrypted) iv = req["iv"].encode() iv = base64.b64decode(iv) MAC = req["MAC"].encode() MAC = base64.b64decode(MAC) salt = req["salt"].encode() salt = base64.b64decode(salt) except: print(req["error"]) proc.kill() break return 0 if digest_c == "SHA256": kdf = PBKDF2HMAC( algorithm=hashes.SHA256(), length=32, salt=salt, iterations=100000, ) key = kdf.derive(shared_key_derived) elif digest_c == "SHA512": kdf = PBKDF2HMAC( algorithm=hashes.SHA512(), length=32, salt=salt, iterations=100000, ) key = kdf.derive(shared_key_derived) else: print("Erro") sys.exit(0) c = cmac.CMAC(algorithms.AES(key)) c.update(data_encrypted) c.verify(MAC) if mode == "CBC": data = decrypt_AES(key, iv, data_encrypted, "CBC") elif mode == "GCM": tag = req["tag"].encode() tag = base64.b64decode(tag) data = decrypt_AES(key, iv, data_encrypted, "GCM", tag) info = json.loads(data.decode('latin')) data = info["data"] data = binascii.a2b_base64(data) elif algorithm == "ChaCha20": #CHACHA20 Funciona try: nonce = req["nonce"].encode() nonce = base64.b64decode(nonce) data_encrypted = req["data"].encode() data_encrypted = base64.b64decode(data_encrypted) MAC = req["MAC"].encode() MAC = base64.b64decode(MAC) salt = req["salt"].encode() salt = base64.b64decode(salt) except: print(req["error"]) proc.kill() break return 0 if digest_c == "SHA256": kdf = PBKDF2HMAC( algorithm=hashes.SHA256(), length=32, salt=salt, iterations=100000, ) key = kdf.derive(shared_key_derived) h = hmac.HMAC(key, hashes.SHA256()) h.update(data_encrypted) h.verify(MAC) elif digest_c == "SHA512": kdf = PBKDF2HMAC( algorithm=hashes.SHA512(), length=32, salt=salt, iterations=100000, ) key = kdf.derive(shared_key_derived) h = hmac.HMAC(key, hashes.SHA512()) h.update(data_encrypted) h.verify(MAC) else: print("ERRO") sys.exit(0) data = decrypt_ChaCha20(key, nonce, data_encrypted) info = json.loads(data.decode('latin')) data = info["data"] data = binascii.a2b_base64(data) else: print("Erro") sys.exit(0) try: proc.stdin.write(data) except: break
context.check_hostname = False
context.load_verify_locations('/etc/ssl/certs/ca-certificates.crt')
conn = context.wrap_socket(socket.socket(socket.AF_INET),
                           server_hostname=host)
conn.do_handshake_on_connect = True
conn.connect((host, port))
print(conn.cipher())
print(conn.version())

cert = ssl.get_server_certificate((host, port))

import cryptography
from cryptography import x509
from cryptography.hazmat.backends import default_backend

der = ssl.PEM_cert_to_DER_cert(cert)
cert = x509.load_der_x509_certificate(der, default_backend())
pprint.pprint(cert)

stats = context.session_stats()
pprint.pprint(stats)

conn.sendall(b"OPTIONS / HTTP/1.0\r\nHost: localhost\r\n\r\n")
print("Connection information")
pprint.pprint(conn.recv(1024).split(b"\r\n"))
conn.close()
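# Illustrative follow-up (not from the original source): pprint on a
# cryptography Certificate only shows its repr; the parsed fields have to be
# read explicitly, e.g.:
from cryptography.hazmat.primitives import hashes

print("subject:", cert.subject.rfc4514_string())
print("issuer:", cert.issuer.rfc4514_string())
print("not valid after (UTC):", cert.not_valid_after)
print("SHA-256 fingerprint:", cert.fingerprint(hashes.SHA256()).hex())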
def get_signature(options, manifestInput, enc_data): signatures = manifestGet(manifestInput, 'signature.signatures') or [] input_hash = manifestGet(manifestInput, 'signature.hash') or b'' signing_tool = manifestGet(manifestInput, 'signing-tool') or '' if getattr(options, 'signing_tool', None): signing_tool = options.signing_tool # There should always be a signing key or signing tool on create. if not signing_tool and not getattr(options, 'private_key', None): if 'private-key' in manifestInput: try: options.private_key = open(manifestInput['private-key'], 'r') except: LOG.critical( 'No private key specified and default key ({}) cannot be opened' .format(manifestInput['private-key'])) sys.exit(1) else: LOG.critical( 'Resource is not signed and no signing key is provided.') sys.exit(1) # Get SHA-256 hash of content and sign it using private key sha_content = utils.sha_hash(enc_data) if len(signatures): # If a signature is provided in the input json, then the encoded content must match the provided hash # Signature validation is not performed, since this would require certificate acquisition, which may not be # possible if sha_content != binascii.a2b_hex(input_hash): LOG.critical( 'Manifest hash provided in input file does not match hashed output' ) LOG.critical('Expected: {0}'.format(input_hash)) LOG.critical('Actual: {0}'.format(binascii.b2a_hex(sha_content))) sys.exit(1) # TODO: perform best-effort signature validation signature = None if signing_tool: # get the key id key_id = manifestGet(manifestInput, 'signing-key-id') if hasattr(options, 'signing_key_id') and options.signing_key_id: key_id = options.signing_key_id digest_algo = 'sha256' infile = None with tempfile.NamedTemporaryFile(delete=False) as f: infile = f.name f.write(enc_data) f.flush() LOG.debug('Temporary manifest file: {}'.format(infile)) outfile = None with tempfile.NamedTemporaryFile(delete=False) as f: outfile = f.name LOG.debug('Temporary signature file: {}'.format(outfile)) try: cmd = [signing_tool, digest_algo, key_id, infile, outfile] LOG.debug('Running "{}" to sign manifest.'.format(' '.join(cmd))) # This command line is constructed internally, so we ignore bandit # warnings about executing a Popen. See: # https://bandit.readthedocs.io/en/latest/plugins/b603_subprocess_without_shell_equals_true.html p = subprocess.Popen(cmd) #nosec p.wait() if p.returncode != 0: LOG.critical('Signing tool failed.') sys.exit(1) with open(outfile, 'rb') as f: signature = f.read() except: LOG.critical('Failed to execute signing tool.') sys.exit(1) finally: os.unlink(infile) os.unlink(outfile) LOG.debug('Signature: {}'.format( binascii.b2a_hex(signature).decode('utf-8'))) elif hasattr(options, 'private_key') and options.private_key: sk = ecdsa.SigningKey.from_pem(options.private_key.read()) signature = sk.sign_digest(sha_content, sigencode=ecdsa.util.sigencode_der) certificates = [] # pick a signature block with no signature in it. inputCerts = manifestGet(manifestInput, 'certificates') or [] # If no certificate was provided in the manifest input or in options, if len(inputCerts) == 0: # then load the default certificate inputCerts = manifestInput.get('default-certificates', []) # If there is still no certificate, if len(inputCerts) == 0: # Search through all signature blocks for one that contains certificates but no signature for idx, sb in enumerate(signatures): if not 'signature' in sb: inputCerts = sb.get('certificates', []) # This signature will be appended later so we must trim it. 
del signatures[idx] break for idx, cert in enumerate(inputCerts): if not any(k in cert for k in ('file', 'uri')): LOG.critical( 'Could not find "file" or "uri" property for certificate') sys.exit(1) # If 'file', we just use the content in local file if 'file' in cert: fPath = cert['file'] if not os.path.isabs(fPath): fPath = os.path.join(os.path.dirname(options.input_file.name), cert['file']) content = utils.read_file(fPath) # Else we download the file contents else: content = utils.download_file(cert['uri']) # Figure our which extension the certificate has contentPath = cert['file'] if 'file' in cert else cert['uri'] ext = contentPath.rsplit('.', 1)[1] # Read the certificate file, and get DER encoded data if ext == 'pem': lines = content.replace(" ", '').split() content = binascii.a2b_base64(''.join(lines[1:-1])) # Verify the certificate hash algorithm # Extract subjectPublicKeyInfo field from X.509 certificate (see RFC3280) # fingerprint = utils.sha_hash(content) cPath = cert['file'] if 'file' in cert else cert['uri'] certObj = None try: certObj = x509.load_der_x509_certificate( content, cryptoBackends.default_backend()) except ValueError as e: LOG.critical("X.509 Certificate Error in ({file}): {error}".format( error=e, file=cPath)) sys.exit(1) if not certObj: LOG.critical( "({file}) is not a valid certificate".format(file=cPath)) sys.exit(1) if not isinstance(certObj.signature_hash_algorithm, cryptoHashes.SHA256): LOG.critical( "In ({file}): Only SHA256 certificates are supported by the Device Management Update client at this time." .format(file=cPath)) sys.exit(1) fingerprint = certObj.fingerprint(cryptoHashes.SHA256()) LOG.debug( 'Creating certificate reference ({}) from {} with fingerprint {}'. format(idx, contentPath, fingerprint)) uri = '' if 'uri' in cert: uri = cert['uri'] certificates.append( CertificateReference(fingerprint=fingerprint, uri=uri)) LOG.debug( 'Signed hash ({}) of encoded content ({} bytes) with resulting signature {}' .format(sha_content, len(enc_data), signature)) if signature: signatures.append( SignatureBlock(signature=signature, certificates=certificates)) return ResourceSignature(hash=sha_content, signatures=signatures)
def import_files(self, files, import_keys=False, key_password=None, key_nickname=None): """ Import certificates and a single private key from multiple files The files may be in PEM and DER certificate, PKCS#7 certificate chain, PKCS#8 and raw private key and PKCS#12 formats. :param files: Names of files to import :param import_keys: Whether to import private keys :param key_password: Password to decrypt private keys :param key_nickname: Nickname of the private key to import from PKCS#12 files """ key_file = None extracted_key = None extracted_certs = [] for filename in files: try: with open(filename, 'rb') as f: data = f.read() except IOError as e: raise RuntimeError("Failed to open %s: %s" % (filename, e.strerror)) # Try to parse the file as PEM file matches = list( re.finditer(br'-----BEGIN (.+?)-----(.*?)-----END \1-----', data, re.DOTALL)) if matches: loaded = False for match in matches: body = match.group() label = match.group(1) line = len(data[:match.start() + 1].splitlines()) if label in (b'CERTIFICATE', b'X509 CERTIFICATE', b'X.509 CERTIFICATE'): try: cert = x509.load_pem_x509_certificate(body) except ValueError as e: if label != b'CERTIFICATE': logger.warning( "Skipping certificate in %s at line %s: " "%s", filename, line, e) continue else: extracted_certs.append(cert) loaded = True continue if label in (b'PKCS7', b'PKCS #7 SIGNED DATA', b'CERTIFICATE'): try: certs = x509.pkcs7_to_certs(body) except ipautil.CalledProcessError as e: if label == b'CERTIFICATE': logger.warning( "Skipping certificate in %s at line %s: " "%s", filename, line, e) else: logger.warning( "Skipping PKCS#7 in %s at line %s: %s", filename, line, e) continue else: extracted_certs.extend(certs) loaded = True continue if label in (b'PRIVATE KEY', b'ENCRYPTED PRIVATE KEY', b'RSA PRIVATE KEY', b'DSA PRIVATE KEY', b'EC PRIVATE KEY'): if not import_keys: continue if key_file: raise RuntimeError( "Can't load private key from both %s and %s" % (key_file, filename)) # the args -v2 aes256 -v2prf hmacWithSHA256 are needed # on OpenSSL 1.0.2 (fips mode). 
As soon as FreeIPA # requires OpenSSL 1.1.0 we'll be able to drop them args = [ paths.OPENSSL, 'pkcs8', '-topk8', '-v2', 'aes256', '-v2prf', 'hmacWithSHA256', '-passout', 'file:' + self.pwd_file, ] if ((label != b'PRIVATE KEY' and key_password) or label == b'ENCRYPTED PRIVATE KEY'): key_pwdfile = ipautil.write_tmp_file(key_password) args += [ '-passin', 'file:' + key_pwdfile.name, ] try: result = ipautil.run(args, stdin=body, capture_output=True) except ipautil.CalledProcessError as e: logger.warning( "Skipping private key in %s at line %s: %s", filename, line, e) continue else: extracted_key = result.raw_output key_file = filename loaded = True continue if loaded: continue raise RuntimeError("Failed to load %s" % filename) # Try to load the file as DER certificate try: cert = x509.load_der_x509_certificate(data) except ValueError: pass else: extracted_certs.append(cert) continue # Try to import the file as PKCS#12 file if import_keys: try: self.import_pkcs12(filename, key_password) except Pkcs12ImportUnknownError: # the file may not be a PKCS#12 file, # go to the generic error about unrecognized format pass except RuntimeError as e: raise RuntimeError("Failed to load %s: %s" % (filename, str(e))) else: if key_file: raise RuntimeError( "Can't load private key from both %s and %s" % (key_file, filename)) key_file = filename server_certs = self.find_server_certs() if key_nickname: for nickname, _trust_flags in server_certs: if nickname == key_nickname: break else: raise RuntimeError( "Server certificate \"%s\" not found in %s" % (key_nickname, filename)) else: if len(server_certs) > 1: raise RuntimeError( "%s server certificates found in %s, " "expecting only one" % (len(server_certs), filename)) continue # Supported formats were tried but none succeeded raise RuntimeError("Failed to load %s: unrecognized format" % filename) if import_keys and not key_file: raise RuntimeError("No server certificates found in %s" % (', '.join(files))) for cert in extracted_certs: nickname = str(DN(cert.subject)) self.add_cert(cert, nickname, EMPTY_TRUST_FLAGS) if extracted_key: with tempfile.NamedTemporaryFile() as in_file, \ tempfile.NamedTemporaryFile() as out_file: for cert in extracted_certs: in_file.write(cert.public_bytes(x509.Encoding.PEM)) in_file.write(extracted_key) in_file.flush() out_password = ipautil.ipa_generate_password() out_pwdfile = ipautil.write_tmp_file(out_password) args = [ paths.OPENSSL, 'pkcs12', '-export', '-in', in_file.name, '-out', out_file.name, '-passin', 'file:' + self.pwd_file, '-passout', 'file:' + out_pwdfile.name, ] try: ipautil.run(args) except ipautil.CalledProcessError as e: raise RuntimeError( "No matching certificate found for private key from " "%s" % key_file) self.import_pkcs12(out_file.name, out_password)
def test_invalid_der(self, backend):
    with pytest.raises(ValueError):
        x509.load_der_x509_certificate(b"notacert", backend)
def handle_echo(reader, writer): global conn_cnt conn_cnt += 1 srvwrk = ServerWorker(conn_cnt) #Certificados passwd = b"1234" #ler pkcs12 Servidor p12S = crypto.load_pkcs12(open("p12/Servidor.p12", 'rb').read(), passwd) private_keyS2 = crypto.dump_privatekey(crypto.FILETYPE_PEM, p12S.get_privatekey()) private_keyS = serialization.load_pem_private_key( private_keyS2, password=None, backend=default_backend()) #ler certificado Servidor certificadoS = crypto.dump_certificate(crypto.FILETYPE_PEM, p12S.get_certificate()) while True: data = yield from reader.read(100) if data[:1] == b'E': break # --------------------------------- Diffie Hellman --------------------------------- # if data[:1] == b'y': y = data[1:] writer.write(b'OK') continue if data[:1] == b'p': p = data[1:] writer.write(b'OK') continue if data[:1] == b'g': g = data[1:] shared_key, b_y = dhPK(int.from_bytes(y, 'big'), int.from_bytes(g, 'big'), int.from_bytes(p, 'big')) #Envio das assinaturas, de g^x e g^y sig_servidor = sig_create(b_y) sig_cliente = sig_create(y) writer.write(b'K' + b_y) writer.write(b'S' + sig_cliente + sig_servidor) data = yield from reader.read(513) #Recebe as assinaturas if data[:1] == b's': sig_cliente = data[1:257] sig_servidor = data[257:] sig_verify(sig_cliente, y) sig_verify(sig_servidor, b_y) #continue data = yield from reader.read(1268) if data[:1] == b'c': certificadoC = data[1:] #public key do certificado cert = x509.load_der_x509_certificate(certificadoC, default_backend()) #public Key pk = cert.public_key() serialized_public_key = pk.public_bytes( serialization.Encoding.DER, serialization.PublicFormat.SubjectPublicKeyInfo) #validação with open('certificados/CA.pem', 'r') as cert_file: cert = cert_file.read() trusted_certs = (cert, cert) verified = verify_chain_of_trust(certificadoC, trusted_certs) if verified: print('\n \n Certificate verified \n \n') else: print('Certificado não é válido \n') continue # ------------------------------------- if not data: continue addr = writer.get_extra_info('peername') #nonce = data[1:9] #cipher = ChaCha20.new(key=shared_key, nonce=nonce) #msg = cipher.decrypt(data[9:]) res = srvwrk.respond(data[1:], addr) if not res: break #res = b'M' + cipher.encrypt(res) res = b'M' + res writer.write(res) yield from writer.drain() print("[%d]" % srvwrk.id) writer.close()
def do_POST(self): """This method handles the POST requests to add agents to the Registrar Server. Currently, only agents resources are available for POSTing, i.e. /agents. All other POST uri's will return errors. POST requests require an an agent_id identifying the agent to add, and json block sent in the body with 2 entries: ek and aik. """ session = SessionManager().make_session(engine) rest_params = config.get_restful_params(self.path) if rest_params is None: config.echo_json_response( self, 405, "Not Implemented: Use /agents/ interface") return if "agents" not in rest_params: config.echo_json_response(self, 400, "uri not supported") logger.warning( 'POST agent returning 400 response. uri not supported: ' + self.path) return agent_id = rest_params["agents"] if agent_id is None: config.echo_json_response(self, 400, "agent id not found in uri") logger.warning( 'POST agent returning 400 response. agent id not found in uri ' + self.path) return try: content_length = int(self.headers.get('Content-Length', 0)) if content_length == 0: config.echo_json_response(self, 400, "Expected non zero content length") logger.warning( 'POST for ' + agent_id + ' returning 400 response. Expected non zero content length.' ) return post_body = self.rfile.read(content_length) json_body = json.loads(post_body) ekcert = json_body['ekcert'] aik_tpm = json_body['aik_tpm'] initialize_tpm = tpm() if ekcert is None or ekcert == 'emulator': logger.warning('Agent %s did not submit an ekcert' % agent_id) ek_tpm = json_body['ek_tpm'] else: if 'ek_tpm' in json_body: # This would mean the agent submitted both a non-None ekcert, *and* # an ek_tpm... We can deal with it by just ignoring the ek_tpm they sent logger.warning( 'Overriding ek_tpm for agent %s from ekcert' % agent_id) # If there's an EKCert, we just overwrite their ek_tpm # Note, we don't validate the EKCert here, other than the implicit # "is it a valid x509 cert" check. So it's still untrusted. # This will be validated by the tenant. ek509 = load_der_x509_certificate( base64.b64decode(ekcert), backend=default_backend(), ) ek_tpm = base64.b64encode( tpm2_objects.ek_low_tpm2b_public_from_pubkey( ek509.public_key(), )) aik_attrs = tpm2_objects.get_tpm2b_public_object_attributes( base64.b64decode(aik_tpm), ) if aik_attrs != tpm2_objects.AK_EXPECTED_ATTRS: config.echo_json_response(self, 400, "Invalid AK attributes") logger.warning( "Agent %s submitted AIK with invalid attributes! %s (provided) != %s (expected)", agent_id, tpm2_objects.object_attributes_description(aik_attrs), tpm2_objects.object_attributes_description( tpm2_objects.AK_EXPECTED_ATTRS), ) return # try to encrypt the AIK (blob, key) = initialize_tpm.encryptAIK( agent_id, base64.b64decode(ek_tpm), base64.b64decode(aik_tpm), ) # special behavior if we've registered this uuid before regcount = 1 try: agent = session.query(RegistrarMain).filter_by( agent_id=agent_id).first() except NoResultFound: agent = None except SQLAlchemyError as e: logger.error(f'SQLAlchemy Error: {e}') raise if agent is not None: # keep track of how many ek-ekcerts have registered on this uuid regcount = agent.regcount if agent.ek_tpm != ek_tpm or agent.ekcert != ekcert: logger.warning( 'WARNING: Overwriting previous registration for this UUID with new ek-ekcert pair!' 
) regcount += 1 # force overwrite logger.info('Overwriting previous registration for this UUID.') try: session.query(RegistrarMain).filter_by( agent_id=agent_id).delete() session.commit() except SQLAlchemyError as e: logger.error(f'SQLAlchemy Error: {e}') raise # Add values to database d = {} d['agent_id'] = agent_id d['ek_tpm'] = ek_tpm d['aik_tpm'] = aik_tpm d['ekcert'] = ekcert d['virtual'] = int(ekcert == 'virtual') d['active'] = int(False) d['key'] = key d['provider_keys'] = {} d['regcount'] = regcount try: session.add(RegistrarMain(**d)) session.commit() except SQLAlchemyError as e: logger.error(f'SQLAlchemy Error: {e}') raise response = { 'blob': blob, } config.echo_json_response(self, 200, "Success", response) logger.info('POST returning key blob for agent_id: ' + agent_id) except Exception as e: config.echo_json_response(self, 400, "Error: %s" % e) logger.warning("POST for " + agent_id + " returning 400 response. Error: %s" % e) logger.exception(e)
import base64 from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.serialization import load_pem_private_key YUBICO_SAMPLE_PUBLIC_CERT = x509.load_der_x509_certificate( data=base64.b64decode(b""" MIIBhzCCAS6gAwIBAgIJAJm+6LEMouwcMAkGByqGSM49BAEwITEfMB0GA1UEAwwW WXViaWNvIFUyRiBTb2Z0IERldmljZTAeFw0xMzA3MTcxNDIxMDNaFw0xNjA3MTYx NDIxMDNaMCExHzAdBgNVBAMMFll1YmljbyBVMkYgU29mdCBEZXZpY2UwWTATBgcq hkjOPQIBBggqhkjOPQMBBwNCAAQ74Zfdc36YPZ+w3gnnXEPIBl1J3pol6IviRAMc /hCIZFbDDwMs4bSWeFdwqjGfjDlICArdmjMWnDF/XCGvHYEto1AwTjAdBgNVHQ4E FgQUDai/k1dOImjupkubYxhOkoX3sZ4wHwYDVR0jBBgwFoAUDai/k1dOImjupkub YxhOkoX3sZ4wDAYDVR0TBAUwAwEB/zAJBgcqhkjOPQQBA0gAMEUCIFyVmXW7zlnY VWhuyCbZ+OKNtSpovBB7A5OHAH52dK9/AiEA+mT4tz5eJV8W2OwVxcq6ZIjrwqXc jXSy2G0k27yAUDk= """), backend=default_backend()) YUBICO_SAMPLE_PRIVATE_KEY = load_pem_private_key( b""" -----BEGIN EC PRIVATE KEY----- MHcCAQEEIMyk3gKcDg5lsYdl48fZoIFORhAc9cQxmn2Whv/+ya+2oAoGCCqGSM49 AwEHoUQDQgAEO+GX3XN+mD2fsN4J51xDyAZdSd6aJeiL4kQDHP4QiGRWww8DLOG0 lnhXcKoxn4w5SAgK3ZozFpwxf1whrx2BLQ== -----END EC PRIVATE KEY----- """, password=None, backend=default_backend(), )
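A quick sanity-check sketch for the two sample constants above: it prints the certificate subject and reports whether the bundled private key matches the certificate's public key (the match itself is an assumption about the sample data, so it is printed rather than asserted).

# Minimal sketch: inspect the sample certificate and compare key material.
if __name__ == '__main__':
    print(YUBICO_SAMPLE_PUBLIC_CERT.subject.rfc4514_string())
    cert_numbers = YUBICO_SAMPLE_PUBLIC_CERT.public_key().public_numbers()
    key_numbers = YUBICO_SAMPLE_PRIVATE_KEY.public_key().public_numbers()
    print('private key matches certificate:', cert_numbers == key_numbers)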
def verify(self): try: # Step 1. # # Perform JSON deserialization on the clientDataJSON field # of the AuthenticatorAttestationResponse object to extract # the client data C claimed as collected during the credential # creation. credential_id = self.registration_response.get('id') raw_id = self.registration_response.get('rawId') attestation_object = self.registration_response.get('attObj') client_data = self.registration_response.get('clientData') credential_type = self.registration_response.get('type') decoded_cd = _webauthn_b64_decode(client_data) cd = json.loads(decoded_cd) # Step 2. # # Verify that the type in C is the string webauthn.create. received_type = cd.get('type') if not _verify_type(received_type, TYPE_CREATE): raise RegistrationRejectedException('Invalid type.') # Step 3. # # Verify that the challenge in C matches the challenge that # was sent to the authenticator in the create() call. received_challenge = cd.get('challenge') if not _verify_challenge(received_challenge, self.challenge): raise RegistrationRejectedException( 'Unable to verify challenge.') # Step 4. # # Verify that the origin in C matches the Relying Party's origin. if not _verify_origin(cd, self.origin): raise RegistrationRejectedException('Unable to verify origin.') # Step 5. # # Verify that the tokenBindingId in C matches the Token # Binding ID for the TLS connection over which the # attestation was obtained. if not _verify_token_binding_id(cd): raise RegistrationRejectedException( 'Unable to verify token binding ID.') # Step 6. # # Verify that the clientExtensions in C is a proper subset # of the extensions requested by the RP and that the # authenticatorExtensions in C is also a proper subset of # the extensions requested by the RP. if not _verify_client_extensions(cd): raise RegistrationRejectedException( 'Unable to verify client extensions.') if not _verify_authenticator_extensions(cd): raise RegistrationRejectedException( 'Unable to verify authenticator extensions.') # Step 7. # # Compute the hash of clientDataJSON using the algorithm # identified by C.hashAlgorithm. client_data_hash = _get_client_data_hash(cd, decoded_cd) # Step 8. # # Perform CBOR decoding on the attestationObject field of # the AuthenticatorAttestationResponse structure to obtain # the attestation statement format fmt, the authenticator # data authData, and the attestation statement attStmt. att_obj = cbor2.loads(_webauthn_b64_decode(attestation_object)) att_stmt = att_obj.get('attStmt') auth_data = att_obj.get('authData') fmt = att_obj.get('fmt') if not auth_data or len(auth_data) < 37: raise RegistrationRejectedException( 'Auth data must be at least 37 bytes.') # Step 9. # # Verify that the RP ID hash in authData is indeed the # SHA-256 hash of the RP ID expected by the RP. auth_data_rp_id_hash = _get_auth_data_rp_id_hash(auth_data) if not _verify_rp_id_hash(auth_data_rp_id_hash, self.rp_id): raise RegistrationRejectedException( 'Unable to verify RP ID hash.') # Step 10. # # Determine the attestation statement format by performing # an USASCII case-sensitive match on fmt against the set of # supported WebAuthn Attestation Statement Format Identifier # values. The up-to-date list of registered WebAuthn # Attestation Statement Format Identifier values is maintained # in the in the IANA registry of the same name # [WebAuthn-Registries]. 
if not _verify_attestation_statement_format(fmt): raise RegistrationRejectedException( 'Unable to verify attestation statement format.') # From authenticatorData, extract the claimed RP ID hash, the # claimed credential ID and the claimed credential public key. attestation_data = auth_data[37:] aaguid = attestation_data[:16] credential_id_len = struct.unpack('!H', attestation_data[16:18])[0] cred_id = attestation_data[18:18 + credential_id_len] b64_cred_id = _webauthn_b64_encode(cred_id) credential_pub_key = attestation_data[18 + credential_id_len:] # The [=credential public key=] encoded in COSE_Key format, as # defined in Section 7 of [[#RFC8152]], using the # [=CTAP canonical CBOR encoding form=]. # The COSE_Key-encoded [=credential public key=] MUST contain the optional "alg" # parameter and MUST NOT contain any other optional parameters # The "alg" parameter MUST contain a {{COSEAlgorithmIdentifier}} value. # The encoded [=credential public key=] MUST also contain any additional # required parameters stipulated by the relevant key type specification, # i.e., required for the key type "kty" and algorithm "alg" # (see Section 8 of[[RFC8152]]). cpk = cbor2.loads(credential_pub_key) # Credential public key parameter names via the COSE_Key spec (for ES256). alg_key = 3 x_key = -2 y_key = -3 if alg_key not in cpk: raise RegistrationRejectedException( "Credential public key missing required algorithm parameter." ) required_keys = {alg_key, x_key, y_key} cpk_keys = cpk.keys() if not set(cpk_keys).issuperset(required_keys): raise RegistrationRejectedException( 'Credential public key must match COSE_Key spec.') # A COSEAlgorithmIdentifier's value is a number identifying # a cryptographic algorithm. The algorithm identifiers SHOULD # be values registered in the IANA COSE Algorithms registry # [IANA-COSE-ALGS-REG], for instance, -7 for "ES256" and -257 # for "RS256". # https://www.iana.org/assignments/cose/cose.xhtml#algorithms # For now we are only supporting ES256 as an algorithm. ES256 = -7 if cpk[alg_key] != ES256: raise RegistrationRejectedException('Unsupported algorithm.') x = long(cpk[x_key].encode('hex'), 16) y = long(cpk[y_key].encode('hex'), 16) user_ec = EllipticCurvePublicNumbers( x, y, SECP256R1()).public_key(backend=default_backend()) encoded_user_pub_key = _encode_public_key(user_ec) # Verify public key length [65 bytes]. if len(encoded_user_pub_key) != 65: raise RegistrationRejectedException('Bad public key.') if fmt == 'fido-u2f': # Step 11. # # Verify that attStmt is a correct, validly-signed attestation # statement, using the attestation statement format fmt's # verification procedure given authenticator data authData and # the hash of the serialized client data computed in step 6. # Verify that the given attestation statement is valid CBOR # conforming to the syntax defined above. if 'x5c' not in att_stmt or 'sig' not in att_stmt: raise RegistrationRejectedException( 'Attestation statement must be a valid CBOR object.') # If x5c is not a certificate for an ECDSA public key over the # P-256 curve, stop verification and return an error. # Let authenticatorData denote the authenticator data claimed # to have been used for the attestation, and let clientDataHash # denote the hash of the serialized client data. # If clientDataHash is 256 bits long, set tbsHash to this value. # Otherwise set tbsHash to the SHA-256 hash of clientDataHash. 
if len(client_data_hash) == 32: tbs_hash = client_data_hash else: tbs_hash = hashlib.sha256(client_data_hash).digest() # Generate the claimed to-be-signed data as specified in # [FIDO-U2F-Message-Formats] section 4.3, with the application # parameter set to the claimed RP ID hash, the challenge # parameter set to tbsHash, the key handle parameter set to # the claimed credential ID of the given credential, and the # user public key parameter set to the claimed credential # public key. cert = att_stmt.get('x5c')[0] x509_attestation_cert = load_der_x509_certificate( cert, default_backend()) public_key = x509_attestation_cert.public_key() signature = att_stmt['sig'] bytes_to_sign = ''.join([ '\0', auth_data_rp_id_hash, tbs_hash, cred_id, encoded_user_pub_key ]) # Verify that the sig is a valid ECDSA P-256 signature over the # to-be-signed data constructed above. # The signature is to be verified by the relying party using the # public key certified in the attestation certificate. The relying # party should also verify that the attestation certificate was # issued by a trusted certification authority. try: public_key.verify(signature, bytes_to_sign, ECDSA(SHA256())) except InvalidSignature: raise RegistrationRejectedException( 'Invalid signature received.') # If successful, return attestation type Basic with the trust # path set to x5c. # Possible attestation types: Basic, Privacy CA, # Self Attestation, ECDAA attestation_type = AT_BASIC trust_path = x509_attestation_cert # Step 12. # # If validation is successful, obtain a list of acceptable trust # anchors (attestation root certificates or ECDAA-Issuer public # keys) for that attestation type and attestation statement format # fmt, from a trusted source or from policy. For example, the FIDO # Metadata Service [FIDOMetadataService] provides one way to obtain # such information, using the AAGUID in the attestation data # contained in authData. trust_anchors = _get_trust_anchors(attestation_type, fmt, self.trust_anchor_dir) if not trust_anchors and self.trusted_attestation_cert_required: raise RegistrationRejectedException( 'No trust anchors available to verify attestation certificate.' ) # Step 13. # # Assess the attestation trustworthiness using the outputs of the # verification procedure in step 10, as follows: # # * If self attestation was used, check if self attestation is # acceptable under Relying Party policy. # * If ECDAA was used, verify that the identifier of the # ECDAA-Issuer public key used is included in the set of # acceptable trust anchors obtained in step 11. # * Otherwise, use the X.509 certificates returned by the # verification procedure to verify that the attestation # public key correctly chains up to an acceptable root # certificate. if attestation_type == AT_SELF_ATTESTATION: if not self.self_attestation_permitted: raise RegistrationRejectedException( 'Self attestation is not permitted.') elif attestation_type == AT_PRIVACY_CA: raise NotImplementedError( 'Privacy CA attestation type is not currently supported.' ) elif attestation_type == AT_ECDAA: raise NotImplementedError( 'ECDAA attestation type is not currently supported.') elif attestation_type == AT_BASIC: if self.trusted_attestation_cert_required: if not _is_trusted_attestation_cert( trust_path, trust_anchors): raise RegistrationRejectedException( 'Untrusted attestation certificate.') else: raise RegistrationRejectedException( 'Unknown attestation type.') # Step 14. 
# # If the attestation statement attStmt verified successfully and is # found to be trustworthy, then register the new credential with the # account that was denoted in the options.user passed to create(), # by associating it with the credential ID and credential public key # contained in authData's attestation data, as appropriate for the # Relying Party's systems. # Step 15. # # If the attestation statement attStmt successfully verified but is # not trustworthy per step 12 above, the Relying Party SHOULD fail # the registration ceremony. # # NOTE: However, if permitted by policy, the Relying Party MAY # register the credential ID and credential public key but # treat the credential as one with self attestation (see # 5.3.3 Attestation Types). If doing so, the Relying Party # is asserting there is no cryptographic proof that the # public key credential has been generated by a particular # authenticator model. See [FIDOSecRef] and [UAFProtocol] # for a more detailed discussion. elif fmt == 'none': # `none` - indicates that the Relying Party is not interested in # authenticator attestation. if not self.none_attestation_permitted: raise RegistrationRejectedException( 'Authenticator attestation is required.') else: raise RegistrationRejectedException('Invalid format.') sc = auth_data[33:37] sign_count = struct.unpack('!I', sc)[0] credential = WebAuthnCredential( self.rp_id, self.origin, b64_cred_id, base64.b64encode(encoded_user_pub_key), sign_count) return credential except Exception as e: raise RegistrationRejectedException( 'Registration rejected. Error: {}.'.format(e))
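The ES256 coordinate extraction inside verify() above relies on Python 2 constructs (`long`, `str.encode('hex')`). A minimal Python 3 sketch of the same COSE_Key handling, assuming cbor2 returns the x/y coordinates as bytes, could look like this; it is a rough equivalent, not the library's code.

# Python 3 sketch of the COSE_Key (ES256) extraction used above; int.from_bytes
# replaces long(value.encode('hex'), 16).
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.ec import (
    EllipticCurvePublicNumbers, SECP256R1)

def cose_es256_to_public_key(cpk):
    x = int.from_bytes(cpk[-2], 'big')  # COSE_Key "x" coordinate
    y = int.from_bytes(cpk[-3], 'big')  # COSE_Key "y" coordinate
    return EllipticCurvePublicNumbers(x, y, SECP256R1()).public_key(
        backend=default_backend())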
] cert_chain_raw_bad_ee = [ bytearray.fromhex( '30820243308201c9a003020102020601779e45a1bd300a06082a8648ce3d0403023048311c301a06035504030c134170706c6520576562417574686e204341203131133011060355040a0c0a4170706c6520496e632e3113301106035504080c0a43616c69666f726e6961301e170d3231303231333032303133395a170d3231303231363032303133395a3081913149304706035504030c4030326631346235316537383963333366666561643561316139303738653334633162666634626634313064313238386338626365356530373034306431316536311a3018060355040b0c114141412043657274696669636174696f6e31133011060355040a0c0a4170706c6520496e632e3113301106035504080c0a43616c69666f726e69613059301306072a8648ce3d020106082a8648ce3d03010703420004ef53b98262ba5ae96cbcb5fc73a529e207c968f771c29b60cc786144ed78c0e54c9396c99ca097331090f2ceb421d736781e155f744c599d91e7dea21feb92cba3553053300c0603551d130101ff04023000300e0603551d0f0101ff0404030204f0303306092a864886f76364080204263024a12204202172935a7c1adf60c84e804769285222be2d5e357efcfd7df6cfa2cac35db06a300a06082a8648ce3d040302036800306502310086cfd661a7731ef2cc37ef9337ed9990e652f9d4333e4ab612565fef9e416ddd2810ed5feff5179a521d4f4336ff67db023019988f1c649298c41c5c86b6217dac70a4b208d749973c77cdc5243762b48def531c828ccb40d64ec3bf19b31a3db3de' ), bytearray.fromhex( '30820234308201baa003020102021056255395c7a7fb40ebe228d8260853b6300a06082a8648ce3d040303304b311f301d06035504030c164170706c6520576562417574686e20526f6f7420434131133011060355040a0c0a4170706c6520496e632e3113301106035504080c0a43616c69666f726e6961301e170d3230303331383138333830315a170d3330303331333030303030305a3048311c301a06035504030c134170706c6520576562417574686e204341203131133011060355040a0c0a4170706c6520496e632e3113301106035504080c0a43616c69666f726e69613076301006072a8648ce3d020106052b8104002203620004832e872f261491810225b9f5fcd6bb6378b5f55f3fcb045bc735993475fd549044df9bfe19211765c69a1dda050b38d45083401a434fb24d112d56c3e1cfbfcb9891fec0696081bef96cbc77c88dddaf46a5aee1dd515b5afaab93be9c0b2691a366306430120603551d130101ff040830060101ff020100301f0603551d2304183016801426d764d9c578c25a67d1a7de6b12d01b63f1c6d7301d0603551d0e04160414ebae82c4ffa1ac5b51d4cf24610500be63bd7788300e0603551d0f0101ff040403020106300a06082a8648ce3d0403030368003065023100dd8b1a3481a5fad9dbb4e7657b841e144c27b75b876a4186c2b1475750337227efe554457ef648950c632e5c483e70c102302c8a6044dc201fcfe59bc34d2930c1487851d960ed6a75f1eb4acabe38cd25b897d0c805bef0c7f78b07a571c6e80e07' ), bytearray.fromhex( '30820234308201baa003020102021056255395c7a7fb40ebe228d8260853b6300a06082a8648ce3d040303304b311f301d06035504030c164170706c6520576562417574686e20526f6f7420434131133011060355040a0c0a4170706c6520496e632e3113301106035504080c0a43616c69666f726e6961301e170d3230303331383138333830315a170d3330303331333030303030305a3048311c301a06035504030c134170706c6520576562417574686e204341203131133011060355040a0c0a4170706c6520496e632e3113301106035504080c0a43616c69666f726e69613076301006072a8648ce3d020106052b8104002203620004832e872f261491810225b9f5fcd6bb6378b5f55f3fcb045bc735993475fd549044df9bfe19211765c69a1dda050b38d45083401a434fb24d112d56c3e1cfbfcb9891fec0696081bef96cbc77c88dddaf46a5aee1dd515b5afaab93be9c0b2691a366306430120603551d130101ff040830060101ff020100301f0603551d2304183016801426d764d9c578c25a67d1a7de6b12d01b63f1c6d7301d0603551d0e04160414ebae82c4ffa1ac5b51d4cf24610500be63bd7788300e0603551d0f0101ff040403020106300a06082a8648ce3d0403030368003065023100dd8b1a3481a5fad9dbb4e7657b841e144c27b75b876a4186c2b1475750337227efe554457ef648950c632e5c483e70c102302c8a6044dc201fcfe59bc34d2930c1487851d960ed6a75f1eb4acabe38cd25b897d0c805bef0c7f78b07a571c6e80e07' ) ] try: 
cert_chain_good = [ x509.load_der_x509_certificate(cert, default_backend()) for cert in cert_chain_raw_good ] check_cert_chain(cert_chain_good, kAPPLE_WEBAUTHN_AUTHORITY_ROOT) except Exception as e: print('No exception was expected, aborting. Got: {}'.format(e)) raise excepted = False try: cert_chain_bad = [ x509.load_der_x509_certificate(cert, default_backend()) for cert in cert_chain_raw_bad ] check_cert_chain(cert_chain_bad, kAPPLE_WEBAUTHN_AUTHORITY_ROOT) except Exception as e:
def parse_server_certificate(self, tls_cert_msg, client, server, ts): """ Parses the certificate message """ connection_key = "{}-{}".format(client, server) assert isinstance(tls_cert_msg, dpkt.ssl.TLSCertificate) self.count_certificate_messages += 1 _tree = RootCATree.RootCATree(node_class=CertNode.CertNode) pre = None for crt in reversed(tls_cert_msg.certificates): try: cert = x509.load_der_x509_certificate(crt, default_backend()) except Exception as e: self.count_parsing_errors += 1 self.logger.debug(binascii.hexlify(crt)) self.logger.warning("[-] Parsing certificate failed.") self.logger.warning("[-] Error: {}".format(e)) self.logger.warning( "[-] Error occurred on connection: {}".format( connection_key)) self.logger.warning("[-] Skip this certificate chain...") return try: if cert.extensions.get_extension_for_oid( x509.ExtensionOID.BASIC_CONSTRAINTS).value.ca: _tree.create_node(tag=cert.subject.rfc4514_string(), identifier=binascii.hexlify( cert.fingerprint(hashes.SHA256())), data=cert, parent=pre) pre = binascii.hexlify(cert.fingerprint(hashes.SHA256())) except exceptions.DuplicatedNodeIdError: self.logger.info("[*] Node already exists: {}".format( cert.fingerprint(hashes.SHA256()))) pass except Exception as ex: if 'basicConstraints' in str(ex): pass else: self.logger.warning("[-] Error: {}".format(ex)) self.logger.warning( "[-] Error occurred on connection: {}".format( connection_key)) self.logger.warning("[-] Skip this certificate chain...") return self.logger.info( "[*] Try to find parent for each node in all root ca trees...") for node in _tree.all_nodes(): self.logger.info("[*] Try to find parent for node: {}".format( node.tag)) found = False try: akid = node.data.extensions.get_extension_for_oid( x509.ExtensionOID.AUTHORITY_KEY_IDENTIFIER ).value.key_identifier for ca_tree in self.root_ca_tree_list: found = ca_tree.search_nodes(node, ts) if found: break if not found: self.logger.info("[*] No Root certificate found") self.logger.info("[*] AKID exist: {}".format( binascii.hexlify(akid.decode()))) self.logger.info("[*] Maybe this cert is a root CA cert") # check root node of the ca tree list help_found = False for _ca_tree in self.root_ca_tree_list: root = _ca_tree.get_node(_ca_tree.root) if _ca_tree.check_if_is_root_ca(root, node, ts): help_found = True break if not help_found: self.logger.warning("[!] Nothing found to this cert") self.cert_with_no_parent.append(node.data) except Exception as ex: if 'authorityKeyIdentifier' in str(ex): self.logger.info( "[*] The Searching certificate has no authority key identifier. Maybe this cert is a root CA cert" ) # check root node of the ca tree list help_found = False for _ca_tree in self.root_ca_tree_list: root = _ca_tree.get_node(_ca_tree.root) if _ca_tree.check_if_is_root_ca(root, node, ts): help_found = True break if not help_found: self.logger.warning("[!] Nothing found to this cert") self.cert_with_no_parent.append(node.data) else: self.logger.error(str(ex)) self.logger.info("[*] End of this certificate message reached...")
def get_certificate(self) -> x509.Certificate: """Read an Opaque object from the YubiHSM, parsed as a certificate. :return: The certificate stored for the object. """ return x509.load_der_x509_certificate(self.get(), default_backend())
def __init__(self, server_cert): require_cryptography(self) self.server_cert = x509.load_der_x509_certificate( server_cert, default_backend()) self.key_size = self.server_cert.public_key().key_size // 8
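`key_size` on the parsed certificate's public key is reported in bits, so the constructor above divides by 8 to get the size in bytes. A standalone sketch of the same computation, assuming an RSA server certificate and a hypothetical DER file path:

# Standalone sketch of the key-size computation; the file path is hypothetical.
from cryptography import x509
from cryptography.hazmat.backends import default_backend

with open('server_cert.der', 'rb') as f:  # hypothetical DER certificate
    server_cert = x509.load_der_x509_certificate(f.read(), default_backend())
print(server_cert.public_key().key_size // 8)  # modulus length in bytes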
def load_der_certificate(data): """ Loads a DER X.509 certificate. """ return x509.load_der_x509_certificate(data, default_backend())
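A minimal usage sketch for the helper above; the input file name is hypothetical.

# Load a DER certificate from disk and print a few fields.
if __name__ == '__main__':
    with open('example.der', 'rb') as f:  # hypothetical input file
        certificate = load_der_certificate(f.read())
    print(certificate.subject.rfc4514_string())
    print(certificate.not_valid_after)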
def writeUpdateDefaults(options): if os.path.isfile(defaults.updateResources) and not (hasattr( options, 'force') and options.force): LOG.warning( '{} already exists, not overwriting. Use --force to write anyway.'. format(defaults.updateResources)) return with open(defaults.updateResources, 'w') as defaultResources: # Format the device UUIDs vendorId = ', '.join( ['0x%x' % x for x in bytes(uuid.UUID(options.vendor_id).bytes)]) classId = ', '.join( ['0x%x' % x for x in bytes(uuid.UUID(options.class_id).bytes)]) certFp = '' cert = '' str_ski = '' # Read the certificate if not hasattr(options, 'psk') or not options.psk: options.certificate.seek(0) cstr = options.certificate.read() try: # Load the certificate. certObj = x509.load_der_x509_certificate( cstr, default_backend()) except ValueError as e: raise ValueError('Error loading {}: {}'.format( options.certificate.name, e.message)) # Calculate the certificate fingerprint c_hash = certObj.fingerprint(hashes.SHA256()) # Format the certificate fingerprint certFp = ', '.join(['0x%x' % x for x in bytes(c_hash[:16]) ]) + ',\n ' + ', '.join( ['0x%x' % x for x in bytes(c_hash[16:])]) # Calculate the subjectKeyIdentifier try: c_ski = certObj.extensions.get_extension_for_oid( x509.oid.ExtensionOID.SUBJECT_KEY_IDENTIFIER).value.digest str_ski = ', '.join( ['0x%x' % x for x in bytes(c_ski[:16])]) + ',\n ' + ', '.join( ['0x%x' % x for x in bytes(c_ski[16:])]) except x509.ExtensionNotFound as e: LOG.warning('No Subject Key Identifier present in certificate') # Format the certificate CByteArray = ['0x%x' % x for x in bytes(cstr)] CLineList = [ ', '.join(CByteArray[i:i + 16]) for i in range(0, len(CByteArray), 16) ] cert = ',\n '.join(CLineList) defaultResources.write( templates.UpdateDefaultResources.format( vendorId=vendorId, classId=classId, certFp=certFp, ski=str_ski, cert=cert, psk=', '.join(['0x%x' % x for x in bytes(options.psk)]), pskId=', '.join(['0x%x' % x for x in bytes(options.psk_id)]))) LOG.info('Wrote default resource values to {}'.format( defaults.updateResources))
def get_loaded_cert(self): # just for the CC cc_cert = self.session.findObjects([(CKA_CLASS, CKO_CERTIFICATE)])[0] cc_certDer = bytes( self.session.getAttributeValue(cc_cert, [CKA_VALUE], True)[0]) return x509.load_der_x509_certificate(cc_certDer, default_backend())
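The snippet above assumes an already-open PKCS#11 session (`self.session`). A minimal setup sketch, assuming the PyKCS11 library and a smart-card middleware whose shared-library path is purely an assumption:

# Hypothetical PKCS#11 session setup for the snippet above.
from PyKCS11 import PyKCS11Lib

lib = PyKCS11Lib()
lib.load('/usr/local/lib/libpteidpkcs11.so')  # assumed middleware location
slot = lib.getSlotList(tokenPresent=True)[0]  # first slot with a token present
session = lib.openSession(slot)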
result = client.make_credential(create_options["publicKey"], pin=pin) js = {} attestation_object = result[0] client_data = result[1] js_result_auth = {} js_result_auth['attestation_object'] = {} js_result_auth['attestation_object']['att_statement'] = {} js_result_auth['attestation_object']['att_statement'][ 'alg'] = attestation_object.att_statement['alg'] js_result_auth['attestation_object']['att_statement']['sig'] = str( binascii.hexlify(attestation_object.att_statement['sig'])) x5c = [] for elem in attestation_object.att_statement['x5c']: c = x509.load_der_x509_certificate(elem) x5c.append(str(c.public_bytes(serialization.Encoding.PEM))) js_result_auth['attestation_object']['att_statement']['x5c'] = copy.copy(x5c) js_result_auth['attestation_object']['data'] = str(attestation_object.data) js_result_auth['attestation_object']['auth_data'] = {} auth_data = attestation_object.auth_data js_result_auth['attestation_object']['auth_data'][ 'counter'] = auth_data.counter js_result_auth['attestation_object']['auth_data'][ 'extensions'] = auth_data.extensions js_result_auth['attestation_object']['auth_data']['flags'] = auth_data.flags js_result_auth['attestation_object']['auth_data']['credential_data'] = {} js_result_auth['attestation_object']['auth_data']['credential_data'][ 'aaguid'] = str(uuid.UUID(bytes=auth_data.credential_data.aaguid)) js_result_auth['attestation_object']['auth_data']['credential_data'][ 'credential_id'] = str(
def create_pem_bundle(destination, urls=None, resource_dir=None, set_env_var=True):
    """create a PEM formatted certificate bundle from the specified resources

    Args:
        destination(str, required): pathname for created pem bundle file
        urls(iterable, optional, default=None): passed to `download_resources` if
            specified, else the existing contents of `resource_dir` are processed;
            `urls` and/or `resource_dir` must be specified
        resource_dir(str, optional, default=None): location of resources to process;
            passed to `download_resources` along with `urls` if both specified,
            else a temporary location is utilized
        set_env_var(bool, optional, default=True): determines whether the
            `DOD_CA_CERTS_PEM_PATH` environment variable is set to the pathname
            of the created pem bundle

    Returns:
        pathname of created pem bundle file
    """
    if resource_dir is not None:
        assert os.path.exists(resource_dir)
        assert os.path.isdir(resource_dir)
    else:
        assert urls is not None  # `urls` or `resource_dir` must be specified
    if urls is not None:
        resource_dir = download_resources(urls, resource_dir)
    # start the bytes stream with a creation-time header
    pem_bundle = "# Bundle Created: {} \n".format(datetime.now()).encode()
    # get file list
    files = sorted(os.listdir(resource_dir))
    # process CAs first then Roots
    for type in ['ca', 'root']:
        for file in files:
            if any([file.endswith(ext) for ext in cert_exts]):
                fpath = os.path.join(resource_dir, file)
                if file.lower().find(type) > -1 and os.path.isfile(fpath):
                    with open(fpath, 'rb') as f:
                        contents = f.read()
                        try:
                            cert = load_der_x509_certificate(
                                contents, backend=default_backend())
                        except ValueError:
                            try:
                                cert = load_pem_x509_certificate(
                                    contents, backend=default_backend())
                            except ValueError:
                                log.warning(
                                    'Unable to load public key from: {}'.format(file))
                                continue
                        # add cert's info and public key in PEM format to the bytes stream
                        pem_bundle += describe_cert(cert).encode()
                        pem_bundle += cert.public_bytes(Encoding.PEM)
    destination = os.path.abspath(destination)
    with open(destination, 'wb') as f:
        f.write(pem_bundle)
    if set_env_var:
        os.environ['DOD_CA_CERTS_PEM_PATH'] = destination
        log.info('Set DOD_CA_CERTS_PEM_PATH environment variable')
    return destination
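A hypothetical invocation of create_pem_bundle using certificates that have already been placed in a local directory; both paths below are assumptions, not values from the original project.

# Hypothetical usage: build the bundle from a pre-populated directory.
bundle_path = create_pem_bundle('dod_ca_bundle.pem', resource_dir='dod_certs')
print(bundle_path)  # absolute path of the bundle; DOD_CA_CERTS_PEM_PATH now points at it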
def check_remote(self, instance): if not self._server: raise ConfigurationError( 'You must specify `server` in your configuration file.') try: self.log.debug('Checking that TLS service check can connect') sock = self.create_connection() except Exception as e: self.log.debug('Error occurred while connecting to socket: %s', str(e)) self.service_check(self.SERVICE_CHECK_CAN_CONNECT, self.CRITICAL, tags=self._tags, message=str(e)) return else: self.log.debug('TLS check able to connect') self.service_check(self.SERVICE_CHECK_CAN_CONNECT, self.OK, tags=self._tags) # Get the cert & TLS version from the connection with closing(sock): self.log.debug('Getting cert and TLS protocol version') try: with closing( self.tls_context.wrap_socket( sock, server_hostname=self._server_hostname) ) as secure_sock: der_cert = secure_sock.getpeercert(binary_form=True) protocol_version = secure_sock.version() self.log.debug( 'Received serialized peer certificate and TLS protocol version %s', protocol_version) except Exception as e: # https://docs.python.org/3/library/ssl.html#ssl.SSLCertVerificationError err_code = getattr(e, 'verify_code', None) message = getattr(e, 'verify_message', str(e)) self.log.debug( 'Error occurred while getting cert and TLS version from connection: %s', str(e)) self.service_check(self.SERVICE_CHECK_VALIDATION, self.CRITICAL, tags=self._tags, message=message) # There's no sane way to tell it to not validate just the expiration # This only works on Python 3.7+, see: https://bugs.python.org/issue28182 # https://github.com/openssl/openssl/blob/0b45d8eec051fd9816b6bf46a975fa461ffc983d/include/openssl/x509_vfy.h#L109 if err_code == 10: self.service_check(self.SERVICE_CHECK_EXPIRATION, self.CRITICAL, tags=self._tags, message='Certificate has expired') return # Load https://cryptography.io/en/latest/x509/reference/#cryptography.x509.Certificate try: self.log.debug('Deserializing peer certificate') cert = load_der_x509_certificate(der_cert, default_backend()) self.log.debug('Deserialized peer certificate: %s', cert) except Exception as e: self.log.debug('Error while deserializing peer certificate: %s', str(e)) self.service_check( self.SERVICE_CHECK_VALIDATION, self.CRITICAL, tags=self._tags, message='Unable to parse the certificate: {}'.format(e), ) return self.check_protocol_version(protocol_version) self.validate_certificate(cert) self.check_age(cert)
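A standalone sketch of the same idea as check_remote above, stripped of the service-check plumbing: grab the peer certificate over TLS with the standard library and parse it with cryptography. The host and port are placeholders.

# Fetch and parse a peer certificate; 'example.com' is a placeholder host.
import socket
import ssl
from cryptography.x509 import load_der_x509_certificate
from cryptography.hazmat.backends import default_backend

ctx = ssl.create_default_context()
with socket.create_connection(('example.com', 443)) as sock:
    with ctx.wrap_socket(sock, server_hostname='example.com') as tls_sock:
        der_cert = tls_sock.getpeercert(binary_form=True)
        protocol_version = tls_sock.version()
cert = load_der_x509_certificate(der_cert, default_backend())
print(protocol_version, cert.not_valid_after)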