def getSubjectPublicKeyInfo(self):
    """Return an X.509 SubjectPublicKeyInfo (rsaEncryption, NULL parameters) for this key."""
    algId = rfc2459.AlgorithmIdentifier()
    algId['algorithm'] = rfc2459.rsaEncryption
    # rsaEncryption carries an explicit ASN.1 NULL as its parameters.
    algId['parameters'] = univ.Null()
    spki = rfc2459.SubjectPublicKeyInfo()
    spki['algorithm'] = algId
    spki['subjectPublicKey'] = self.getSubjectPublicKey()
    return spki
def asSubjectPublicKeyInfo(self):
    """Returns a subject public key info representing this key for use by pyasn1."""
    # The algorithm is ecPublicKey; the specific curve is named via its OID.
    algId = rfc2459.AlgorithmIdentifier()
    algId.setComponentByName('algorithm', ecPublicKey)
    algId.setComponentByName('parameters', self.keyOID)
    spki = rfc2459.SubjectPublicKeyInfo()
    spki.setComponentByName('algorithm', algId)
    spki.setComponentByName('subjectPublicKey',
                            univ.BitString(self.getPublicKeyHexifiedString()))
    return spki
def create(cls, parameters):
    """Build a SubjectPublicKeyInfo whose algorithm OID is the curve's own OID.

    parameters: dict with "curve" (a curve object exposing .oid and .point())
    and the point coordinates "x" and "y".
    """
    asn1 = rfc2459.SubjectPublicKeyInfo()
    asn1["algorithm"] = rfc2459.AlgorithmIdentifier()
    # The curve OID itself identifies the key algorithm here. A previous
    # assignment of the "id-dsa" OID on this field was dead code (immediately
    # overwritten by the line below) and has been removed.
    asn1["algorithm"]["algorithm"] = parameters["curve"].oid.to_asn1()
    inner_key = parameters["curve"].point(parameters["x"],
                                          parameters["y"]).encode()
    asn1["subjectPublicKey"] = ASN1Tools.bytes2bitstring(inner_key)
    return asn1
def _get_pubickey_sha1_hash(cert):
    """Return a hashlib sha1 object fed with the certificate's public key bits."""
    pkey_der = dump_publickey(FILETYPE_ASN1, cert.get_pubkey())
    spki, _ = der_decoder.decode(pkey_der, rfc2459.SubjectPublicKeyInfo())
    digest = hashlib.sha1()
    digest.update(bit_string_to_bytearray(spki['subjectPublicKey']))
    # Note: returns the hash object itself, not its digest.
    return digest
def create(cls, parameters):
    """Build an rsaEncryption SubjectPublicKeyInfo from modulus "n" and exponent "e"."""
    algorithm = rfc2459.AlgorithmIdentifier()
    algorithm["algorithm"] = OIDDB.KeySpecificationAlgorithms.inverse(
        "rsaEncryption").to_asn1()
    # rsaEncryption requires an explicitly encoded ASN.1 NULL as parameters.
    encoded_null = pyasn1.codec.der.encoder.encode(pyasn1.type.univ.Null())
    algorithm["parameters"] = pyasn1.type.univ.Any(value=encoded_null)

    rsa_key = rfc2437.RSAPublicKey()
    rsa_key["modulus"] = pyasn1.type.univ.Integer(parameters["n"])
    rsa_key["publicExponent"] = pyasn1.type.univ.Integer(parameters["e"])
    encoded_key = pyasn1.codec.der.encoder.encode(rsa_key)

    spki = rfc2459.SubjectPublicKeyInfo()
    spki["algorithm"] = algorithm
    spki["subjectPublicKey"] = ASN1Tools.bytes2bitstring(encoded_key)
    return spki
def __init__(self, type, format, algorithm, encoded, **kwargs):
    """Parse a BKS key entry of the given type and format from its encoded bytes.

    Raises UnexpectedKeyEncodingException when the format does not match the
    one expected for the entry type, or when the type is not recognized.
    """
    super(BksKeyEntry, self).__init__(**kwargs)
    self.type = type
    """An integer indicating the type of key: one of :const:`KEY_TYPE_PRIVATE`, :const:`KEY_TYPE_PUBLIC`, :const:`KEY_TYPE_SECRET`."""
    self.format = format
    """A string indicating the format or encoding in which the key is stored. One of: ``PKCS8``, ``PKCS#8``, ``X.509``, ``X509``, ``RAW``."""
    self.algorithm = algorithm
    """A string indicating the algorithm for which the key is valid."""
    self.encoded = encoded
    """A byte string containing the key, formatted as indicated by the :attr:`format` attribute."""
    if self.type == KEY_TYPE_PRIVATE:
        if self.format not in ["PKCS8", "PKCS#8"]:
            raise UnexpectedKeyEncodingException(
                "Unexpected encoding for private key entry: '%s'" % self.format)
        # self.encoded is a PKCS#8 PrivateKeyInfo
        # decode() returns (value, remainder); we only need the parsed value.
        private_key_info = decoder.decode(
            self.encoded, asn1Spec=rfc5208.PrivateKeyInfo())[0]
        self.pkey_pkcs8 = self.encoded
        self.pkey = private_key_info['privateKey'].asOctets()
        self.algorithm_oid = private_key_info['privateKeyAlgorithm'][
            'algorithm'].asTuple()
    elif self.type == KEY_TYPE_PUBLIC:
        if self.format not in ["X.509", "X509"]:
            raise UnexpectedKeyEncodingException(
                "Unexpected encoding for public key entry: '%s'" % self.format)
        # self.encoded is an X.509 SubjectPublicKeyInfo
        spki = decoder.decode(self.encoded,
                              asn1Spec=rfc2459.SubjectPublicKeyInfo())[0]
        self.public_key_info = self.encoded
        self.public_key = bitstring_to_bytes(spki['subjectPublicKey'])
        self.algorithm_oid = spki['algorithm']['algorithm'].asTuple()
    elif self.type == KEY_TYPE_SECRET:
        if self.format != "RAW":
            raise UnexpectedKeyEncodingException(
                "Unexpected encoding for raw key entry: '%s'" % self.format)
        # self.encoded is an unwrapped/raw cryptographic key
        self.key = encoded
        self.key_size = len(encoded) * 8
    else:
        # NOTE(review): this branch dispatches on self.type, yet the message
        # reports self.format — likely meant to report the unknown type.
        # Left unchanged since callers/tests may match on this text; confirm.
        raise UnexpectedKeyEncodingException(
            "Key format '%s' not recognized" % self.format)
def asSubjectPublicKeyInfo(self):
    """Returns a subject public key info representing this key for use by pyasn1."""
    algId = rfc2459.AlgorithmIdentifier()
    algId.setComponentByName('algorithm', rfc2459.rsaEncryption)
    # Directly setting parameters to univ.Null doesn't currently work; wrap a
    # pre-encoded NULL in an Any instead.
    algId.setComponentByName('parameters', univ.Any(encoder.encode(univ.Null())))
    rsaKey = RSAPublicKey()
    rsaKey.setComponentByName('N', univ.Integer(self.RSA_N))
    rsaKey.setComponentByName('E', univ.Integer(self.RSA_E))
    spki = rfc2459.SubjectPublicKeyInfo()
    spki.setComponentByName('algorithm', algId)
    spki.setComponentByName(
        'subjectPublicKey',
        univ.BitString(byteStringToHexifiedBitString(encoder.encode(rsaKey))))
    return spki
def asSubjectPublicKeyInfo(self):
    """Returns a subject public key info representing this key for use by pyasn1."""
    algorithm = rfc2459.AlgorithmIdentifier()
    algorithm['algorithm'] = rfc2459.rsaEncryption
    # rsaEncryption takes an explicit ASN.1 NULL as its parameters.
    algorithm['parameters'] = univ.Null()
    publicKey = RSAPublicKey()
    publicKey['N'] = univ.Integer(self.RSA_N)
    publicKey['E'] = univ.Integer(self.RSA_E)
    spki = rfc2459.SubjectPublicKeyInfo()
    spki['algorithm'] = algorithm
    spki['subjectPublicKey'] = univ.BitString(
        byteStringToHexifiedBitString(encoder.encode(publicKey)))
    return spki
def create(cls, parameters):
    """Build an id-dsa SubjectPublicKeyInfo with explicit DSS domain parameters."""
    spki = rfc2459.SubjectPublicKeyInfo()
    spki["algorithm"] = rfc2459.AlgorithmIdentifier()
    spki["algorithm"]["algorithm"] = OIDDB.KeySpecificationAlgorithms.inverse(
        "id-dsa").to_asn1()
    # Attach an empty Dss_Parms, then fill p/q/g in place.
    spki["algorithm"]["parameters"] = rfc3279.Dss_Parms()
    for component in ("p", "q", "g"):
        spki["algorithm"]["parameters"][component] = parameters[component]
    encoded_pub = pyasn1.codec.der.encoder.encode(
        rfc3279.DSAPublicKey(parameters["pubkey"]))
    spki["subjectPublicKey"] = ASN1Tools.bytes2bitstring(encoded_pub)
    return spki
def asSubjectPublicKeyInfo(self):
    """Returns a subject public key info representing this key for use by pyasn1."""
    algId = rfc2459.AlgorithmIdentifier()
    algId.setComponentByName('algorithm', ecPublicKey)
    algId.setComponentByName('parameters', self.keyOID)
    # Extract the curve point from the library's key encoding: an 8-byte id,
    # then 2 bytes of key length in bits, then the point as two python longs.
    # (There are also 2 bytes giving the encoded point length, but Decoder
    # takes care of that.)
    _, _, point = encoding.Decoder(self.key.encode()).int(8).int(2).point(2).out()
    # '04' indicates that the point is in uncompressed form.
    bits = univ.BitString("'%s%s%s'H" % ('04',
                                         longToEvenLengthHexString(point[0]),
                                         longToEvenLengthHexString(point[1])))
    spki = rfc2459.SubjectPublicKeyInfo()
    spki.setComponentByName('algorithm', algId)
    spki.setComponentByName('subjectPublicKey', bits)
    return spki
def create(cls, parameters):
    """Build an ecPublicKey SubjectPublicKeyInfo for a named elliptic curve."""
    curve = parameters["curve"]
    if curve.oid is None:
        # TODO not implemented
        #domain_params = SpecifiedECDomain()
        #domain_params["version"] = 1
        #asn1["algorithm"]["parameters"] = domain_params
        raise NotImplementedError(
            "Creation of explicitly specified elliptic curve domain parameters (i.e., non-named curves) is not implemented in x509sak"
        )
    spki = rfc2459.SubjectPublicKeyInfo()
    spki["algorithm"][
        "algorithm"] = OIDDB.KeySpecificationAlgorithms.inverse(
            "ecPublicKey").to_asn1()
    # Named curve: parameters are the DER-encoded curve OID.
    spki["algorithm"]["parameters"] = pyasn1.codec.der.encoder.encode(
        curve.oid.to_asn1())
    point_bytes = curve.point(parameters["x"], parameters["y"]).encode()
    spki["subjectPublicKey"] = ASN1Tools.bytes2bitstring(point_bytes)
    return spki
def encode(cls, pki_key: object, **kwargs):
    """Build and serialize an X.509 certificate for *pki_key*.

    kwargs (all optional): serial_number, not_before/not_after (datetime),
    issuer/subject (RDN strings), signing_key, signing_alg, signature_value
    (pre-computed signature bytes), ca (bool — adds SKI + BasicConstraints),
    issuer_unique_id/subject_unique_id.

    Raises ValueError when no signing algorithm is given and the signing key
    has no X509_SIGNING_DEFAULT.
    """
    # Algorithm ID
    # ALG_OID may be a fixed OID string or a callable deriving it from the key.
    alg_oid = cls.ALG_OID if type(
        cls.ALG_OID) is str else cls.ALG_OID(pki_key)
    alg_id = rfc2459.AlgorithmIdentifier()
    alg_id['algorithm'] = ObjectIdentifier(alg_oid)
    if cls.PARAM_ENCODER:
        alg_id['parameters'] = Any(
            encoder.encode(cls.PARAM_ENCODER.encode(pki_key)))
    # Serial number
    serial_num = rfc2459.CertificateSerialNumber(
        kwargs.get('serial_number') or 0)
    # Validity (time valid); defaults to one year starting now.
    not_before = kwargs.get('not_before') or datetime.now()
    not_after = kwargs.get('not_after') or not_before.replace(
        year=not_before.year + 1)
    validity = rfc2459.Validity()
    validity['notBefore'] = rfc2459.Time()
    validity['notBefore']['utcTime'] = UTCTime.fromDateTime(not_before)
    validity['notAfter'] = rfc2459.Time()
    validity['notAfter']['utcTime'] = UTCTime.fromDateTime(not_after)
    # Public key serialization
    pub_info = rfc2459.SubjectPublicKeyInfo()
    pub_info['algorithm'] = alg_id
    pub_info['subjectPublicKey'] = cls.PUB_KEY_ENCODER.encode(pki_key)
    # Issuer RDN
    issuer = rfc2459.Name()
    issuer.setComponentByPosition(
        0, parse_rdn(kwargs.get('issuer') or 'CN=ca'))
    # Subject RDN
    subject = rfc2459.Name()
    subject.setComponentByPosition(
        0, parse_rdn(kwargs.get('subject') or 'CN=ca'))
    # Signature algorithm: self-sign with pki_key unless a signing key given.
    signing_key = kwargs.get('signing_key') or pki_key
    if not (kwargs.get('signing_alg')
            or hasattr(signing_key, "X509_SIGNING_DEFAULT")):
        raise ValueError(
            "'signing_alg' not specified and 'signing_key' has no default algorithm"
        )
    signing_alg = (kwargs.get('signing_alg')
                   or signing_key.X509_SIGNING_DEFAULT).value
    signature_alg = rfc2459.AlgorithmIdentifier()
    signature_alg['algorithm'] = SIGNING_ALG_OIDS[signing_alg.name]
    if cls.PARAM_ENCODER:
        signature_alg['parameters'] = Any(
            encoder.encode(cls.PARAM_ENCODER.encode(pki_key)))
    # Extensions: [3] EXPLICIT in TBSCertificate, hence the subtype tag.
    extensions = rfc2459.Extensions().subtype(
        explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))
    if kwargs.get('ca') and kwargs.get('ca') == True:
        # SKI (Subject Key Identifier, OID 2.5.29.14): SHA1 of the public key.
        pkey_bytes = Bytes(int(pub_info['subjectPublicKey']))
        ski_ext = rfc2459.Extension()
        ski_ext['extnID'] = ObjectIdentifier([2, 5, 29, 14])
        ski_ext['extnValue'] = OctetString(
            encoder.encode(
                rfc2459.SubjectKeyIdentifier(SHA1().hash(pkey_bytes))))
        # CA basic constraint (OID 2.5.29.19), marked critical.
        ca_value = rfc2459.BasicConstraints()
        ca_value.setComponentByName('cA', True)
        ca_ext = rfc2459.Extension()
        ca_ext.setComponentByName('extnID', '2.5.29.19')
        ca_ext.setComponentByName('critical', True)
        ca_ext.setComponentByName('extnValue',
                                  OctetString(encoder.encode(ca_value)))
        extensions.setComponentByPosition(0, ski_ext)
        extensions.setComponentByPosition(1, ca_ext)
    # Put together the TBSCert
    tbs_cert = rfc2459.TBSCertificate()
    tbs_cert['version'] = 2  # version value 2 encodes X.509 v3
    tbs_cert['serialNumber'] = serial_num
    tbs_cert['signature'] = signature_alg
    tbs_cert['issuer'] = issuer
    tbs_cert['validity'] = validity
    tbs_cert['subject'] = subject
    tbs_cert['subjectPublicKeyInfo'] = pub_info
    tbs_cert['issuerUniqueID'] = kwargs.get('issuer_unique_id') or 10
    tbs_cert['subjectUniqueID'] = kwargs.get('subject_unique_id') or 11
    if len(extensions):
        tbs_cert['extensions'] = extensions
    # Inject or compute the TBSCert signature
    if kwargs.get('signature_value') is not None:
        sig_value = Bytes.wrap(kwargs.get('signature_value')).int()
    else:
        encoded_tbs = encoder.encode(tbs_cert,
                                     asn1Spec=rfc2459.TBSCertificate())
        sig_value = signing_alg.sign(signing_key, encoded_tbs)
    # Build the Cert object
    cert = rfc2459.Certificate()
    cert['tbsCertificate'] = tbs_cert
    cert['signatureAlgorithm'] = signature_alg
    cert['signatureValue'] = sig_value
    encoded = encoder.encode(cert, asn1Spec=rfc2459.Certificate())
    return X509Certificate.transport_encode(encoded, **kwargs)
class ActionScrape(BaseAction):
    """Scrape a file for embedded PEM blobs and raw DER structures and write
    each unique finding out to a separate file."""

    # One handler per recognizable DER structure: the pyasn1 spec used to try
    # decoding, the output naming/extension, an optional sanity-check hook and
    # a precedence (lower value = tried first).
    _DERHandler = collections.namedtuple("DERHandler", [
        "asn1_spec", "data_type", "extension", "pem_marker",
        "sanity_check_fn", "precedence"
    ])
    # Matches the opening line of a PEM block and captures its marker text.
    _PEM_BEGIN = re.compile("^-----BEGIN (?P<marker>[ A-Za-z0-9]+)-----")
    # PEM marker -> short data-type name used in output filenames.
    _MARKERS = {
        "CERTIFICATE": "crt",
        "OPENSSH PRIVATE KEY": "openssh_key",
        "DSA PRIVATE KEY": "dsa_key",
        "RSA PRIVATE KEY": "rsa_key",
        "EC PRIVATE KEY": "ec_key",
        "PUBLIC KEY": "pubkey",
        "X509 CRL": "crl",
        "CERTIFICATE REQUEST": "csr",
        "NEW CERTIFICATE REQUEST": "csr",
    }
    _DER_CLASSES = { handler_class.data_type: handler_class for handler_class in (
        _DERHandler(asn1_spec = rfc2459.Certificate(), data_type = "crt", extension = "der", pem_marker = "CERTIFICATE", sanity_check_fn = None, precedence = 10),
        _DERHandler(asn1_spec = rfc2437.RSAPrivateKey(), data_type = "rsa_key", extension = "der", pem_marker = "RSA PRIVATE KEY", sanity_check_fn = _DERSanityCheck.check_rsa_key, precedence = 20),
        _DERHandler(asn1_spec = rfc2459.DSAPrivateKey(), data_type = "dsa_key", extension = "der", pem_marker = "DSA PRIVATE KEY", sanity_check_fn = None, precedence = 20),
        _DERHandler(asn1_spec = rfc2459.SubjectPublicKeyInfo(), data_type = "pubkey", extension = "der", pem_marker = "PUBLIC KEY", sanity_check_fn = None, precedence = 30),
        _DERHandler(asn1_spec = x509sak.ASN1Models.ECPrivateKey(), data_type = "ec_key", extension = "der", pem_marker = "EC PRIVATE KEY", sanity_check_fn = _DERSanityCheck.check_ec_key, precedence = 20),
        _DERHandler(asn1_spec = x509sak.ASN1Models.PFX(), data_type = "pkcs12", extension = "p12", pem_marker = None, sanity_check_fn = None, precedence = 0),
        _DERHandler(asn1_spec = x509sak.ASN1Models.DSASignature(), data_type = "dsa_sig", extension = "der", pem_marker = None, sanity_check_fn = _DERSanityCheck.check_dsa_sig, precedence = 40),
    )}
    # Sorted list of all known DER data-type names (for CLI help/validation).
    handler_classes = sorted(list(_DER_CLASSES.keys()))

    def __init__(self, cmdname, args):
        """Validate arguments, set up the output directory and statistics,
        then run the scrape engine over the input file."""
        BaseAction.__init__(self, cmdname, args)

        # Plausibilize input parameters
        kwargs_checker = KwargsChecker(optional_arguments = set(self._DER_CLASSES.keys()))
        kwargs_checker.check(self._args.include_dertype, hint = "DER classes to be included")
        kwargs_checker.check(self._args.exclude_dertype, hint = "DER classes to be excluded")

        # Plausibilize output directory
        if os.path.exists(self._args.outdir) and (not self._args.force):
            raise Exception("Directory %s already exists. Remove it first or use --force." % (self._args.outdir))
        try:
            os.makedirs(self._args.outdir)
        except FileExistsError:
            pass

        # Determine active DERHandler classes: start from the include set (or
        # all types when none given), then drop the excluded ones.
        if len(self._args.include_dertype) == 0:
            active_der_types = set(self._DER_CLASSES.keys())
        else:
            active_der_types = set(self._args.include_dertype)
        active_der_types -= set(self._args.exclude_dertype)
        self._active_der_types = [ self._DER_CLASSES[class_name] for class_name in active_der_types ]
        # Lower precedence value is tried first; data_type breaks ties.
        self._active_der_types.sort(key = lambda handler: (handler.precedence, handler.data_type))

        self._stats = ActionScrapeStats(self._args)
        self._stats.set_active_der_types([ handler_class.data_type for handler_class in self._active_der_types ])
        # Intervals of already-recorded matches (for nested-match suppression)
        # and SHA-256 hashes of already-written blobs (for deduplication).
        self._matches = Intervals()
        self._hashes = set()

        engine = ScrapeEngine(self._args.filename)
        if not self._args.no_pem:
            engine.search(self._find_pem, b"-----BEGIN ", min_length = 52, max_length = 32 * 1024)
        if (not self._args.no_der) and (len(self._active_der_types) > 0):
            self._log.debug("Looking for %d DER type(s): %s", len(self._active_der_types), ", ".join(handler.data_type for handler in self._active_der_types))
            # 0x30 is the DER tag for a SEQUENCE, the outermost element of
            # every structure we look for.
            engine.search(self._find_der, bytes.fromhex("30"), min_length = 2, max_length = 32 * 1024)
        end_offset = engine.commence(start_offset = self._args.seek_offset, length = self._args.analysis_length, progress_callback = self._progress_callback)
        self._stats.finish(end_offset)
        self._stats.dump()
        if self._args.write_json is not None:
            JSONTools.write_to_file(self._stats.as_dict(), self._args.write_json)

    def _progress_callback(self, position, total_length, elapsed_secs):
        """Log scan progress (position, percentage and average throughput)."""
        self._log.debug("Scan at %.0f MiB of %.0f MiB, %.1f%%. Average speed %.1f MiB/sec", position / 1024 / 1024, total_length / 1024 / 1024, position / total_length * 100, position / 1024 / 1024 / elapsed_secs)

    def _is_nested_match(self, offset, length):
        """Return True when this span lies entirely inside an already-recorded
        match (and nested extraction is disabled); otherwise record it."""
        if self._args.extract_nested:
            # Completely disregard if we've already captured this.
            return False
        interval = Interval.begin_length(offset, length)
        if self._matches.fully_contained_in_subinterval(interval):
            # We already have this match.
            return True
        else:
            self._matches.add(interval)
            return False

    def _is_known_blob(self, data):
        """Return True when an identical blob was already recorded (unless
        non-unique blobs are explicitly allowed); otherwise remember its hash."""
        if self._args.allow_non_unique_blobs:
            # We record the exact same file twice, always.
            return False
        blob_hash = hashlib.sha256(data).digest()
        if blob_hash in self._hashes:
            return True
        else:
            self._hashes.add(blob_hash)
            return False

    def _record_finding(self, offset, data_type, extension, data, encode_pem_marker = None, orig_extension = None):
        """Record one finding: update statistics and, unless it is empty,
        nested or a duplicate, write it to the output directory (PEM-encoded
        when encode_pem_marker is given, raw otherwise)."""
        if orig_extension is None:
            orig_extension = data_type
        if len(data) == 0:
            self._stats.record_finding(offset, len(data), data_type, orig_extension, "discard:zero_length")
            return
        if self._is_nested_match(offset, len(data)):
            self._stats.record_finding(offset, len(data), data_type, orig_extension, "discard:nested")
            self._log.debug("Found %s/%s at offset 0x%x, length %d bytes, not recording nested match.", data_type, orig_extension, offset, len(data))
            return
        if self._is_known_blob(data):
            self._stats.record_finding(offset, len(data), data_type, orig_extension, "discard:non-unique")
            self._log.debug("Found %s/%s at offset 0x%x, length %d bytes, not recording non-unique match.", data_type, orig_extension, offset, len(data))
            return
        # Substitute the finding's metadata into the user-supplied filename mask.
        filename_args = {
            "otype": orig_extension,
            "type": data_type,
            "offset": offset,
            "ext": extension,
        }
        filename = self._args.outdir + "/" + (self._args.outmask % filename_args)
        self._stats.record_finding(offset, len(data), data_type, orig_extension, "written", filename)
        self._log.info("Found %s/%s at offset 0x%x, length %d bytes, saved as %s", data_type, orig_extension, offset, len(data), filename)
        if encode_pem_marker is not None:
            output_data = (PEMDataTools.data2pem(data, encode_pem_marker) + "\n").encode()
        else:
            output_data = data
        with open(filename, "wb") as f:
            f.write(output_data)

    def _find_pem(self, offset, data):
        """Engine callback: try to parse a PEM block at *offset* and record
        its decoded DER payload."""
        self._stats.pem_potential_match()
        textdata = data.decode("ascii", errors = "ignore")
        result = self._PEM_BEGIN.match(textdata)
        if result is None:
            return
        result = result.groupdict()
        marker = result["marker"]
        # Build the full BEGIN...END pattern for the specific marker we saw.
        full_re = re.compile("-----BEGIN %s-----(?P<pem_data>.*?)-----END %s-----" % (marker, marker), flags = re.DOTALL | re.MULTILINE)
        result = full_re.match(textdata)
        if result is None:
            return
        result = result.groupdict()
        pem_data = result["pem_data"]
        # Strip all whitespace before base64-decoding the payload.
        pem_data = pem_data.replace("\r", "")
        pem_data = pem_data.replace("\n", "")
        pem_data = pem_data.replace("\t", "")
        pem_data = pem_data.replace(" ", "")
        der_data = base64.b64decode(pem_data)
        self._stats.pem_successful_decode()
        data_type = self._MARKERS.get(marker, "unknown")
        self._record_finding(offset = offset, data_type = data_type, extension = "pem", data = der_data, encode_pem_marker = marker)

    def _find_der(self, offset, data):
        """Engine callback: try each active DER handler against the bytes at
        *offset* and record every structure that decodes (and passes its
        sanity check, when one is registered)."""
        self._stats.der_potential_match()
        for der_candidate in self._active_der_types:
            try:
                self._stats.der_attempt_decode()
                (asn1, tail) = pyasn1.codec.der.decoder.decode(data, asn1Spec = der_candidate.asn1_spec)
                # Trim trailing bytes the decoder did not consume.
                if len(tail) == 0:
                    asn1_data = data
                else:
                    asn1_data = data[:-len(tail)]
                self._stats.der_successful_decode()
                if (not self._args.disable_der_sanity_checks) and (der_candidate.sanity_check_fn is not None):
                    # We want sanity checks enabled and for the successfully
                    # deserialized ASN.1 blob there is a handler registered.
                    # Execute it (it'll throw an exception on failure, which
                    # we'll catch).
                    der_candidate.sanity_check_fn(asn1)
                self._stats.der_passed_plausibility()
                if self._args.keep_original_der or (der_candidate.pem_marker is None):
                    # Should not or cannot re-encode as PEM, write DER file
                    self._record_finding(offset = offset, data_type = der_candidate.data_type, extension = der_candidate.extension, data = asn1_data)
                else:
                    self._record_finding(offset = offset, data_type = der_candidate.data_type, extension = "pem", data = asn1_data, encode_pem_marker = der_candidate.pem_marker, orig_extension = der_candidate.extension)
            except pyasn1.error.PyAsn1Error as e:
                # Not a valid instance of this candidate type; try the next.
                pass
            except _DERSanityCheck.SanityCheckFailedException as e:
                self._log.debug("Potential %s blob encountered at offset 0x%x, but failed sanity check: %s", der_candidate.data_type, offset, str(e))
                self._stats.der_failed_plausibility()