def cose_sign1_sign(private_key, data, data_is_detached=False, cert_bytes=None):
    if not cert_bytes:
        unprotected_headers = {}
    else:
        unprotected_headers = {33: cert_bytes}
    protected_headers = {COSE_LABEL_ALG: COSE_ALG_ECDSA_256}
    encoded_protected_headers = cbor.dumps(protected_headers)
    external_aad = b""
    to_be_signed = cbor.dumps(
        ["Signature1", encoded_protected_headers, external_aad, data])
    der_signature = private_key.sign(to_be_signed, ec.ECDSA(hashes.SHA256()))
    # print("sign: der_signature: %s" % binascii.hexlify(der_signature))
    parsed_der_signature = Sequence.load(der_signature)
    r = parsed_der_signature[0]
    s = parsed_der_signature[1]
    # print("sign: r=%d s=%d" % (r, s))
    encoded_r_and_s = (int(r).to_bytes(length=32, byteorder="big") +
                       int(s).to_bytes(length=32, byteorder="big"))
    cose_sign1 = [
        encoded_protected_headers,
        unprotected_headers,
        bytes(data) if not data_is_detached else b"",
        encoded_r_and_s
    ]
    return cose_sign1
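# Hedged usage sketch for cose_sign1_sign above, assuming the `cryptography`
# and `cbor` libraries the function already relies on. Key, payload, and
# variable names below are illustrative only.
from cryptography.hazmat.primitives.asymmetric import ec

signing_key = ec.generate_private_key(ec.SECP256R1())  # ephemeral P-256 key
payload = b"example payload"
cose_sign1 = cose_sign1_sign(signing_key, payload)
encoded_message = cbor.dumps(cose_sign1)  # CBOR-encode the COSE_Sign1 array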
def compute_issuer_public_keys(cert):
    # Recover candidate issuer public keys from the certificate's own ECDSA
    # signature: Q = r^-1 * (s*R - z*G), where R is one of the two curve
    # points with x-coordinate r.
    sig = Sequence.load(cert["signature_value"].native)
    r = sig[0].native
    s = sig[1].native
    tbs_hash = sha512(cert["tbs_certificate"].dump()).digest()
    z = int.from_bytes(tbs_hash, byteorder="big")
    u1 = (z * pow(s, P521.q - 2, P521.q)) % P521.q
    u2 = (pow(r, P521.q - 2, P521.q) * s) % P521.q
    ky = (r**3 + P521.a * r + P521.b) % P521.p
    y0 = pow(ky, (P521.p + 1) // 4, P521.p)
    y1 = P521.p - y0
    Q0 = u2 * (Point(r, y0, curve=P521) - u1 * P521.G)
    Q1 = u2 * (Point(r, y1, curve=P521) - u1 * P521.G)
    issuer_pub = None
    if key_id(Q0) == cert.authority_key_identifier:
        issuer_pub = Q0
    elif key_id(Q1) == cert.authority_key_identifier:
        issuer_pub = Q1
    if issuer_pub:
        return (issuer_pub, )
    else:
        return (Q0, Q1)
def recompute_nonces(data, curve_name, hash_algo):
    try:
        curve = ec.get_curve(curve_name)
    except Exception:
        curve = ec.load_curve(curve_name)
    verified = False
    for elem in data:
        if elem["nonce"] is not None:
            continue
        if elem["index"] % (len(data) // 10) == 0:
            print(".", end="")
        if hash_algo is None:
            hm = int.from_bytes(elem["data"], byteorder="big") % curve.group.n
        else:
            h = hashlib.new(hash_algo, elem["data"])
            hm = int(h.hexdigest(), 16)
            if h.digest_size * 8 > curve.group.n.bit_length():
                # Truncate the hash to the bit length of the group order.
                hm >>= h.digest_size * 8 - curve.group.n.bit_length()
            hm = hm % curve.group.n
        r, s = Sequence.load(elem["signature"]).native.values()
        r = ec.Mod(r, curve.group.n)
        s = ec.Mod(s, curve.group.n)
        # ECDSA: s = k^-1 * (hm + r*d) mod n, so the nonce k = s^-1 * (hm + r*d).
        rx = r * elem["priv"]
        hmrx = hm + rx
        nonce = s.inverse() * hmrx
        if not verified:
            res = int(nonce) * curve.g
            if int(res.x) % curve.group.n != int(r):
                print("Nonce recomputation couldn't verify!")
                raise ValueError
            else:
                print("Nonce recomputation works")
                verified = True
        elem["nonce"] = int(nonce)
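# Hedged sketch of the identity recompute_nonces relies on: for an ECDSA
# signature (r, s) over message hash h, made with private key d and nonce k,
#     s = k^-1 * (h + r*d) mod n   =>   k = s^-1 * (h + r*d) mod n.
# Toy check of the algebra with made-up values (not a real signature); n is
# the P-256 group order, and d, k, h, r are arbitrary placeholders.
n = 0xffffffff00000000ffffffffffffffffbce6faada7179e84f3b9cac2fc632551
d, k, h, r = 0x1234, 0x5678, 0x9abc, 0xdef0
s = pow(k, -1, n) * (h + r * d) % n
assert pow(s, -1, n) * (h + r * d) % n == k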
def attack(curve, hash, data, pubkey, params):
    """Wait for signatures to come in and start attack threads."""
    signatures = []
    threads = {}
    skip = params["attack"]["skip"]
    if skip != 0:
        print("[ ] Skipping {} signatures.".format(skip))
        for _ in range(skip):
            signature_queue.get()
    thread_start = time.time()
    while True:
        while True:
            try:
                if not signatures:
                    elapsed, resp = signature_queue.get()
                    thread_start = time.time()
                else:
                    elapsed, resp = signature_queue.get_nowait()
                try:
                    r, s = Sequence.load(resp).native.values()
                except ValueError:
                    print("[x] Failed to parse signature:", hexlify(resp),
                          file=sys.stderr)
                    continue
                sig = construct_signature(curve, hash, data, r, s, elapsed)
                ind = bisect(signatures, sig)
                signatures.insert(ind, sig)
            except Empty:
                break
        if solution_found.is_set():
            return
        threads = {
            n: thread for n, thread in threads.items() if thread.is_alive()
        }
        if len(signatures) >= params["attack"]["start"] and \
                len(threads) < params["max_threads"] and \
                len(signatures) not in threads and \
                (not threads or
                 max(threads.keys()) < len(signatures) - params["attack"]["step"]):
            thread_name = str(len(signatures))
            print("[ ] Starting attack thread {}.".format(thread_name))
            sub_sigs = list(signatures[:params["dimension"]])
            solution = lambda skey: solution_found.set()
            solve_thread = Solver(curve, sub_sigs, pubkey, params, solution,
                                  len(signatures))
            solve_thread.name = thread_name
            solve_thread.daemon = True
            threads[len(signatures)] = solve_thread
            solve_thread.start()
        if len(signatures) > 1:
            print("[ ] Have {} signatures, {:.02f}/s.".format(
                len(signatures),
                len(signatures) / (time.time() - thread_start)))
        time.sleep(10)
def out(i, elapsed, resp):
    try:
        r, s = Sequence.load(resp).native.values()
    except ValueError:
        print("[x] Failed to parse signature:", hexlify(resp), file=sys.stderr)
        return
    # int.to_bytes needs an explicit length; derive it from the bit length.
    writer.writerow({
        "elapsed": elapsed,
        "r": hexlify(r.to_bytes((r.bit_length() + 7) // 8, byteorder="big")),
        "s": hexlify(s.to_bytes((s.bit_length() + 7) // 8, byteorder="big"))
    })
    if i % 50 == 0:
        print("[ ] Collected {} signatures.".format(i), file=sys.stderr)
def file_dump(fname, complete=True, handler=gzip, out_file_prefix='dumped/'):
    with open(join(cache_folder, fname), 'rb') as f:
        print(f"\n===== {fname} =====", file=sys.stderr)
        data = f.read()
    i = int.from_bytes(data[12:16], LE)  # URL length
    url = str(data[24:24 + i])[2:-1]
    print(util.text_on_line(url, util.console_size()[0], '|'), file=sys.stderr)
    basename = out_file_prefix + fname
    eoc = re.search(FOOTER, data).start()  # end of content
    # BUG?: cannot use data[...]:
    starts = [
        _.start() for _ in re.finditer(b'\x30\x82', data)
        if _.start() > eoc + 52
    ]  # possible certificate starts
    if handler:
        with open(f'{basename}_tmp', 'ab') as f1, \
                open(f'{basename}_answer', 'ab') as f2:
            m, n = handler(data[(24 + i):eoc], out_file=f1)  # starts[0]
            # print(data[eoc:])
            if data[eoc + 52:]:
                dissected, mime, _ = http_dissect(data[eoc + 52:])
                f2.write(dissected)
            else:
                print('no associated response data')
        real_name = re.search(r'/([^/\?]+)(\?.*)?$', url)[1][-150:]  # cut to 150 chars to avoid OSError
        ext = MIME[mime] if '.' not in real_name else ''
        if m > 0 or n > 0:
            os.rename(f'{basename}_tmp', f'{basename}_{real_name}{ext}')
        if m <= 0 and n <= 0:
            os.remove(f'{basename}_tmp')
        # if n <= 0: os.remove(f'{basename}_answer')
    end = i = 0
    for start in starts:
        if start < end:
            continue
        parsed = Sequence.load(data[start:]).dump()
        end = start + len(parsed)
        with open(f'{basename}_cert{i}.der', 'wb') as ce:
            ce.write(parsed)
        i += 1
        # if system("openssl x509 -inform DER -noout -text", shell=True, input=data[start:end]).returncode == 0:
    if DUMP_LEVEL == 'all':
        with open(f'{basename}_the_rest', 'wb') as tr:
            tr.write(data[end:])
def decode_subjectaltname(self, data, counter):
    """
    Decodes given ASN.1 encoded subjectAltName data and returns an array
    of URL strings.
    """
    result = []
    parsed = Sequence.load(data)
    for i in range(0, len(parsed)):
        subjectaltname = parsed[i].native
        if isinstance(subjectaltname, (int, long)):
            try:
                subjectaltname = binascii.unhexlify('%x' % subjectaltname)
            except TypeError:
                pass
        elif isinstance(subjectaltname, basestring):
            pass
        else:
            self.helper.log_warning(
                "decode_subjectaltname: Unknown instance type %s found in "
                "entry %d. ASN1 data for debugging: %s" %
                (type(subjectaltname), counter, binascii.hexlify(data)))
            subjectaltname = ''
        subjectaltname_utf8 = self.fix_string_encoding(subjectaltname)
        if len(subjectaltname_utf8) > 0:
            result.append(subjectaltname_utf8)
    return result
def from_DER(data: bytes) -> "SignatureResult":
    """Load an ECDSA signature from ASN.1 DER data."""
    r, s = Sequence.load(data).native.values()
    return SignatureResult(r, s)
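# Hedged usage sketch for from_DER above: parse a DER-encoded ECDSA signature
# (a SEQUENCE of two INTEGERs, r and s). The hex value is a minimal
# illustrative encoding of r=1, s=2, not a real signature.
der_sig = bytes.fromhex("3006020101020102")  # SEQUENCE { INTEGER 1, INTEGER 2 }
sig = from_DER(der_sig)  # expected to hold r=1, s=2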
def key_id(point):
    # RFC 5280 key identifier: SHA-1 of the subjectPublicKey BIT STRING,
    # skipping the leading unused-bits octet of its contents.
    public_key_info = Sequence.load(
        unarmor(encode_public_key(point).encode())[2])
    return sha1(public_key_info[1].contents[1:]).digest()
from asn1crypto.core import Sequence

data = bytes.fromhex(
    '6082055f06062b0601050502a08205533082054fa00d300b06092a864882f712010202a282053c048205386082053406092a864886f71201020201006e8205233082051fa003020105a10302010ea20703050020000000a3820467618204633082045fa003020105a10b1b09544553542e434f5250a2263024a003020102a11d301b1b04686f73741b1377696e3230313961642e746573742e636f7270a38204213082041da003020112a103020101a282040f0482040b9b26704503ae5ef7e345418cacd64767b86951112770ae3a30ac3b4a6b30230985b44534839420ed6ce9d0d6f6652d52ce10582282c4dc2e44b82058f2d2ebd06cd6d4131fed01e19ba50d53bf359dd3f506d13b4e56094da7f8e69472784cf42178556ba6be5e0a59adb18781acdc6c7d7291d71cb7612b72c3166ad4b512e12c8700e28675f70b771d6b0c5e626f832d80f4fc3d12a0dbfd0a2457807890de7790f1b9ac11ce36a5b0e15470d8c505ac612f23789b69e301c40a6b30ac5946282da91ce06cb34ed27e985b5a79c6cf6b093d70a10e2de5a8d3d9975482dfcb1f5ad17f2b709e6e445a07c0a356e1940d6b00d266f95d4eb31cd9228d414d8a90b9f18f0728ca89cba9058b8a0a7c915e42e02c2cf8ee147ce55fb829f67f944aeaee125d7ce09158cdeb77eda9377f2cfbf085e225362bb0083355bab853c65a0544657219a0de94f67fdf5b5f41e5063a0b20cfe7d595081b0db47b6a975a1f6cddac5baeb8c6f1fc4299dcedc7cbde791d74f98cf2986446154284a2b147d2484b251aa23680c71d882264d995f9dce09c63f5b6e51e8fdcde7ccb817e5ab1bd8e73c81c24b32b68c21e8185c5572ada086ce067818c930368b6867abf42bdff77ab24cf74f943600488246b4ebe36e8582f00a60e5aa63fa38274d330981d7ccfd54d39ce193b698cdfff97789f36433cfdbebfcdf41ae98449fe5777388bc47d89cfdba1873d78d759d491429c391e1654d8dc3fa37bef75235c934041202ab488acc79ea991c43c2dd3b75af7501f0a6f67034165d3649dd461c4895436c6f50ee30204eca1da1fcf1c127461a4327b8c65a7a88fa9c7003caccf7c2831a2ef4c94abf6f4694a219b2b9cc32aa36daa2ed2a4d483d24036db747604956b00451916b0fd53576131c2e0e4d0dd157a333b59880bb82cfa149d80c3f1344c387bcbbe029e5d37a53a551abd6308996ce0f5cdecc8163bc413ab84d15d6f17bf9c9b59218d546165bfc5b0e8641b27bab5bebb5c4c2eecc1d3d6387db714eb4de535fd9056cb7e379784abe64be6fb9b30e1ff6c101f7750a4cd3cf04cf9af6eff81f949a9cc2f6d879b413669dd62010b8caa8c88671428e04db89addaa224cc100e9c4fccb28edf3c0b94830896802d3130f36f20cc6898d13027a4ae27f9a9afedafc10e15d38d48f47643d90c476a9f8d2008533f32f64801d1996f5751bd7b35e0ce668cceb8b5e0e18bc1a1771cddeb3bbad3ab6a3f95a23503f15d984ef84c99e1f0d7a7e4a9ebe8aaf31d7406c63477e59413dcb3dffc59576e6646da5737f11a2182552ac95591273ab1e677bbf7b13828987076f9a8eef6f34ccdce2d12f256edf8470fd829af7d77fe0860cb761c1798eb07e6fa9e71be94c36a468c17f628b4553d3651e56bab4047384c43a28c7860b3da19ced8012edced2523a0ddcfb410fbd6889a4819e30819ba003020117a281930481901a0779c178be0a7253515f90f360fbdc60d2da211c47a1c3a615714894ba618dba8523ac78784509d77f26535e8b2c8927b29f83c20e5f49204b662bfa66be6629128e92fec86dde8ab3cd661cf27038db78dfe6bd1507970c4b0282df6a1a9a2c65dc939de8925bf539b56c94080d748423a456245bcdaae5c9697684e6e4a480c18e51491ec0e2ede8f8ff52d19d84'
)

parsed = Sequence.load(data)
parsed.debug()