def test_hweight_margin(self):
    """A freshly drawn 128-bit random value must look plausibly random,
    while a constant with a single set bit must be flagged as biased."""
    random_value = int.from_bytes(os.urandom(16), byteorder="little")
    analysis = NumberTheory.hamming_weight_analysis(random_value)
    self.assertTrue(analysis.plausibly_random)

    biased_value = 0x10000000000000
    analysis = NumberTheory.hamming_weight_analysis(biased_value)
    self.assertFalse(analysis.plausibly_random)
def analyze(self, signature_alg_oid, signature_alg_params, signature, root_cert=None):
    """Analyze a certificate's signature algorithm and signature value.

    Looks up the signature algorithm by OID, warns about alternate
    (deprecated) OIDs, determines the hash function in use and -- for
    ECDSA/DSA signatures, when a matching root certificate is supplied --
    decodes the signature and checks its R/S components for bit bias.

    Returns a dict with "name", "pretty", optional "sig_fnc"/"hash_fnc"
    sub-analyses and the accumulated "security" judgements.
    """
    judgements = SecurityJudgements()

    def check_bit_bias(value, min_bit_length, judgement_code, description):
        # Add a judgement when the Hamming weight of 'value' is
        # implausible for a randomly chosen integer of that width.
        # Replaces four copy-pasted checks in the original.
        nonlocal judgements
        hweight_analysis = NumberTheory.hamming_weight_analysis(
            value, min_bit_length=min_bit_length)
        if not hweight_analysis.plausibly_random:
            judgements += SecurityJudgement(
                judgement_code,
                "Hamming weight of %s is %d at bitlength %d, but expected a weight between %d and %d when randomly chosen; this is likely not coincidential."
                % (description, hweight_analysis.hweight,
                   hweight_analysis.bitlen, hweight_analysis.rnd_min_hweight,
                   hweight_analysis.rnd_max_hweight),
                commonness=Commonness.HIGHLY_UNUSUAL)

    signature_alg = SignatureAlgorithms.lookup("oid", signature_alg_oid)
    if signature_alg is None:
        # Unknown algorithm: record the judgement and bail out early.
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_Signature_Function_Unknown,
            "Certificate has unknown signature algorithm with OID %s. Cannot make security determination."
            % (signature_alg_oid),
            commonness=Commonness.HIGHLY_UNUSUAL,
            compatibility=Compatibility.LIMITED_SUPPORT)
        result = {
            "name": str(signature_alg_oid),
            "pretty": str(signature_alg_oid),
            "security": judgements,
        }
        return result

    if isinstance(signature_alg.value.oid, (tuple, list)):
        # Have more than one OID for this algorithm; flag use of any
        # OID other than the preferred (first) one.
        if signature_alg.value.oid[0] != signature_alg_oid:
            judgements += SecurityJudgement(
                JudgementCode.X509Cert_Signature_Function_DeprecatedOID,
                "Signature algorithm uses alternate OID %s for algorithm %s. Preferred OID would be %s."
                % (signature_alg_oid, signature_alg.name,
                   signature_alg.value.oid[0]),
                commonness=Commonness.HIGHLY_UNUSUAL,
                compatibility=Compatibility.LIMITED_SUPPORT)

    if signature_alg.value.hash_fnc is not None:
        # Signature algorithm already implies a concrete hash function, already done.
        hash_fnc = signature_alg.value.hash_fnc
    else:
        # Hash function depends on the algorithm parameters and is not implied.
        (hash_fnc, new_judgements) = self._determine_hash_function(
            signature_alg, signature_alg_params)
        judgements += new_judgements

    if signature_alg.value.sig_fnc == SignatureFunctions.ecdsa:
        # Decode ECDSA signature
        asn1_details = ASN1Tools.safe_decode(
            signature, asn1_spec=rfc3279.ECDSA_Sig_Value())
        judgements += self._DER_VALIDATOR_ECDSA_SIGNATURE.validate(asn1_details)
        if (asn1_details.asn1 is not None) and (root_cert is not None):
            # Check that this is really a potential parent CA certificate
            if root_cert.pubkey.pk_alg.value.cryptosystem == Cryptosystems.ECC_ECDSA:
                ca_curve = root_cert.pubkey.curve
                check_bit_bias(
                    int(asn1_details.asn1["r"]), ca_curve.field_bits,
                    JudgementCode.X509Cert_Signature_ECDSA_R_BitBiasPresent,
                    "ECDSA signature R parameter")
                check_bit_bias(
                    int(asn1_details.asn1["s"]), ca_curve.field_bits,
                    JudgementCode.X509Cert_Signature_ECDSA_S_BitBiasPresent,
                    "ECDSA signature S parameter")
    elif signature_alg.value.sig_fnc == SignatureFunctions.dsa:
        # Decode DSA signature
        asn1_details = ASN1Tools.safe_decode(
            signature, asn1_spec=rfc3279.Dss_Sig_Value())
        judgements += self._DER_VALIDATOR_DSA_SIGNATURE.validate(asn1_details)
        # Fix: a second, redundant 'root_cert is not None' check was
        # nested inside this guard; it is already implied here.
        if (asn1_details.asn1 is not None) and (root_cert is not None):
            if root_cert.pubkey.pk_alg.value.cryptosystem == Cryptosystems.DSA:
                field_width = root_cert.pubkey.q.bit_length()
                check_bit_bias(
                    int(asn1_details.asn1["r"]), field_width,
                    JudgementCode.X509Cert_Signature_DSA_R_BitBiasPresent,
                    "DSA signature R parameter")
                check_bit_bias(
                    int(asn1_details.asn1["s"]), field_width,
                    JudgementCode.X509Cert_Signature_DSA_S_BitBiasPresent,
                    "DSA signature S parameter")

    result = {
        "name": signature_alg.name,
        "sig_fnc": self.algorithm("sig_fnc").analyze(signature_alg.value.sig_fnc),
        "security": judgements,
    }
    if hash_fnc is not None:
        result.update({
            "pretty": signature_alg.value.sig_fnc.value.pretty_name + " with " + hash_fnc.value.pretty_name,
            "hash_fnc": self.algorithm("hash_fnc").analyze(hash_fnc),
        })
    else:
        result.update({
            "pretty": "%s with undetermined hash function" % (signature_alg.value.sig_fnc.value.pretty_name),
        })
    return result
def analyze_n(self, n):
    """Judge the security of an RSA modulus n and return the
    accumulated SecurityJudgements."""
    judgements = SecurityJudgements()

    if n < 0:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_RSA_Modulus_Negative,
            "Modulus uses incorrect encoding, representation is a negative integer.",
            commonness=Commonness.HIGHLY_UNUSUAL,
            compatibility=Compatibility.STANDARDS_DEVIATION)
        # Reinterpret the value as an unsigned integer of the same
        # byte-aligned width so the remaining checks can run on a
        # positive integer.
        byte_aligned_bits = (n.bit_length() + 7) // 8 * 8
        n &= (1 << byte_aligned_bits) - 1
    elif n == 0:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_RSA_Modulus_Zero,
            "Modulus is zero, this is definitely a broken RSA public key.",
            bits=0,
            commonness=Commonness.HIGHLY_UNUSUAL,
            compatibility=Compatibility.STANDARDS_DEVIATION)
    elif n == 1:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_RSA_Modulus_One,
            "Modulus is one, this is definitely a broken RSA public key.",
            bits=0,
            commonness=Commonness.HIGHLY_UNUSUAL,
            compatibility=Compatibility.STANDARDS_DEVIATION)

    # Optional (potentially expensive) primality test of the modulus.
    if self._test_probable_prime and NumberTheory.is_probable_prime(n):
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_RSA_Modulus_Prime,
            "Modulus is prime, not a compound integer as we would expect for RSA.",
            bits=0)

    # Optional trial factorization via Pollard's rho.
    if self._pollards_rho_iterations > 0:
        factor = NumberTheory.pollard_rho(
            n, max_iterations=self._pollards_rho_iterations)
        if factor is not None:
            judgements += SecurityJudgement(
                JudgementCode.X509Cert_PublicKey_RSA_Modulus_Factorable,
                "Modulus has small factor (%d) and is therefore trivially factorable." % (factor),
                bits=0)

    # Look the modulus up in the database of known-compromised moduli.
    known_modulus = ModulusDB().find(n)
    if known_modulus is not None:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_RSA_Modulus_FactorizationKnown,
            "Modulus is known to be compromised: %s" % (known_modulus.text),
            bits=0)

    bias = NumberTheory.hamming_weight_analysis(n)
    if (bias is not None) and (not bias.plausibly_random):
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_RSA_Modulus_BitBiasPresent,
            "Modulus does not appear to be random. Expected a Hamming weight between %d and %d for a %d bit modulus, but found Hamming weight %d."
            % (bias.rnd_min_hweight, bias.rnd_max_hweight, bias.bitlen,
               bias.hweight),
            commonness=Commonness.HIGHLY_UNUSUAL)

    # We estimate the complexity of factoring the modulus by the asymptotic
    # complexity of the GNFS.
    bits_security = NumberTheory.asymtotic_complexity_gnfs_bits(n)
    judgements += self.algorithm("bits").analyze(
        JudgementCode.X509Cert_PublicKey_RSA_Modulus_LengthInBits,
        bits_security)
    return judgements
def _analyze_curve(self, pubkey):
    """Judge the security of an ECC public key and its underlying curve.

    Checks that the encoded point lies on the curve and does not share
    its X coordinate with the generator, estimates the discrete-log
    attack complexity from the curve order (applying Koblitz-curve
    speedups), checks the affine X/Y coordinates for bit bias, and
    delegates to the field-specific curve judgements. Returns the
    accumulated SecurityJudgements.
    """
    judgements = SecurityJudgements()
    curve = pubkey.curve

    # Check that the encoded public key point is on curve first
    Q = curve.point(pubkey.x, pubkey.y)
    if not Q.on_curve():
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_ECC_PublicKeyPoint_NotOnCurve,
            "Public key point Q is not on the underlying curve %s." % (pubkey.curve),
            bits=0)

    # Check that the encoded public key is not Gx
    if Q.x == curve.Gx:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_ECC_PublicKeyPoint_IsGenerator,
            "Public key point Q_x is equal to generator G_x on curve %s." % (pubkey.curve),
            bits=0)

    # We assume, completely out-of-the-blue and worst-case estimate, 32
    # automorphisms that could be present for any curve (see Duursma et
    # al., "Speeding up the discrete log computation on curves with
    # automorphisms"). Therefore, for a given order n, we estimate the
    # complexity in bits as:
    #
    # b = log2(sqrt(n / 32)) = (log2(n) / 2) - 2.5
    approx_curve_order_bits = curve.order_bits
    bits_security = (approx_curve_order_bits / 2) - 2.5

    # We then take into account anomalous binary curves (Koblitz curves) as
    # well and use the approximations of Wiener/Zuccherato ("Faster Attacks
    # on Elliptic Curve Cryptosystems")
    literature = LiteratureReference(
        author=["Michael J. Wiener", "Robert J. Zuccherato"],
        title="Faster Attacks on Elliptic Curve Cryptosystems",
        year=1999,
        source="Selected Areas in Cryptography 1998; LNCS 1556")
    if isinstance(curve, BinaryFieldEllipticCurve) and curve.is_koblitz:
        # Binary-field Koblitz curve: sqrt(2m) speedup per Wiener/Zuccherato.
        speedup = math.sqrt(2 * curve.m)
        bits_security -= math.log(speedup, 2)
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_ECC_DomainParameters_CurveProperty_KoblitzCurve,
            "Binary field Koblitz curves (anomalous binary curves) have more efficient attacks than their non-anomalous binary curves; in this case improving attack performance by a factor of ~%.1f."
            % (speedup),
            commonness=Commonness.UNUSUAL,
            literature=literature)

    if isinstance(curve, PrimeFieldEllipticCurve) and curve.is_koblitz:
        # The math here is a bit shady. Firstly, Koblitz curves over F_p
        # only mean there's an efficiently computable endomorphism (e.g.,
        # R. Gallant (1999); "Faster elliptic curve cryptography using
        # efficient endomorphisms"). We do not check for that, however, but
        # instead rely on dull "b = 0 and a is small" check.
        # Additionally, Wiener and Zuccherato describe curves of form
        # y^2 = x^3 - ax or y^2 = x^3 + b (which, for our a/b check, is not
        # the case) and, for the latter, describe a sqrt(6) speedup. We
        # just take that as is, knowing full well it's just guesswork.
        speedup = math.sqrt(6)
        bits_security -= math.log(speedup, 2)
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_ECC_DomainParameters_CurveProperty_KoblitzCurve,
            "Prime field Koblitz curves might have more efficient attacks than non-Koblitz curves. In this case, attack performance improves roughly by a factor of ~%.1f."
            % (speedup),
            commonness=Commonness.UNUSUAL,
            literature=literature)

    bits_security = math.floor(bits_security)
    judgements += self.algorithm("bits").analyze(
        JudgementCode.X509Cert_PublicKey_ECC_CurveOrderInBits, bits_security)

    # Check if the affine X/Y coordinates of the public key are about the
    # same length as the curve order. If randomly generated, both X and Y
    # should be about the same bitlength as the generator order and the
    # hamming weight should be roughly half of the bitlength of the curve
    # order.
    hweight_analysis = NumberTheory.hamming_weight_analysis(
        pubkey.x, min_bit_length=curve.field_bits)
    if not hweight_analysis.plausibly_random:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_ECC_PublicKeyPoint_X_BitBiasPresent,
            "Hamming weight of public key field element's X coordinate is %d at bitlength %d, but expected a weight between %d and %d when randomly chosen; this is likely not coincidential."
            % (hweight_analysis.hweight, hweight_analysis.bitlen,
               hweight_analysis.rnd_min_hweight,
               hweight_analysis.rnd_max_hweight),
            commonness=Commonness.HIGHLY_UNUSUAL)

    hweight_analysis = NumberTheory.hamming_weight_analysis(
        pubkey.y, min_bit_length=curve.field_bits)
    if not hweight_analysis.plausibly_random:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_ECC_PublicKeyPoint_Y_BitBiasPresent,
            "Hamming weight of public key field element's Y coordinate is %d at bitlength %d, but expected a weight between %d and %d when randomly chosen; this is likely not coincidential."
            % (hweight_analysis.hweight, hweight_analysis.bitlen,
               hweight_analysis.rnd_min_hweight,
               hweight_analysis.rnd_max_hweight),
            commonness=Commonness.HIGHLY_UNUSUAL)

    # Field-specific domain parameter judgements.
    if isinstance(curve, BinaryFieldEllipticCurve):
        judgements += self._judge_binary_field_curve(curve)
    elif isinstance(curve, PrimeFieldEllipticCurve):
        judgements += self._judge_prime_field_curve(curve)

    return judgements
def analyze(self, pubkey):
    """Judge the security of a DSA public key and its domain parameters.

    Validates p/q primality, q | (p - 1), generator g validity and
    range, checks p and q for bit bias, classifies the L/N parameter
    pair and derives an overall bit-strength estimate. Returns a
    result dict with "cryptosystem", "specific" (L/N and, optionally,
    raw parameter values) and "security" judgements.
    """
    judgements = SecurityJudgements()
    # FIPS 186-4 notation: L = bitlength of prime modulus p,
    # N = bitlength of subgroup order q.
    L = pubkey.p.bit_length()
    N = pubkey.q.bit_length()

    if not NumberTheory.is_probable_prime(pubkey.p):
        standard = LiteratureReference(
            quote="p: a prime modulus",
            sect="4.1",
            author="National Institute of Standards and Technology",
            title="FIPS PUB 186-4: Digital Signature Standard (DSS)",
            year=2013,
            month=7,
            doi="10.6028/NIST.FIPS.186-4")
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_Parameters_P_NotPrime,
            "DSA parameter p is not prime.",
            commonness=Commonness.HIGHLY_UNUSUAL,
            compatibility=Compatibility.STANDARDS_DEVIATION,
            bits=0,
            standard=standard)

    if not NumberTheory.is_probable_prime(pubkey.q):
        standard = LiteratureReference(
            quote="q: a prime divisor of (p - 1)",
            sect="4.1",
            author="National Institute of Standards and Technology",
            title="FIPS PUB 186-4: Digital Signature Standard (DSS)",
            year=2013,
            month=7,
            doi="10.6028/NIST.FIPS.186-4")
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_Parameters_Q_NotPrime,
            "DSA parameter q is not prime.",
            commonness=Commonness.HIGHLY_UNUSUAL,
            compatibility=Compatibility.STANDARDS_DEVIATION,
            bits=0,
            standard=standard)

    if ((pubkey.p - 1) % pubkey.q) != 0:
        standard = LiteratureReference(
            quote="q: a prime divisor of (p - 1)",
            sect="4.1",
            author="National Institute of Standards and Technology",
            title="FIPS PUB 186-4: Digital Signature Standard (DSS)",
            year=2013,
            month=7,
            doi="10.6028/NIST.FIPS.186-4")
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_Parameters_Q_NoDivisorOfP1,
            "DSA parameter q is not a divisor of (p - 1).",
            commonness=Commonness.HIGHLY_UNUSUAL,
            compatibility=Compatibility.STANDARDS_DEVIATION,
            bits=0,
            standard=standard)

    # g must generate the order-q subgroup, i.e. g^q mod p == 1.
    # NOTE(review): unlike the sibling judgements this one carries no
    # compatibility/standard reference -- possibly intentional, confirm.
    if pow(pubkey.g, pubkey.q, pubkey.p) != 1:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_Parameters_G_Invalid,
            "DSA parameter g is not valid. In particular, g^q mod p != 1.",
            commonness=Commonness.HIGHLY_UNUSUAL,
            bits=0)

    if (pubkey.g <= 1) or (pubkey.g >= pubkey.p):
        standard = LiteratureReference(
            quote="g: a generator of a subgroup of order q in the multiplicative group of GF(p), such that 1 < g < p",
            sect="4.1",
            author="National Institute of Standards and Technology",
            title="FIPS PUB 186-4: Digital Signature Standard (DSS)",
            year=2013,
            month=7,
            doi="10.6028/NIST.FIPS.186-4")
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_Parameters_G_InvalidRange,
            "DSA parameter g is not inside the valid range (1 < g < p).",
            commonness=Commonness.HIGHLY_UNUSUAL,
            compatibility=Compatibility.STANDARDS_DEVIATION,
            bits=0,
            standard=standard)

    # Randomly generated p and q should have roughly half their bits set.
    hweight_analysis = NumberTheory.hamming_weight_analysis(pubkey.p)
    if not hweight_analysis.plausibly_random:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_Parameters_P_BitBiasPresent,
            "Hamming weight of DSA prime p is %d at bitlength %d, but expected a weight between %d and %d when randomly chosen; this is likely not coincidential."
            % (hweight_analysis.hweight, hweight_analysis.bitlen,
               hweight_analysis.rnd_min_hweight,
               hweight_analysis.rnd_max_hweight),
            commonness=Commonness.HIGHLY_UNUSUAL)

    hweight_analysis = NumberTheory.hamming_weight_analysis(pubkey.q)
    if not hweight_analysis.plausibly_random:
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_Parameters_Q_BitBiasPresent,
            "Hamming weight of DSA prime q is %d at bitlength %d, but expected a weight between %d and %d when randomly chosen; this is likely not coincidential."
            % (hweight_analysis.hweight, hweight_analysis.bitlen,
               hweight_analysis.rnd_min_hweight,
               hweight_analysis.rnd_max_hweight),
            commonness=Commonness.HIGHLY_UNUSUAL)

    if (L in self._TYPICAL_L_N_VALUES) and (N in self._TYPICAL_L_N_VALUES[L]):
        # Typical
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_L_N_Common,
            "DSA parameter values L/N (%d/%d) are common." % (L, N),
            commonness=Commonness.COMMON)
    else:
        # Non-typical
        judgements += SecurityJudgement(
            JudgementCode.X509Cert_PublicKey_DSA_L_N_Uncommon,
            "DSA parameter values L/N (%d/%d) are uncommon." % (L, N),
            commonness=Commonness.UNUSUAL)

    # Overall strength: GNFS complexity for p, generic sqrt bound (N/2)
    # for the subgroup; the weaker of the two dominates.
    L_strength_bits = NumberTheory.asymtotic_complexity_gnfs_bits(pubkey.p)
    N_strength_bits = math.floor(N / 2)
    bits_security = min(L_strength_bits, N_strength_bits)
    judgements += self.algorithm("bits").analyze(
        JudgementCode.X509Cert_PublicKey_DSA_L_N, bits_security)

    result = {
        "cryptosystem": "dsa",
        "specific": {
            "L": L,
            "N": N,
        },
        "security": judgements,
    }

    # Optionally include the raw parameter values in the result.
    if self._analysis_options.include_raw_data:
        result["specific"]["p"] = {
            "value": pubkey.p,
        }
        result["specific"]["q"] = {
            "value": pubkey.q,
        }
        result["specific"]["g"] = {
            "value": pubkey.g,
        }
        result["specific"]["pubkey"] = {
            "value": pubkey.pubkey,
        }
    return result