def verify_sig(self, sig, include_headers, sig_header, dnsfunc):
    """Verify one parsed signature tag set against its DNS public key.

    @param sig: parsed tag dict of the signature header being checked
    @param include_headers: header names listed in the h= tag (mutated:
        an extra b'from' is appended — see bug#644046 note below)
    @param sig_header: the (name, value) tuple of the signature header
    @param dnsfunc: callable used to fetch the key TXT record
    @return: False when the key cannot be retrieved/parsed, otherwise the
        boolean result of the signature check
    @raise MessageFormatError: invalid c= value or undecodable bh=/b=
    @raise ValidationError: body hash mismatch
    @raise KeyFormatError: undersized key or digest too large for modulus
    """
    name = sig[b's'] + b"._domainkey." + sig[b'd'] + b"."
    try:
        pk, self.keysize = load_pk_from_dns(name, dnsfunc)
    except KeyFormatError as e:
        self.logger.error("%s" % e)
        return False

    try:
        canon_policy = CanonicalizationPolicy.from_c_value(
            sig.get(b'c', b'relaxed/relaxed'))
    except InvalidCanonicalizationPolicyError as e:
        raise MessageFormatError("invalid c= value: %s" % e.args[0])

    digestmod = HASH_ALGORITHMS[sig[b'a']]

    # Validate the body hash, but only when a bh= tag is present.
    if b'bh' in sig:
        body_hasher = HashThrough(digestmod())
        canonical_body = canon_policy.canonicalize_body(self.body)
        if b'l' in sig:
            # l= limits how much of the body is covered by the hash.
            canonical_body = canonical_body[:int(sig[b'l'])]
        body_hasher.update(canonical_body)
        self.logger.debug("body hashed: %r" % body_hasher.hashed())
        bodyhash = body_hasher.digest()
        self.logger.debug("bh: %s" % base64.b64encode(bodyhash))
        try:
            bh = base64.b64decode(re.sub(br"\s+", b"", sig[b'bh']))
        except TypeError as e:
            raise MessageFormatError(str(e))
        if bodyhash != bh:
            raise ValidationError(
                "body hash mismatch (got %s, expected %s)" % (
                    base64.b64encode(bodyhash), sig[b'bh']))

    # address bug#644046 by including any additional From header
    # fields when verifying.  Since there should be only one From header,
    # this shouldn't break any legitimate messages.  This could be
    # generalized to check for extras of other singleton headers.
    if b'from' in include_headers:
        include_headers.append(b'from')

    header_hasher = HashThrough(digestmod())
    headers = canon_policy.canonicalize_headers(self.headers)
    self.signed_headers = hash_headers(
        header_hasher, canon_policy, headers, include_headers,
        sig_header, sig)
    self.logger.debug(
        "signed for %s: %r" % (sig_header[0], header_hasher.hashed()))
    try:
        signature = base64.b64decode(re.sub(br"\s+", b"", sig[b'b']))
        res = RSASSA_PKCS1_v1_5_verify(header_hasher, signature, pk)
        self.logger.debug("%s valid: %s" % (sig_header[0], res))
        if res and self.keysize < self.minkey:
            raise KeyFormatError("public key too small: %d" % self.keysize)
        return res
    except (TypeError, DigestTooLargeError) as e:
        raise KeyFormatError("digest too large for modulus: %s" % e)
def sign(self, selector, domain, privkey, auth_results,
         chain_validation_status, include_headers=None, timestamp=None,
         standardize=False):
    """Sign and prepend a complete ARC set (AAR, AMS, AS) to the message.

    @param selector: the DKIM selector value for the signatures
    @param domain: the DKIM domain value for the signatures
    @param privkey: a PKCS#1 private key in base64-encoded text form
    @param auth_results: the Authentication-Results value to seal (bytes)
    @param chain_validation_status: CV_* status of the existing ARC chain
    @param include_headers: optional sequence of header names to sign with
        the ARC-Message-Signature
    @param timestamp: optional fixed t= value (mainly for testing)
    @param standardize: emit headers in a standardized form (for testing)
    @return: list of the three new ARC header bytes, ARC-Seal first
    @raise KeyFormatError: when the private key cannot be parsed
    @raise ParameterError: on invalid header selection or cv=/instance
        mismatch
    """
    try:
        pk = parse_pem_private_key(privkey)
    except UnparsableKeyError as e:
        raise KeyFormatError(str(e))

    # Setup headers
    if include_headers is None:
        include_headers = self.default_sign_headers()
    if b'arc-authentication-results' not in include_headers:
        include_headers.append(b'arc-authentication-results')
    include_headers = tuple([x.lower() for x in include_headers])
    # record what verify should extract
    self.include_headers = include_headers

    # rfc4871 says FROM is required
    if b'from' not in include_headers:
        raise ParameterError("The From header field MUST be signed")

    # raise exception for any SHOULD_NOT headers, call can modify
    # SHOULD_NOT if really needed.
    for x in set(include_headers).intersection(self.should_not_sign):
        raise ParameterError("The %s header field SHOULD NOT be signed" % x)

    max_instance, arc_headers_w_instance = self.sorted_arc_headers()
    instance = 1
    if len(arc_headers_w_instance) != 0:
        instance = max_instance + 1
    if instance == 1 and chain_validation_status != CV_None:
        raise ParameterError(
            "No existing chain found on message, cv should be none")
    elif instance != 1 and chain_validation_status == CV_None:
        raise ParameterError("cv=none not allowed on instance %d" % instance)

    new_arc_set = []
    arc_headers = [y for x, y in arc_headers_w_instance]

    # Compute ARC-Authentication-Results
    aar_value = ("i=%d; " % instance).encode('utf-8') + auth_results
    # BUGFIX: the original test was ``aar_value[-1] != b'\n'``.  In
    # Python 3 indexing bytes yields an int, so that comparison was
    # always true and a CRLF was appended even when the value already
    # ended with a newline.  endswith() performs the intended check.
    if not aar_value.endswith(b'\n'):
        aar_value += b'\r\n'
    new_arc_set.append(b"ARC-Authentication-Results: " + aar_value)
    self.headers.insert(0, (b"arc-authentication-results", aar_value))
    arc_headers.insert(0, (b"ARC-Authentication-Results", aar_value))

    # Compute bh=
    canon_policy = CanonicalizationPolicy.from_c_value(b'relaxed/relaxed')
    self.hasher = HASH_ALGORITHMS[self.signature_algorithm]
    h = HashThrough(self.hasher())
    h.update(canon_policy.canonicalize_body(self.body))
    self.logger.debug("sign ams body hashed: %r" % h.hashed())
    bodyhash = base64.b64encode(h.digest())

    # Compute ARC-Message-Signature
    timestamp = str(timestamp or int(time.time())).encode('ascii')
    ams_fields = [x for x in [
        (b'i', str(instance).encode('ascii')),
        (b'a', self.signature_algorithm),
        (b'c', b'relaxed/relaxed'),
        (b'd', domain),
        (b's', selector),
        (b't', timestamp),
        (b'h', b" : ".join(include_headers)),
        (b'bh', bodyhash),
        # Force b= to fold onto it's own line so that refolding after
        # adding sig doesn't change whitespace for previous tags.
        (b'b', b'0' * 60),
    ] if x]
    res = self.gen_header(ams_fields, include_headers, canon_policy,
                          b"ARC-Message-Signature", pk, standardize)
    new_arc_set.append(b"ARC-Message-Signature: " + res)
    self.headers.insert(0, (b"ARC-Message-Signature", res))
    arc_headers.insert(0, (b"ARC-Message-Signature", res))

    # Compute ARC-Seal
    as_fields = [x for x in [
        (b'i', str(instance).encode('ascii')),
        (b'cv', chain_validation_status),
        (b'a', self.signature_algorithm),
        (b'd', domain),
        (b's', selector),
        (b't', timestamp),
        # Force b= to fold onto it's own line so that refolding after
        # adding sig doesn't change whitespace for previous tags.
        (b'b', b'0' * 60),
    ] if x]
    # The seal covers the whole existing chain, oldest instance first.
    as_include_headers = [x[0].lower() for x in arc_headers]
    as_include_headers.reverse()
    res = self.gen_header(as_fields, as_include_headers, canon_policy,
                          b"ARC-Seal", pk, standardize)
    new_arc_set.append(b"ARC-Seal: " + res)
    self.headers.insert(0, (b"ARC-Seal", res))
    arc_headers.insert(0, (b"ARC-Seal", res))
    new_arc_set.reverse()
    return new_arc_set
def sign(self, selector, domain, privkey, identity=None,
         canonicalize=(b'relaxed', b'simple'), include_headers=None,
         length=False):
    """Sign the message and return the DKIM-Signature header bytes.

    @param selector: the DKIM selector value for the signature
    @param domain: the DKIM domain value for the signature
    @param privkey: a PKCS#1 private key in base64-encoded text form
    @param identity: optional i= identity; must end with the domain
    @param canonicalize: (header, body) canonicalization algorithm pair
    @param include_headers: optional sequence of header names to sign
    @param length: when true, include an l= body-length tag
    @return: the complete ``DKIM-Signature: ...`` header as bytes
    @raise KeyFormatError: when the private key cannot be parsed
    @raise ParameterError: on an invalid identity or header selection
    """
    try:
        private_key = parse_pem_private_key(privkey)
    except UnparsableKeyError as e:
        raise KeyFormatError(str(e))

    if identity is not None and not identity.endswith(domain):
        raise ParameterError("identity must end with domain")

    canon_policy = CanonicalizationPolicy.from_c_value(
        b'/'.join(canonicalize))

    if include_headers is None:
        include_headers = self.default_sign_headers()
    include_headers = tuple(hdr.lower() for hdr in include_headers)
    # Record what a subsequent verify should extract.
    self.include_headers = include_headers

    # rfc4871 says FROM is required
    if b'from' not in include_headers:
        raise ParameterError("The From header field MUST be signed")

    # Reject any SHOULD_NOT headers; the caller can modify
    # self.should_not_sign if signing one is really needed.
    for bad in set(include_headers) & set(self.should_not_sign):
        raise ParameterError(
            "The %s header field SHOULD NOT be signed" % bad)

    body = canon_policy.canonicalize_body(self.body)
    self.hasher = HASH_ALGORITHMS[self.signature_algorithm]
    body_hash = self.hasher()
    body_hash.update(body)
    bodyhash = base64.b64encode(body_hash.digest())

    candidate_fields = [
        (b'v', b"1"),
        (b'a', self.signature_algorithm),
        (b'c', canon_policy.to_c_value()),
        (b'd', domain),
        (b'i', identity or b"@" + domain),
        length and (b'l', str(len(body)).encode('ascii')),
        (b'q', b"dns/txt"),
        (b's', selector),
        (b't', str(int(time.time())).encode('ascii')),
        (b'h', b" : ".join(include_headers)),
        (b'bh', bodyhash),
        # Force b= to fold onto it's own line so that refolding after
        # adding sig doesn't change whitespace for previous tags.
        (b'b', b'0' * 60),
    ]
    # Drop the False placeholder produced when length is falsy.
    sigfields = [field for field in candidate_fields if field]

    header_value = self.gen_header(sigfields, include_headers,
                                   canon_policy, b"DKIM-Signature",
                                   private_key)
    self.domain = domain
    self.selector = selector
    self.signature_fields = dict(sigfields)
    return b'DKIM-Signature: ' + header_value
def assertAlgorithms(self, header_algo, body_algo, c_value):
    """Assert that parsing c_value yields the expected algorithm pair."""
    policy = CanonicalizationPolicy.from_c_value(c_value)
    actual = (policy.header_algorithm, policy.body_algorithm)
    self.assertEqual((header_algo, body_algo), actual)
def verify(self, idx=0, dnsfunc=get_txt):
    """Verify the idx-th DKIM-Signature header on the message.

    @param idx: which DKIM-Signature header to check (0 = first found)
    @param dnsfunc: callable used to fetch the key TXT record
    @return: False when there is no idx-th signature header, otherwise
        the boolean result of the signature check
    @raise MessageFormatError: malformed tags, c= value, algorithm, or
        undecodable bh=/b=
    @raise ValidationError: body hash mismatch
    @raise KeyFormatError: missing/unparsable public key, undersized key,
        or digest too large for modulus
    """
    sigheaders = [(name, value) for name, value in self.headers
                  if name.lower() == b"dkim-signature"]
    if len(sigheaders) <= idx:
        return False

    # By default, we validate the first DKIM-Signature line found.
    try:
        sig = parse_tag_value(sigheaders[idx][1])
        self.signature_fields = sig
    except InvalidTagValueList as e:
        raise MessageFormatError(e)

    logger = self.logger
    logger.debug("sig: %r" % sig)
    validate_signature_fields(sig)
    self.domain = sig[b'd']
    self.selector = sig[b's']

    try:
        canon_policy = CanonicalizationPolicy.from_c_value(sig.get(b'c'))
    except InvalidCanonicalizationPolicyError as e:
        raise MessageFormatError("invalid c= value: %s" % e.args[0])
    headers = canon_policy.canonicalize_headers(self.headers)
    body = canon_policy.canonicalize_body(self.body)

    try:
        hasher = HASH_ALGORITHMS[sig[b'a']]
    except KeyError as e:
        raise MessageFormatError(
            "unknown signature algorithm: %s" % e.args[0])

    if b'l' in sig:
        # l= limits how much of the body is covered by the hash.
        body = body[:int(sig[b'l'])]

    body_digester = hasher()
    body_digester.update(body)
    bodyhash = body_digester.digest()
    logger.debug("bh: %s" % base64.b64encode(bodyhash))
    try:
        bh = base64.b64decode(re.sub(br"\s+", b"", sig[b'bh']))
    except TypeError as e:
        raise MessageFormatError(str(e))
    if bodyhash != bh:
        raise ValidationError(
            "body hash mismatch (got %s, expected %s)" % (
                base64.b64encode(bodyhash), sig[b'bh']))

    # Fetch and parse the public key from DNS.
    name = sig[b's'] + b"._domainkey." + sig[b'd'] + b"."
    s = dnsfunc(name)
    if not s:
        raise KeyFormatError("missing public key: %s" % name)
    try:
        if type(s) is str:
            s = s.encode('ascii')
        pub = parse_tag_value(s)
    except InvalidTagValueList as e:
        raise KeyFormatError(e)
    try:
        pk = parse_public_key(base64.b64decode(pub[b'p']))
        self.keysize = bitsize(pk['modulus'])
    except KeyError:
        raise KeyFormatError("incomplete public key: %s" % s)
    except (TypeError, UnparsableKeyError) as e:
        raise KeyFormatError(
            "could not parse public key (%s): %s" % (pub[b'p'], e))

    include_headers = [h.lower() for h in re.split(br"\s*:\s*", sig[b'h'])]
    self.include_headers = tuple(include_headers)
    # address bug#644046 by including any additional From header
    # fields when verifying.  Since there should be only one From header,
    # this shouldn't break any legitimate messages.  This could be
    # generalized to check for extras of other singleton headers.
    if b'from' in include_headers:
        include_headers.append(b'from')

    header_digester = hasher()
    self.signed_headers = hash_headers(
        header_digester, canon_policy, headers, include_headers,
        sigheaders[idx], sig)
    try:
        signature = base64.b64decode(re.sub(br"\s+", b"", sig[b'b']))
        res = RSASSA_PKCS1_v1_5_verify(header_digester, signature, pk)
        if res and self.keysize < self.minkey:
            raise KeyFormatError("public key too small: %d" % self.keysize)
        return res
    except (TypeError, DigestTooLargeError) as e:
        raise KeyFormatError("digest too large for modulus: %s" % e)
def sign(self, selector, domain, privkey, identity=None,
         canonicalize=(b'relaxed', b'simple'), include_headers=None,
         length=False):
    """Sign the message and return the DKIM-Signature header bytes.

    @param selector: the DKIM selector value for the signature
    @param domain: the DKIM domain value for the signature
    @param privkey: a PKCS#1 private key in base64-encoded text form
    @param identity: optional i= identity; must end with the domain
    @param canonicalize: (header, body) canonicalization algorithm pair
    @param include_headers: optional sequence of header names to sign
    @param length: when true, include an l= body-length tag
    @return: the complete ``DKIM-Signature: ...\\r\\n`` header as bytes
    @raise KeyFormatError: when the private key cannot be parsed
    @raise ParameterError: on invalid identity, header selection, or a
        digest too large for the key modulus
    """
    try:
        pk = parse_pem_private_key(privkey)
    except UnparsableKeyError as e:
        raise KeyFormatError(str(e))
    if identity is not None and not identity.endswith(domain):
        raise ParameterError("identity must end with domain")
    canon_policy = CanonicalizationPolicy.from_c_value(
        b'/'.join(canonicalize))
    headers = canon_policy.canonicalize_headers(self.headers)
    if include_headers is None:
        include_headers = self.default_sign_headers()
    # rfc4871 says FROM is required
    if b'from' not in (x.lower() for x in include_headers):
        raise ParameterError("The From header field MUST be signed")
    # raise exception for any SHOULD_NOT headers, call can modify
    # SHOULD_NOT if really needed.
    for x in include_headers:
        if x.lower() in self.should_not_sign:
            raise ParameterError(
                "The %s header field SHOULD NOT be signed" % x)
    body = canon_policy.canonicalize_body(self.body)
    hasher = HASH_ALGORITHMS[self.signature_algorithm]
    h = hasher()
    h.update(body)
    bodyhash = base64.b64encode(h.digest())
    sigfields = [x for x in [
        (b'v', b"1"),
        (b'a', self.signature_algorithm),
        (b'c', canon_policy.to_c_value()),
        (b'd', domain),
        (b'i', identity or b"@" + domain),
        # BUGFIX: the l= value was a bare int, which made the
        # b"=".join() below raise TypeError whenever length=True.
        # Every tag value must be bytes (matching the other sign()).
        length and (b'l', str(len(body)).encode('ascii')),
        (b'q', b"dns/txt"),
        (b's', selector),
        (b't', str(int(time.time())).encode('ascii')),
        (b'h', b" : ".join(include_headers)),
        (b'bh', bodyhash),
        # Force b= to fold onto it's own line so that refolding after
        # adding sig doesn't change whitespace for previous tags.
        (b'b', b'0' * 60),
    ] if x]
    include_headers = [x.lower() for x in include_headers]
    # record what verify should extract
    self.include_headers = tuple(include_headers)
    sig_value = fold(b"; ".join(b"=".join(x) for x in sigfields))
    sig_value = RE_BTAG.sub(b'\\1', sig_value)
    dkim_header = (b'DKIM-Signature', b' ' + sig_value)
    h = hasher()
    sig = dict(sigfields)
    self.signed_headers = hash_headers(
        h, canon_policy, headers, include_headers, dkim_header, sig)
    self.logger.debug("sign headers: %r" % self.signed_headers)
    try:
        sig2 = RSASSA_PKCS1_v1_5_sign(h, pk)
    except DigestTooLargeError:
        raise ParameterError("digest too large for modulus")
    # Folding b= is explicity allowed, but yahoo and live.com are broken
    #sig_value += base64.b64encode(bytes(sig2))
    # Instead of leaving unfolded (which lets an MTA fold it later and still
    # breaks yahoo and live.com), we change the default signing mode to
    # relaxed/simple (for broken receivers), and fold now.
    sig_value = fold(sig_value + base64.b64encode(bytes(sig2)))
    self.domain = domain
    self.selector = selector
    self.signature_fields = sig
    return b'DKIM-Signature: ' + sig_value + b"\r\n"
def assertCValue(self, c_value, header_algo, body_algo):
    """Assert that the algorithm pair serializes back to c_value."""
    policy = CanonicalizationPolicy(header_algo, body_algo)
    self.assertEqual(c_value, policy.to_c_value())
def assertAlgorithms(self, header_algo, body_algo, c_value):
    """Assert that parsing c_value yields the expected algorithm pair."""
    parsed = CanonicalizationPolicy.from_c_value(c_value)
    self.assertEqual(
        (header_algo, body_algo),
        (parsed.header_algorithm, parsed.body_algorithm))