def _map_log_id_to_verifier(log_list):
    """Returns a map from log id to verifier object from the log_list.

    The log id is the SHA-256 digest of the log's raw public key.
    """
    verifiers = {}
    for raw_key in log_list.values():
        log_id = hashlib.sha256(raw_key).digest()
        verifiers[log_id] = verify.LogVerifier(
            verify.create_key_info_from_raw_key(raw_key))
    return verifiers
def verify_sct(chain, sct_tls, log_key_pem): sct = client_pb2.SignedCertificateTimestamp() tls_message.decode(sct_tls, sct) log_key = pem.from_pem(log_key_pem, 'PUBLIC KEY')[0] key_info = verify.create_key_info_from_raw_key(log_key) lv = verify.LogVerifier(key_info) print lv.verify_sct(sct, chain)
def verify_sct(chain, sct_tls, log_key_pem): sct = client_pb2.SignedCertificateTimestamp() tls_message.decode(sct_tls, sct) key_info = client_pb2.KeyInfo() key_info.type = client_pb2.KeyInfo.ECDSA key_info.pem_key = log_key_pem lv = verify.LogVerifier(key_info) print lv.verify_sct(sct, chain)
def _test_verify_embedded_scts(self, chain):
    """Load the named testdata certificates and verify their embedded
    SCTs against the test log key."""
    certs = [cert.Certificate.from_pem_file(
                 os.path.join(FLAGS.testdata_dir, name))
             for name in chain]
    info = client_pb2.KeyInfo()
    info.type = client_pb2.KeyInfo.ECDSA
    info.pem_key = read_testdata_file('ct-server-key-public.pem')
    return verify.LogVerifier(info).verify_embedded_scts(certs)
def _map_log_id_to_verifier(log_list):
    """Returns a map from log id to verifier object from the log_list.

    The log id is the SHA-256 digest of the log's raw public key; the
    verifier is built from the PEM-wrapped form of the same key.
    """
    verifiers = {}
    for raw_key in log_list.values():
        info = client_pb2.KeyInfo()
        info.type = client_pb2.KeyInfo.ECDSA
        info.pem_key = pem.to_pem(raw_key, 'PUBLIC KEY')
        log_id = hashlib.sha256(raw_key).digest()
        verifiers[log_id] = verify.LogVerifier(info)
    return verifiers
def test_verify_sct_valid_signature(self):
    """A known-good SCT for test-cert.pem verifies successfully."""
    leaf = cert.Certificate.from_pem_file(
        os.path.join(FLAGS.testdata_dir, 'test-cert.pem'))
    sct = client_pb2.SignedCertificateTimestamp()
    tls_message.decode(read_testdata_file('test-cert.proof'), sct)
    info = client_pb2.KeyInfo()
    info.type = client_pb2.KeyInfo.ECDSA
    info.pem_key = read_testdata_file('ct-server-key-public.pem')
    self.assertTrue(verify.LogVerifier(info).verify_sct(sct, leaf))
def test_verify_sth_temporal_consistency_reversed_timestamps(self):
    """Passing the STHs in (newer, older) order raises ValueError."""
    base = LogVerifierTest.default_sth
    later = client_pb2.SthResponse()
    later.CopyFrom(base)
    later.timestamp = base.timestamp + 1
    later.tree_size = base.tree_size + 1
    # No Merkle proofs are checked here, so no Merkle verifier is needed.
    verifier = verify.LogVerifier(LogVerifierTest.default_key_info, None)
    self.assertRaises(ValueError,
                      verifier.verify_sth_temporal_consistency,
                      later, base)
def test_verify_sth_temporal_consistency(self):
    """A later STH with a larger tree and timestamp is consistent."""
    base = LogVerifierTest.default_sth
    later = client_pb2.SthResponse()
    later.CopyFrom(base)
    later.tree_size = base.tree_size + 1
    later.timestamp = base.timestamp + 1
    # No Merkle proofs are checked here, so no Merkle verifier is needed.
    verifier = verify.LogVerifier(LogVerifierTest.default_key_info, None)
    # Note we do not care about root hash inconsistency here.
    self.assertTrue(
        verifier.verify_sth_temporal_consistency(base, later))
def test_verify_sth_temporal_consistency_newer_tree_is_smaller(self):
    """A later timestamp with a smaller tree is inconsistent."""
    base = self.sth_fixture
    later = client_pb2.SthResponse()
    later.CopyFrom(base)
    later.timestamp = base.timestamp + 1
    later.tree_size = base.tree_size - 1
    # No Merkle proofs are checked here, so no Merkle verifier is needed.
    verifier = verify.LogVerifier(self.key_info_fixture, None)
    self.assertRaises(error.ConsistencyError,
                      verifier.verify_sth_temporal_consistency,
                      base, later)
def test_verify_sth_for_bad_asn1_signature(self):
    """verify_sct raises SignatureError for an SCT whose ECDSA
    signature is badly ASN.1-encoded (real-world bad SCT)."""
    # www.google.com certificate for which a bad SCT was issued.
    google_cert = (
        '-----BEGIN CERTIFICATE-----',
        'MIIEgDCCA2igAwIBAgIIdJ7+eILLLSgwDQYJKoZIhvcNAQELBQAwSTELMAkGA1UE',
        'BhMCVVMxEzARBgNVBAoTCkdvb2dsZSBJbmMxJTAjBgNVBAMTHEdvb2dsZSBJbnRl',
        'cm5ldCBBdXRob3JpdHkgRzIwHhcNMTUxMDA3MTExMDM4WhcNMTYwMTA1MDAwMDAw',
        'WjBoMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwN',
        'TW91bnRhaW4gVmlldzETMBEGA1UECgwKR29vZ2xlIEluYzEXMBUGA1UEAwwOd3d3',
        'Lmdvb2dsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCR6Knj',
        'TG6eyvY6C1VO7daC0AbWe3cenr9y9lVFQH2ej5r87znUvep4pC/bmG71aTd25wds',
        'ScpclWNR4lkR9Ph45j8K+SjMXU7syiqFiWPWgVzyi4N3bXZw4w83RoTzfyUTn4Kx',
        '9nsQLmjVS4wUMSEpWBmYfORwUwMF8BYp5qSkIUogZTADPY7Qr8tmwEq8jLHv9z62',
        'SiYd9JEcGdhnajgXg/+/f+iIb1jhkbjsTjFJBHClgrtRqLZHSU1THZCK6iULTd1B',
        '4yBNvXcHDaSBTPUSvZvZXo/msKfOqd0fHtny1icgl5CSU0tZrZPteomMnLMGdLlN',
        'KHyqIX7XsAd3pNoXAgMBAAGjggFLMIIBRzAdBgNVHSUEFjAUBggrBgEFBQcDAQYI',
        'KwYBBQUHAwIwGQYDVR0RBBIwEIIOd3d3Lmdvb2dsZS5jb20waAYIKwYBBQUHAQEE',
        'XDBaMCsGCCsGAQUFBzAChh9odHRwOi8vcGtpLmdvb2dsZS5jb20vR0lBRzIuY3J0',
        'MCsGCCsGAQUFBzABhh9odHRwOi8vY2xpZW50czEuZ29vZ2xlLmNvbS9vY3NwMB0G',
        'A1UdDgQWBBSUPOkxr+tGC3JYs2JIdXVB2R+f8zAMBgNVHRMBAf8EAjAAMB8GA1Ud',
        'IwQYMBaAFErdBhYbvPZotXb1gba7Yhq6WoEvMCEGA1UdIAQaMBgwDAYKKwYBBAHW',
        'eQIFATAIBgZngQwBAgIwMAYDVR0fBCkwJzAloCOgIYYfaHR0cDovL3BraS5nb29n',
        'bGUuY29tL0dJQUcyLmNybDANBgkqhkiG9w0BAQsFAAOCAQEAfBoIl5qeaJ7NZ6hB',
        'WqeBZwbDV/DOHCPg3/84n8YGlfYdfXQpQdOWC5hfgEkkinBT0yp8dDTdXMUIT9Al',
        'ZMrxE54xJ1cU6FPuZPDWOnzV+6YEW6P9RnTbqKgYCNkHFiFwVvFRm5RTEGei5TLv',
        'l0zFDBusT/mgyvYBMIfW3vVPteEKKEz+aRCZHRiLAHbmJHj2+blVJeHGSF+eKN5q',
        'GWgk7/pMww4JAXsLQ0mmL8qdJKivuiNcyyhbr8IeERiVcItKqfBsX1nwyUnYFWY3',
        'HPkV+sXAPnpTGuxgYvTjcYDf8UO9lgDX5QubEFjjTuTIYAAabmc6Z4UKOS0O46Ne',
        'z28m7Q==',
        '-----END CERTIFICATE-----')
    # The SCT with the bad signature.
    sct_bytes = (
        '00ddeb1d2b7a0d4fa6208b81ad8168707e2e8e9d01d55c888d3d11c4cdb6ecbecc'
        '00000150421dfbb6000004030047304502200035de73784699d2ad8c3631aeda77'
        'f70b2c899492b16f051fd6d38d46afc892022100a4d1b58c63002e5d0862a9f623'
        'f67c8ccf5fc934bd28133fbc8f240aae4cab38'
        ).decode('hex')
    symantec_sct = client_pb2.SignedCertificateTimestamp()
    tls_message.decode(sct_bytes, symantec_sct)
    key_info = client_pb2.KeyInfo()
    key_info.type = client_pb2.KeyInfo.ECDSA
    # SYMANTEC_B64_KEY is the issuing log's Base64 public key (defined
    # elsewhere in this file); wrap it in PEM for the verifier.
    key_info.pem_key = pem.to_pem(
        base64.decodestring(SYMANTEC_B64_KEY), 'PUBLIC KEY')
    verifier = verify.LogVerifier(key_info)
    self.assertRaises(
        error.SignatureError,
        verifier.verify_sct,
        symantec_sct,
        [cert.Certificate.from_pem("\n".join(google_cert)),])
def _test_verify_sct(self, proof, chain, fake_timestamp=None):
    """Decode the named proof file, optionally override its timestamp,
    and verify it over the named testdata certificate chain."""
    sct = client_pb2.SignedCertificateTimestamp()
    tls_message.decode(read_testdata_file(proof), sct)
    if fake_timestamp is not None:
        sct.timestamp = fake_timestamp
    certs = [cert.Certificate.from_pem_file(
                 os.path.join(FLAGS.testdata_dir, name))
             for name in chain]
    info = client_pb2.KeyInfo()
    info.type = client_pb2.KeyInfo.ECDSA
    info.pem_key = read_testdata_file('ct-server-key-public.pem')
    return verify.LogVerifier(info).verify_sct(sct, certs)
def __init__(self, ct_logs, db, cert_db, temp_db_factory, monitor_state_dir,
             agent=None, state_keeper_class=None):
    """Initialize from a CtLogs proto.

    Builds one monitor.Monitor per log entry in ct_logs.

    Args:
        ct_logs: CtLogs proto; each entry must have log_server, log_id
            and public_key_info set.
        db: database handle; also used to look up each log's key via
            db.get_log_id().
        cert_db: certificate database passed through to each Monitor.
        temp_db_factory: factory providing per-log temporary storage.
        monitor_state_dir: directory under which per-log state files
            are created.
        agent: optional HTTP agent; defaults to a reactor-backed
            twisted_client.Agent.
        state_keeper_class: optional state-keeper factory; defaults to
            state.StateKeeper.

    Raises:
        RuntimeError: if a log proto has missing or empty fields.
    """
    threading.Thread.__init__(self)
    self.__monitors = []
    self.__db = db
    if not agent:
        agent = twisted_client.Agent(reactor)
    if not state_keeper_class:
        state_keeper_class = state.StateKeeper
    for log in ct_logs.ctlog:
        if not log.log_server or not log.log_id or not log.public_key_info:
            raise RuntimeError("Cannot start monitor: log proto has "
                               "missing or empty fields: %s" % log)
        try:
            temp_db = temp_db_factory.create_storage(log.log_server)
            client = async_log_client.AsyncLogClient(
                agent, log.log_server, temp_db)
            hasher = merkle.TreeHasher()
            verifier = verify.LogVerifier(log.public_key_info,
                                          merkle.MerkleVerifier(hasher))
            # Convert from standard Base64 to URL-safe Base64 so that the
            # log ID can be used as part of a file path.
            log_id_urlsafe = log.log_id.replace('/', '_').replace('+', '-')
            state_keeper = state_keeper_class(monitor_state_dir + "/" +
                                              log_id_urlsafe)
            log_key = db.get_log_id(log.log_server)
            self.__monitors.append(
                monitor.Monitor(client, verifier, hasher, db, cert_db,
                                log_key, state_keeper))
        except:
            # Intentionally broad: record which log failed to start,
            # then re-raise the original exception unchanged.
            logging.error("Error starting monitor for log: %s" % log)
            raise
    self.__last_update_start_time = 0
    self.__stopped = False
    self.__called_later = None
def test_verify_sth_consistency_invalid_proof(self):
    """A ConsistencyError from the Merkle verifier propagates out."""
    old_sth = LogVerifierTest.default_sth
    new_sth = client_pb2.SthResponse()
    new_sth.CopyFrom(old_sth)
    new_sth.tree_size = old_sth.tree_size + 1
    new_sth.timestamp = old_sth.timestamp + 1
    new_sth.sha256_root_hash = "a new hash"
    proof = ["some proof the mock does not care about"]
    merkle_mock = mock.Mock()
    merkle_mock.verify_tree_consistency.side_effect = (
        error.ConsistencyError("Evil"))
    verifier = verify.LogVerifier(LogVerifierTest.default_key_info,
                                  merkle_mock)
    self.assertRaises(error.ConsistencyError,
                      verifier.verify_sth_consistency,
                      old_sth, new_sth, proof)
def test_verify_sth_fails_for_bad_signature(self):
    """Flipping any content bit of the signature breaks verification."""
    verifier = verify.LogVerifier(LogVerifierTest.default_key_info)
    good_sth = LogVerifierTest.default_sth
    sig = good_sth.tree_head_signature
    for i in range(len(sig)):
        # Skip the bytes that encode ASN.1 lengths: this is covered in a
        # separate test
        if i in (5, 7, 42):
            continue
        sth = client_pb2.SthResponse()
        sth.CopyFrom(good_sth)
        sth.tree_head_signature = (
            sig[:i] + chr(ord(sig[i]) ^ 1) + sig[i+1:])
        # Encoding- or SignatureError, depending on whether the modified
        # byte is a content byte or not.
        self.assertRaises((error.EncodingError, error.SignatureError),
                          verifier.verify_sth, sth)
def test_verify_sth_consistency(self):
    """verify_sth_consistency delegates to the Merkle verifier with the
    old/new tree sizes, root hashes, and the proof."""
    old_sth = LogVerifierTest.default_sth
    new_sth = client_pb2.SthResponse()
    new_sth.CopyFrom(old_sth)
    new_sth.tree_size = old_sth.tree_size + 1
    new_sth.timestamp = old_sth.timestamp + 1
    new_sth.sha256_root_hash = "a new hash"
    proof = ["some proof the mock does not care about"]
    merkle_mock = mock.Mock()
    merkle_mock.verify_tree_consistency.return_value = True
    verifier = verify.LogVerifier(LogVerifierTest.default_key_info,
                                  merkle_mock)
    self.assertTrue(verifier.verify_sth_consistency(old_sth, new_sth,
                                                    proof))
    merkle_mock.verify_tree_consistency.assert_called_once_with(
        old_sth.tree_size, new_sth.tree_size, old_sth.sha256_root_hash,
        new_sth.sha256_root_hash, proof)
def test_verify_sth_temporal_consistency_equal_timestamps(self):
    """Equal timestamps are only consistent for identical STHs."""
    old_sth = LogVerifierTest.default_sth
    new_sth = client_pb2.SthResponse()
    new_sth.CopyFrom(old_sth)
    new_sth.tree_size = old_sth.tree_size + 1
    # No Merkle proofs are checked here, so no Merkle verifier is needed.
    verifier = verify.LogVerifier(LogVerifierTest.default_key_info, None)
    # Same timestamp but a grown tree is inconsistent...
    self.assertRaises(error.ConsistencyError,
                      verifier.verify_sth_temporal_consistency,
                      old_sth, new_sth)
    # ...as is a shrunk tree.
    new_sth.tree_size = old_sth.tree_size - 1
    self.assertRaises(error.ConsistencyError,
                      verifier.verify_sth_temporal_consistency,
                      old_sth, new_sth)
    # But identical STHs are OK
    self.assertTrue(
        verifier.verify_sth_temporal_consistency(old_sth, old_sth))
def __init__(self, ct_logs, db, temp_db_factory, monitor_state_dir):
    """Initialize from a CtLogs proto.

    Builds one monitor.Monitor per log entry in ct_logs.

    Args:
        ct_logs: CtLogs proto; each entry must have log_server, log_id
            and public_key_info set.
        db: database handle shared by all monitors.
        temp_db_factory: factory providing per-log temporary storage.
        monitor_state_dir: directory under which per-log state files
            are created.

    Raises:
        RuntimeError: if a log proto has missing or empty fields.
    """
    threading.Thread.__init__(self)
    self.__monitors = []
    self.__db = db
    for log in ct_logs.ctlog:
        if not log.log_server or not log.log_id or not log.public_key_info:
            raise RuntimeError("Cannot start monitor: log proto has "
                               "missing or empty fields: %s" % log)
        client = log_client.LogClient(log.log_server)
        hasher = merkle.TreeHasher()
        verifier = verify.LogVerifier(log.public_key_info,
                                      merkle.MerkleVerifier(hasher))
        # Fix: use the monitor_state_dir argument rather than reading
        # FLAGS.monitor_state_dir, so callers can actually override the
        # state directory (matches the async Monitor constructor).
        state_keeper = state.StateKeeper(monitor_state_dir + "/" +
                                         log.log_id)
        temp_db = temp_db_factory.create_storage(log.log_server)
        self.__monitors.append(
            monitor.Monitor(client, verifier, hasher, db, temp_db,
                            state_keeper))
    self.__last_update_start_time = 0
    self.__stopped = False
def test_verify_sth_for_bad_asn1_length(self):
    """Tampering with the ASN.1 length bytes of the STH signature
    (outer sequence at offset 5, integer r at 7, integer s at 42)
    is rejected, except where the ecdsa module silently slices."""
    verifier = verify.LogVerifier(LogVerifierTest.default_key_info)
    default_sth = LogVerifierTest.default_sth
    # The byte that encodes the length of the ASN.1 signature sequence
    i = 5
    # Decreasing the length truncates the sequence and causes a decoding
    # error.
    sth = client_pb2.SthResponse()
    sth.CopyFrom(default_sth)
    sth.tree_head_signature = (
        default_sth.tree_head_signature[:i] +
        chr(ord(default_sth.tree_head_signature[i]) - 1) +
        default_sth.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    # Increasing the length means there are not enough ASN.1 bytes left to
    # decode the sequence, however the ecdsa module silently slices it.
    # TODO(ekasper): contribute a patch to upstream and make the tests fail
    sth = client_pb2.SthResponse()
    sth.CopyFrom(default_sth)
    sth.tree_head_signature = (
        default_sth.tree_head_signature[:i] +
        chr(ord(default_sth.tree_head_signature[i]) + 1) +
        default_sth.tree_head_signature[i+1:])
    self.assertTrue(verifier.verify_sth(sth))
    # The byte that encodes the length of the first integer r in the
    # sequence (r, s). Modifying the length corrupts the second integer
    # offset and causes a decoding error.
    i = 7
    sth = client_pb2.SthResponse()
    sth.CopyFrom(default_sth)
    sth.tree_head_signature = (
        default_sth.tree_head_signature[:i] +
        chr(ord(default_sth.tree_head_signature[i]) - 1) +
        default_sth.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    sth = client_pb2.SthResponse()
    sth.CopyFrom(default_sth)
    sth.tree_head_signature = (
        default_sth.tree_head_signature[:i] +
        chr(ord(default_sth.tree_head_signature[i]) + 1) +
        default_sth.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    # The byte that encodes the length of the second integer s in the
    # sequence (r, s). Decreasing this length corrupts the integer, however
    # increased length is silently sliced, as above.
    i = 42
    sth = client_pb2.SthResponse()
    sth.CopyFrom(default_sth)
    sth.tree_head_signature = (
        default_sth.tree_head_signature[:i] +
        chr(ord(default_sth.tree_head_signature[i]) - 1) +
        default_sth.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    sth = client_pb2.SthResponse()
    sth.CopyFrom(default_sth)
    sth.tree_head_signature = (
        default_sth.tree_head_signature[:i] +
        chr(ord(default_sth.tree_head_signature[i]) + 1) +
        default_sth.tree_head_signature[i+1:])
    self.assertTrue(verifier.verify_sth(sth))
    # Trailing garbage is correctly detected.
    sth = client_pb2.SthResponse()
    sth.CopyFrom(default_sth)
    sth.tree_head_signature = (
        default_sth.tree_head_signature[:3] +
        # Correct outer length to include trailing garbage.
        chr(ord(default_sth.tree_head_signature[3]) + 1) +
        default_sth.tree_head_signature[4:]) + "\x01"
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
def test_verify_sth(self):
    """The unmodified default STH passes signature verification."""
    self.assertTrue(
        verify.LogVerifier(LogVerifierTest.default_key_info).verify_sth(
            LogVerifierTest.default_sth))
def test_verify_sth_for_bad_asn1_length(self):
    """Tampering with the ASN.1 length bytes of the STH signature
    (outer sequence at offset 5, integer r at 7, integer s at 42)
    raises EncodingError in every case, including trailing garbage."""
    verifier = verify.LogVerifier(self.key_info_fixture)
    sth_fixture = self.sth_fixture
    # The byte that encodes the length of the ASN.1 signature sequence
    i = 5
    # Decreasing the length truncates the sequence and causes a decoding
    # error.
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    # Increasing the length means there are not enough ASN.1 bytes left to
    # decode the sequence, however the ecdsa module silently slices it.
    # Our ECDSA verifier checks for it and will fail.
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(
        error.EncodingError, verifier.verify_sth, sth)
    # The byte that encodes the length of the first integer r in the
    # sequence (r, s). Modifying the length corrupts the second integer
    # offset and causes a decoding error.
    i = 7
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    # The byte that encodes the length of the second integer s in the
    # sequence (r, s). Increasing this length leaves bytes unread which
    # is now also detected in the verify_ecdsa module.
    i = 42
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
    # Trailing garbage is correctly detected.
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:3] +
        # Correct outer length to include trailing garbage.
        chr(ord(sth_fixture.tree_head_signature[3]) + 1) +
        sth_fixture.tree_head_signature[4:]) + "\x01"
    self.assertRaises(error.EncodingError, verifier.verify_sth, sth)
# Fetch entry `index` from the Google pilot log over HTTP, then build its
# Merkle audit path via DNS lookups. Relies on names defined elsewhere in
# this file: `index`, `keypem`, and `CTDNSLookup`.
logurl = 'http://ct.googleapis.com/pilot'
logdns = 'pilot.ct.googleapis.com'
response = urllib2.urlopen('%s/ct/v1/get-entries?start=%s&end=%s' %
                           (logurl, index, index))
j = response.read()
j = json.loads(j)
leaf_input = j['entries'][0]['leaf_input']
logging.info('leaf = %s', leaf_input)
leaf = base64.b64decode(leaf_input)
# Merkle leaf hash: SHA-256 over a 0x00 prefix byte plus the leaf input.
leaf_hash = hashlib.sha256(chr(0) + leaf).digest()
keyinfo = client_pb2.KeyInfo()
keyinfo.type = keyinfo.ECDSA
keyinfo.pem_key = keypem
log_verifier = verify.LogVerifier(keyinfo)
lookup = CTDNSLookup(logdns, log_verifier)
sth = lookup.GetSTH()
logging.info('sth = %s', sth)
logging.info('hash = %s', base64.b64encode(leaf_hash))
verifier = merkle.MerkleVerifier()
index = int(index)
audit_path = []
prev = None
apl = verifier.audit_path_length(index, sth.tree_size)
for level in range(0, apl):
    h = lookup.GetEntry(level, index, sth.tree_size)
    logging.info('hash = %s', base64.b64encode(h))
    # Only the first 32 bytes of the response are the node hash —
    # presumably the rest is padding; TODO confirm against CTDNSLookup.
    audit_path.append(h[:32])
def test_verify_sth_for_bad_asn1_length(self):
    """Tampering with the ASN.1 length bytes of the STH signature
    (outer sequence at offset 5, integer r at 7, integer s at 42)
    raises SignatureError in every case, including trailing garbage."""
    verifier = verify.LogVerifier(self.key_info_fixture)
    sth_fixture = self.sth_fixture
    # The byte that encodes the length of the ASN.1 signature sequence
    i = 5
    # Decreasing the length truncates the sequence and causes a decoding
    # error.
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.SignatureError, verifier.verify_sth, sth)
    # Increasing the length means there are not enough ASN.1 bytes left to
    # decode the sequence.
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.SignatureError, verifier.verify_sth, sth)
    # The byte that encodes the length of the first integer r in the
    # sequence (r, s). Modifying the length corrupts the second integer
    # offset and causes a decoding error.
    i = 7
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.SignatureError, verifier.verify_sth, sth)
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.SignatureError, verifier.verify_sth, sth)
    # The byte that encodes the length of the second integer s in the
    # sequence (r, s). Modifying the length corrupts the integer and causes
    # a decoding error.
    i = 42
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) - 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.SignatureError, verifier.verify_sth, sth)
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:i] +
        chr(ord(sth_fixture.tree_head_signature[i]) + 1) +
        sth_fixture.tree_head_signature[i+1:])
    self.assertRaises(error.SignatureError, verifier.verify_sth, sth)
    # Trailing garbage is correctly detected.
    sth = client_pb2.SthResponse()
    sth.CopyFrom(sth_fixture)
    sth.tree_head_signature = (
        sth_fixture.tree_head_signature[:3] +
        # Correct outer length to include trailing garbage.
        chr(ord(sth_fixture.tree_head_signature[3]) + 1) +
        sth_fixture.tree_head_signature[4:]) + "\x01"
    self.assertRaises(error.SignatureError, verifier.verify_sth, sth)
def test_verify_sth(self):
    """The unmodified STH fixture passes signature verification."""
    self.assertTrue(
        verify.LogVerifier(self.key_info_fixture).verify_sth(
            self.sth_fixture))