def _get_prev_events_and_state(self, txn, event_id, is_state=None):
    keyvalues = {
        "event_id": event_id,
    }

    if is_state is not None:
        keyvalues["is_state"] = bool(is_state)

    res = self._simple_select_list_txn(
        txn,
        table="event_edges",
        keyvalues=keyvalues,
        retcols=["prev_event_id", "is_state"],
    )

    hashes = self._get_prev_event_hashes_txn(txn, event_id)

    results = []
    for d in res:
        edge_hash = self._get_event_reference_hashes_txn(txn, d["prev_event_id"])
        edge_hash.update(hashes.get(d["prev_event_id"], {}))
        prev_hashes = {
            k: encode_base64(v)
            for k, v in edge_hash.items()
            if k == "sha256"
        }
        results.append((d["prev_event_id"], prev_hashes, d["is_state"]))

    return results
def _get_prev_events_and_state(self, txn, event_id, is_state=None):
    keyvalues = {
        "event_id": event_id,
    }

    if is_state is not None:
        keyvalues["is_state"] = is_state

    res = self._simple_select_list_txn(
        txn,
        table="event_edges",
        keyvalues=keyvalues,
        retcols=["prev_event_id", "is_state"],
    )

    hashes = self._get_prev_event_hashes_txn(txn, event_id)

    results = []
    for d in res:
        edge_hash = self._get_event_reference_hashes_txn(
            txn, d["prev_event_id"]
        )
        edge_hash.update(hashes.get(d["prev_event_id"], {}))
        prev_hashes = {
            k: encode_base64(v)
            for k, v in edge_hash.items()
            if k == "sha256"
        }
        results.append((d["prev_event_id"], prev_hashes, d["is_state"]))

    return results
def sign_json(json_object, signature_name, signing_key):
    """Sign the JSON object. Stores the signature in json_object["signatures"].

    Args:
        json_object (dict): The JSON object to sign.
        signature_name (str): The name of the signing entity.
        signing_key (syutil.crypto.SigningKey): The key to sign the JSON with.

    Returns:
        The modified, signed JSON object.
    """
    signatures = json_object.pop("signatures", {})
    unsigned = json_object.pop("unsigned", None)

    message_bytes = encode_canonical_json(json_object)
    signed = signing_key.sign(message_bytes)
    signature_base64 = encode_base64(signed.signature)

    key_id = "%s:%s" % (signing_key.alg, signing_key.version)
    signatures.setdefault(signature_name, {})[key_id] = signature_base64

    # logger.debug("SIGNING: %s %s %s", signature_name, key_id, message_bytes)

    json_object["signatures"] = signatures
    if unsigned is not None:
        json_object["unsigned"] = unsigned

    return json_object
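A minimal usage sketch for sign_json above (not part of the original module): it assumes PyNaCl is installed and that encode_canonical_json and encode_base64 are imported as in this file. The alg and version attributes are normally attached by the syutil key helpers; they are set by hand here purely for illustration.

import nacl.signing

# Hypothetical standalone example: generate an ed25519 key and label it the
# way the syutil key helpers would ("alg" and "version" are assumptions here).
signing_key = nacl.signing.SigningKey.generate()
signing_key.alg = "ed25519"
signing_key.version = "1"

signed = sign_json({"foo": "bar"}, "example.com", signing_key)
# signed["signatures"]["example.com"]["ed25519:1"] now holds the base64
# signature of the canonical JSON encoding of {"foo": "bar"}.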
def select_pdus(cursor):
    cursor.execute(
        "SELECT pdu_id, origin FROM pdus ORDER BY depth ASC"
    )

    ids = cursor.fetchall()

    pdu_tuples = store._get_pdu_tuples(cursor, ids)

    pdus = [Pdu.from_pdu_tuple(p) for p in pdu_tuples]

    reference_hashes = {}

    for pdu in pdus:
        try:
            if pdu.prev_pdus:
                print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus
                for pdu_id, origin, hashes in pdu.prev_pdus:
                    ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)]
                    hashes[ref_alg] = encode_base64(ref_hsh)
                    store._store_prev_pdu_hash_txn(
                        cursor, pdu.pdu_id, pdu.origin, pdu_id, origin,
                        ref_alg, ref_hsh
                    )
                print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus

            pdu = add_event_pdu_content_hash(pdu)
            ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu)
            reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh)
            store._store_pdu_reference_hash_txn(
                cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh
            )

            for alg, hsh_base64 in pdu.hashes.items():
                print alg, hsh_base64
                store._store_pdu_content_hash_txn(
                    cursor, pdu.pdu_id, pdu.origin, alg,
                    decode_base64(hsh_base64)
                )
        except:
            print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus
def test_sign_and_verify(self):
    self.assertIn('signatures', self.signed)
    self.assertIn('Alice', self.signed['signatures'])
    self.assertIn('mock:test', self.signed['signatures']['Alice'])
    self.assertEqual(
        self.signed['signatures']['Alice']['mock:test'],
        encode_base64('x_______')
    )
    verify_signed_json(self.signed, 'Alice', self.verkey)
def encode_signing_key_base64(key):
    """Encode a signing key as base64

    Args:
        key (SigningKey): A signing key to encode.

    Returns:
        base64 encoded string.
    """
    return encode_base64(key.encode())
def test_verify_fail(self):
    self.signed['signatures']['Alice']['mock:test'] = encode_base64(
        'not a signature'
    )
    print self.signed
    verkey = MockVerifyKey()
    with self.assertRaises(SignatureVerifyException):
        verify_signed_json(self.signed, 'Alice', self.verkey)
def response_json_object(key_server):
    verify_key_bytes = key_server.signing_key.verify_key.encode()
    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, key_server.tls_certificate
    )
    json_object = {
        u"server_name": key_server.server_name,
        u"signature_verify_key": encode_base64(verify_key_bytes),
        u"tls_certificate": encode_base64(x509_certificate_bytes)
    }
    signed_json = sign_json(
        json_object, key_server.server_name, key_server.signing_key
    )
    return signed_json
def select_v1_keys(connection):
    cursor = connection.cursor()
    cursor.execute("SELECT server_name, key_id, verify_key FROM server_signature_keys")
    rows = cursor.fetchall()
    cursor.close()
    results = {}
    for server_name, key_id, verify_key in rows:
        results.setdefault(server_name, {})[key_id] = encode_base64(verify_key)
    return results
def get_server_verify_key_v2_direct(self, server_name, key_ids): keys = {} for requested_key_id in key_ids: if requested_key_id in keys: continue (response, tls_certificate) = yield fetch_server_key( server_name, self.hs.tls_context_factory, path=(b"/_matrix/key/v2/server/%s" % ( urllib.quote(requested_key_id), )).encode("ascii"), ) if (u"signatures" not in response or server_name not in response[u"signatures"]): raise ValueError("Key response not signed by remote server") if "tls_fingerprints" not in response: raise ValueError("Key response missing TLS fingerprints") certificate_bytes = crypto.dump_certificate( crypto.FILETYPE_ASN1, tls_certificate ) sha256_fingerprint = hashlib.sha256(certificate_bytes).digest() sha256_fingerprint_b64 = encode_base64(sha256_fingerprint) response_sha256_fingerprints = set() for fingerprint in response[u"tls_fingerprints"]: if u"sha256" in fingerprint: response_sha256_fingerprints.add(fingerprint[u"sha256"]) if sha256_fingerprint_b64 not in response_sha256_fingerprints: raise ValueError("TLS certificate not allowed by fingerprints") response_keys = yield self.process_v2_response( from_server=server_name, requested_ids=[requested_key_id], response_json=response, ) keys.update(response_keys) yield defer.gatherResults( [ self.store_keys( server_name=key_server_name, from_server=server_name, verify_keys=verify_keys, ) for key_server_name, verify_keys in keys.items() ], consumeErrors=True ).addErrback(unwrapFirstError) defer.returnValue(keys)
def get_server_verify_key_v2_direct(self, server_name, key_ids): keys = {} for requested_key_id in key_ids: if requested_key_id in keys: continue (response, tls_certificate) = yield fetch_server_key( server_name, self.hs.tls_context_factory, path=(b"/_matrix/key/v2/server/%s" % (urllib.quote(requested_key_id), )).encode("ascii"), ) if (u"signatures" not in response or server_name not in response[u"signatures"]): raise ValueError("Key response not signed by remote server") if "tls_fingerprints" not in response: raise ValueError("Key response missing TLS fingerprints") certificate_bytes = crypto.dump_certificate( crypto.FILETYPE_ASN1, tls_certificate) sha256_fingerprint = hashlib.sha256(certificate_bytes).digest() sha256_fingerprint_b64 = encode_base64(sha256_fingerprint) response_sha256_fingerprints = set() for fingerprint in response[u"tls_fingerprints"]: if u"sha256" in fingerprint: response_sha256_fingerprints.add(fingerprint[u"sha256"]) if sha256_fingerprint_b64 not in response_sha256_fingerprints: raise ValueError("TLS certificate not allowed by fingerprints") response_keys = yield self.process_v2_response( from_server=server_name, requested_ids=[requested_key_id], response_json=response, ) keys.update(response_keys) yield defer.gatherResults( [ self.store_keys( server_name=key_server_name, from_server=server_name, verify_keys=verify_keys, ) for key_server_name, verify_keys in keys.items() ], consumeErrors=True).addErrback(unwrapFirstError) defer.returnValue(keys)
def add_auth_events(self, event):
    if event.type == RoomCreateEvent.TYPE:
        event.auth_events = []
        return

    auth_events = []

    key = (RoomPowerLevelsEvent.TYPE, "", )
    power_level_event = event.old_state_events.get(key)

    if power_level_event:
        auth_events.append(power_level_event.event_id)

    key = (RoomJoinRulesEvent.TYPE, "", )
    join_rule_event = event.old_state_events.get(key)

    key = (RoomMemberEvent.TYPE, event.user_id, )
    member_event = event.old_state_events.get(key)

    key = (RoomCreateEvent.TYPE, "", )
    create_event = event.old_state_events.get(key)

    if create_event:
        auth_events.append(create_event.event_id)

    if join_rule_event:
        join_rule = join_rule_event.content.get("join_rule")
        is_public = join_rule == JoinRules.PUBLIC if join_rule else False
    else:
        is_public = False

    if event.type == RoomMemberEvent.TYPE:
        e_type = event.content["membership"]
        if e_type in [Membership.JOIN, Membership.INVITE]:
            if join_rule_event:
                auth_events.append(join_rule_event.event_id)

        if member_event and not is_public:
            auth_events.append(member_event.event_id)
    elif member_event:
        if member_event.content["membership"] == Membership.JOIN:
            auth_events.append(member_event.event_id)

    hashes = yield self.store.get_event_reference_hashes(
        auth_events
    )
    hashes = [
        {
            k: encode_base64(v)
            for k, v in h.items()
            if k == "sha256"
        }
        for h in hashes
    ]
    event.auth_events = zip(auth_events, hashes)
def add_hashes_and_signatures(event, signature_name, signing_key,
                              hash_algorithm=hashlib.sha256):
    if hasattr(event, "old_state_events"):
        state_json_bytes = encode_canonical_json(
            [e.event_id for e in event.old_state_events.values()]
        )
        hashed = hash_algorithm(state_json_bytes)
        event.state_hash = {
            hashed.name: encode_base64(hashed.digest())
        }

    hashed = _compute_content_hash(event, hash_algorithm=hash_algorithm)

    if not hasattr(event, "hashes"):
        event.hashes = {}
    event.hashes[hashed.name] = encode_base64(hashed.digest())

    event.signatures = compute_event_signature(
        event,
        signature_name=signature_name,
        signing_key=signing_key,
    )
def add_event_hashes(self, event_ids):
    hashes = yield self.get_event_reference_hashes(
        event_ids
    )
    hashes = [
        {
            k: encode_base64(v)
            for k, v in h.items()
            if k == "sha256"
        }
        for h in hashes
    ]
    defer.returnValue(zip(event_ids, hashes))
def generate_config(cls, args, config_dir_path):
    super(ServerConfig, cls).generate_config(args, config_dir_path)
    base_key_name = os.path.join(config_dir_path, args.server_name)

    args.pid_file = os.path.abspath(args.pid_file)

    if not args.signing_key_path:
        args.signing_key_path = base_key_name + ".signing.key"

    if not os.path.exists(args.signing_key_path):
        with open(args.signing_key_path, "w") as signing_key_file:
            key = nacl.signing.SigningKey.generate()
            signing_key_file.write(encode_base64(key.encode()))
def response_json_object(server_config):
    verify_keys = {}
    for key in server_config.signing_key:
        verify_key_bytes = key.verify_key.encode()
        key_id = "%s:%s" % (key.alg, key.version)
        verify_keys[key_id] = encode_base64(verify_key_bytes)

    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, server_config.tls_certificate
    )
    json_object = {
        u"server_name": server_config.server_name,
        u"verify_keys": verify_keys,
        u"tls_certificate": encode_base64(x509_certificate_bytes)
    }
    for key in server_config.signing_key:
        json_object = sign_json(
            json_object, server_config.server_name, key,
        )
    return json_object
def response_json_object(self):
    verify_keys = {}
    for key in self.config.signing_key:
        verify_key_bytes = key.verify_key.encode()
        key_id = "%s:%s" % (key.alg, key.version)
        verify_keys[key_id] = {u"key": encode_base64(verify_key_bytes)}

    old_verify_keys = {}
    for key in self.config.old_signing_keys:
        key_id = "%s:%s" % (key.alg, key.version)
        verify_key_bytes = key.encode()
        old_verify_keys[key_id] = {
            u"key": encode_base64(verify_key_bytes),
            u"expired_ts": key.expired,
        }

    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, self.config.tls_certificate
    )
    sha256_fingerprint = sha256(x509_certificate_bytes).digest()

    json_object = {
        u"valid_until_ts": self.valid_until_ts,
        u"server_name": self.config.server_name,
        u"verify_keys": verify_keys,
        u"old_verify_keys": old_verify_keys,
        u"tls_fingerprints": [{
            u"sha256": encode_base64(sha256_fingerprint),
        }]
    }
    for key in self.config.signing_key:
        json_object = sign_json(
            json_object, self.config.server_name, key,
        )
    return json_object
def _get_latest_events_in_room(self, txn, room_id):
    sql = (
        "SELECT e.event_id, e.depth FROM events as e "
        "INNER JOIN event_forward_extremities as f "
        "ON e.event_id = f.event_id "
        "WHERE f.room_id = ?"
    )

    txn.execute(sql, (room_id, ))

    results = []
    for event_id, depth in txn.fetchall():
        hashes = self._get_event_reference_hashes_txn(txn, event_id)
        prev_hashes = {
            k: encode_base64(v)
            for k, v in hashes.items()
            if k == "sha256"
        }
        results.append((event_id, prev_hashes, depth))

    return results
def _get_auth_events(self, txn, event_id):
    auth_ids = self._simple_select_onecol_txn(
        txn,
        table="event_auth",
        keyvalues={
            "event_id": event_id,
        },
        retcol="auth_id",
    )

    results = []
    for auth_id in auth_ids:
        hashes = self._get_event_reference_hashes_txn(txn, auth_id)
        prev_hashes = {
            k: encode_base64(v)
            for k, v in hashes.items()
            if k == "sha256"
        }
        results.append((auth_id, prev_hashes))

    return results
def _async_render_GET(self, request):
    try:
        server_keys, certificate = yield fetch_server_key(
            self.server_name,
            self.key_server.ssl_context_factory
        )

        resp_server_name = server_keys[u"server_name"]
        verify_key_b64 = server_keys[u"signature_verify_key"]
        tls_certificate_b64 = server_keys[u"tls_certificate"]
        verify_key = VerifyKey(decode_base64(verify_key_b64))

        if resp_server_name != self.server_name:
            raise ValueError("Wrong server name '%s' != '%s'" % (
                resp_server_name, self.server_name
            ))

        x509_certificate_bytes = crypto.dump_certificate(
            crypto.FILETYPE_ASN1, certificate
        )

        if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
            raise ValueError("TLS certificate doesn't match")

        verify_signed_json(server_keys, self.server_name, verify_key)

        signed_json = sign_json(
            server_keys,
            self.key_server.server_name,
            self.key_server.signing_key
        )

        json_bytes = encode_canonical_json(signed_json)
        respond_with_json_bytes(request, 200, json_bytes)
    except Exception as e:
        json_bytes = encode_canonical_json({
            u"error": {u"code": 502, u"message": e.message}
        })
        respond_with_json_bytes(request, 502, json_bytes)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
    )
    args = parser.parse_args()
    logging.basicConfig()

    event_json = dictobj(json.load(args.input_json))

    algorithms = {
        "sha256": hashlib.sha256,
    }

    for alg_name in event_json.hashes:
        if check_event_content_hash(event_json, algorithms[alg_name]):
            print "PASS content hash %s" % (alg_name,)
        else:
            print "FAIL content hash %s" % (alg_name,)

    for algorithm in algorithms.values():
        name, h_bytes = compute_event_reference_hash(event_json, algorithm)
        print "Reference hash %s: %s" % (name, encode_base64(h_bytes))
def _get_latest_state_in_room(self, txn, room_id, type, state_key):
    event_ids = self._simple_select_onecol_txn(
        txn,
        table="state_forward_extremities",
        keyvalues={
            "room_id": room_id,
            "type": type,
            "state_key": state_key,
        },
        retcol="event_id",
    )

    results = []
    for event_id in event_ids:
        hashes = self._get_event_reference_hashes_txn(txn, event_id)
        prev_hashes = {
            k: encode_base64(v)
            for k, v in hashes.items()
            if k == "sha256"
        }
        results.append((event_id, prev_hashes))

    return results
def check_event_content_hash(event, hash_algorithm=hashlib.sha256): """Check whether the hash for this PDU matches the contents""" computed_hash = _compute_content_hash(event, hash_algorithm) logger.debug("Expecting hash: %s", encode_base64(computed_hash.digest())) if computed_hash.name not in event.hashes: raise SynapseError( 400, "Algorithm %s not in hashes %s" % ( computed_hash.name, list(event.hashes), ), Codes.UNAUTHORIZED, ) message_hash_base64 = event.hashes[computed_hash.name] try: message_hash_bytes = decode_base64(message_hash_base64) except: raise SynapseError( 400, "Invalid base64: %s" % (message_hash_base64,), Codes.UNAUTHORIZED, ) return message_hash_bytes == computed_hash.digest()
def check_event_content_hash(event, hash_algorithm=hashlib.sha256): """Check whether the hash for this PDU matches the contents""" name, expected_hash = compute_content_hash(event, hash_algorithm) logger.debug("Expecting hash: %s", encode_base64(expected_hash)) if name not in event.hashes: raise SynapseError( 400, "Algorithm %s not in hashes %s" % ( name, list(event.hashes), ), Codes.UNAUTHORIZED, ) message_hash_base64 = event.hashes[name] try: message_hash_bytes = decode_base64(message_hash_base64) except: raise SynapseError( 400, "Invalid base64: %s" % (message_hash_base64, ), Codes.UNAUTHORIZED, ) return message_hash_bytes == expected_hash
def select_pdus(cursor): cursor.execute("SELECT pdu_id, origin FROM pdus ORDER BY depth ASC") ids = cursor.fetchall() pdu_tuples = store._get_pdu_tuples(cursor, ids) pdus = [Pdu.from_pdu_tuple(p) for p in pdu_tuples] reference_hashes = {} for pdu in pdus: try: if pdu.prev_pdus: print "PROCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus for pdu_id, origin, hashes in pdu.prev_pdus: ref_alg, ref_hsh = reference_hashes[(pdu_id, origin)] hashes[ref_alg] = encode_base64(ref_hsh) store._store_prev_pdu_hash_txn(cursor, pdu.pdu_id, pdu.origin, pdu_id, origin, ref_alg, ref_hsh) print "SUCCESS", pdu.pdu_id, pdu.origin, pdu.prev_pdus pdu = add_event_pdu_content_hash(pdu) ref_alg, ref_hsh = compute_pdu_event_reference_hash(pdu) reference_hashes[(pdu.pdu_id, pdu.origin)] = (ref_alg, ref_hsh) store._store_pdu_reference_hash_txn(cursor, pdu.pdu_id, pdu.origin, ref_alg, ref_hsh) for alg, hsh_base64 in pdu.hashes.items(): print alg, hsh_base64 store._store_pdu_content_hash_txn(cursor, pdu.pdu_id, pdu.origin, alg, decode_base64(hsh_base64)) except: print "FAILED_", pdu.pdu_id, pdu.origin, pdu.prev_pdus
def add_hashes_and_signatures(event, signature_name, signing_key,
                              hash_algorithm=hashlib.sha256):
    # if hasattr(event, "old_state_events"):
    #     state_json_bytes = encode_canonical_json(
    #         [e.event_id for e in event.old_state_events.values()]
    #     )
    #     hashed = hash_algorithm(state_json_bytes)
    #     event.state_hash = {
    #         hashed.name: encode_base64(hashed.digest())
    #     }

    name, digest = compute_content_hash(event, hash_algorithm=hash_algorithm)

    if not hasattr(event, "hashes"):
        event.hashes = {}
    event.hashes[name] = encode_base64(digest)

    event.signatures = compute_event_signature(
        event,
        signature_name=signature_name,
        signing_key=signing_key,
    )
def _generate_event_json(self, txn, rows): events = [] for row in rows: d = dict(row) d.pop("stream_ordering", None) d.pop("topological_ordering", None) d.pop("processed", None) if "origin_server_ts" not in d: d["origin_server_ts"] = d.pop("ts", 0) else: d.pop("ts", 0) d.pop("prev_state", None) d.update(json.loads(d.pop("unrecognized_keys"))) d["sender"] = d.pop("user_id") d["content"] = json.loads(d["content"]) if "age_ts" not in d: # For compatibility d["age_ts"] = d.get("origin_server_ts", 0) d.setdefault("unsigned", {})["age_ts"] = d.pop("age_ts") outlier = d.pop("outlier", False) # d.pop("membership", None) d.pop("state_hash", None) d.pop("replaces_state", None) b = EventBuilder(d) b.internal_metadata.outlier = outlier events.append(b) for i, ev in enumerate(events): signatures = self._get_event_signatures_txn( txn, ev.event_id, ) ev.signatures = { n: { k: encode_base64(v) for k, v in s.items() } for n, s in signatures.items() } hashes = self._get_event_content_hashes_txn( txn, ev.event_id, ) ev.hashes = { k: encode_base64(v) for k, v in hashes.items() } prevs = self._get_prev_events_and_state(txn, ev.event_id) ev.prev_events = [ (e_id, h) for e_id, h, is_state in prevs if is_state == 0 ] # ev.auth_events = self._get_auth_events(txn, ev.event_id) hashes = dict(ev.auth_events) for e_id, hash in ev.prev_events: if e_id in hashes and not hash: hash.update(hashes[e_id]) # # if hasattr(ev, "state_key"): # ev.prev_state = [ # (e_id, h) # for e_id, h, is_state in prevs # if is_state == 1 # ] return [e.build() for e in events]
def fingerprint(certificate):
    finger = hashlib.sha256(certificate)
    return {"sha256": encode_base64(finger.digest())}
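A short sketch of how fingerprint above would typically be fed, matching the pattern used by the key-serving code elsewhere in this listing: the input is the DER (ASN.1) encoding of the TLS certificate. The tls_certificate name is an assumption, standing in for a pyOpenSSL X509 object loaded elsewhere.

# Hedged sketch: tls_certificate is assumed to be an OpenSSL.crypto.X509
# instance (e.g. loaded from the server's TLS config).
der_bytes = crypto.dump_certificate(crypto.FILETYPE_ASN1, tls_certificate)
print fingerprint(der_bytes)
# e.g. {"sha256": "<43 unpadded base64 characters>"}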
def _parse_events_txn(self, txn, rows):
    events = [self._parse_event_from_row(r) for r in rows]

    select_event_sql = (
        "SELECT * FROM events WHERE event_id = ? ORDER BY rowid asc"
    )

    for i, ev in enumerate(events):
        signatures = self._get_event_signatures_txn(
            txn, ev.event_id,
        )

        ev.signatures = {
            n: {
                k: encode_base64(v) for k, v in s.items()
            }
            for n, s in signatures.items()
        }

        hashes = self._get_event_content_hashes_txn(
            txn, ev.event_id,
        )

        ev.hashes = {
            k: encode_base64(v) for k, v in hashes.items()
        }

        prevs = self._get_prev_events_and_state(txn, ev.event_id)

        ev.prev_events = [
            (e_id, h)
            for e_id, h, is_state in prevs
            if is_state == 0
        ]

        ev.auth_events = self._get_auth_events(txn, ev.event_id)

        if hasattr(ev, "state_key"):
            ev.prev_state = [
                (e_id, h)
                for e_id, h, is_state in prevs
                if is_state == 1
            ]

        if hasattr(ev, "replaces_state"):
            # Load previous state_content.
            # FIXME (erikj): Handle multiple prev_states.
            cursor = txn.execute(
                select_event_sql,
                (ev.replaces_state,)
            )
            prevs = self.cursor_to_dict(cursor)
            if prevs:
                prev = self._parse_event_from_row(prevs[0])
                ev.prev_content = prev.content

        if not hasattr(ev, "redacted"):
            logger.debug("Doesn't have redacted key: %s", ev)
            ev.redacted = self._has_been_redacted_txn(txn, ev)

        if ev.redacted:
            # Get the redaction event.
            select_event_sql = "SELECT * FROM events WHERE event_id = ?"
            txn.execute(select_event_sql, (ev.redacted,))

            del_evs = self._parse_events_txn(
                txn, self.cursor_to_dict(txn)
            )

            if del_evs:
                ev = prune_event(ev)
                events[i] = ev
                ev.redacted_because = del_evs[0]

    return events
def get_server_verify_key(self, server_name, key_ids):
    """Finds a verification key for the server with one of the key ids.

    Args:
        server_name (str): The name of the server to fetch a key for.
        key_ids (list of str): The key_ids to check for.
    """
    # Check the datastore to see if we have one cached.
    cached = yield self.store.get_server_verify_keys(server_name, key_ids)

    if cached:
        defer.returnValue(cached[0])
        return

    # Try to fetch the key from the remote server.
    limiter = yield get_retry_limiter(
        server_name,
        self.clock,
        self.store,
    )

    with limiter:
        (response, tls_certificate) = yield fetch_server_key(
            server_name, self.hs.tls_context_factory
        )

    # Check the response.
    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, tls_certificate
    )

    if ("signatures" not in response
            or server_name not in response["signatures"]):
        raise ValueError("Key response not signed by remote server")

    if "tls_certificate" not in response:
        raise ValueError("Key response missing TLS certificate")

    tls_certificate_b64 = response["tls_certificate"]

    if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
        raise ValueError("TLS certificate doesn't match")

    verify_keys = {}
    for key_id, key_base64 in response["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_keys[key_id] = verify_key

    for key_id in response["signatures"][server_name]:
        if key_id not in response["verify_keys"]:
            raise ValueError(
                "Key response must include verification keys for all"
                " signatures"
            )
        if key_id in verify_keys:
            verify_signed_json(
                response,
                server_name,
                verify_keys[key_id]
            )

    # Cache the result in the datastore.
    time_now_ms = self.clock.time_msec()

    yield self.store.store_server_certificate(
        server_name,
        server_name,
        time_now_ms,
        tls_certificate,
    )

    for key_id, key in verify_keys.items():
        yield self.store.store_server_verify_key(
            server_name, server_name, time_now_ms, key
        )

    for key_id in key_ids:
        if key_id in verify_keys:
            defer.returnValue(verify_keys[key_id])
            return

    raise ValueError("No verification key found for given key ids")
def get_server_verify_key_v1_direct(self, server_name, key_ids):
    """Finds a verification key for the server with one of the key ids.

    Args:
        server_name (str): The name of the server to fetch a key for.
        key_ids (list of str): The key_ids to check for.
    """
    # Try to fetch the key from the remote server.
    (response, tls_certificate) = yield fetch_server_key(
        server_name, self.hs.tls_context_factory
    )

    # Check the response.
    x509_certificate_bytes = crypto.dump_certificate(
        crypto.FILETYPE_ASN1, tls_certificate
    )

    if ("signatures" not in response
            or server_name not in response["signatures"]):
        raise ValueError("Key response not signed by remote server")

    if "tls_certificate" not in response:
        raise ValueError("Key response missing TLS certificate")

    tls_certificate_b64 = response["tls_certificate"]

    if encode_base64(x509_certificate_bytes) != tls_certificate_b64:
        raise ValueError("TLS certificate doesn't match")

    # Cache the result in the datastore.
    time_now_ms = self.clock.time_msec()

    verify_keys = {}
    for key_id, key_base64 in response["verify_keys"].items():
        if is_signing_algorithm_supported(key_id):
            key_bytes = decode_base64(key_base64)
            verify_key = decode_verify_key_bytes(key_id, key_bytes)
            verify_key.time_added = time_now_ms
            verify_keys[key_id] = verify_key

    for key_id in response["signatures"][server_name]:
        if key_id not in response["verify_keys"]:
            raise ValueError(
                "Key response must include verification keys for all"
                " signatures"
            )
        if key_id in verify_keys:
            verify_signed_json(response, server_name, verify_keys[key_id])

    yield self.store.store_server_certificate(
        server_name,
        server_name,
        time_now_ms,
        tls_certificate,
    )

    yield self.store_keys(
        server_name=server_name,
        from_server=server_name,
        verify_keys=verify_keys,
    )

    defer.returnValue(verify_keys)
def _generate_event_json(self, txn, rows): events = [] for row in rows: d = dict(row) d.pop("stream_ordering", None) d.pop("topological_ordering", None) d.pop("processed", None) if "origin_server_ts" not in d: d["origin_server_ts"] = d.pop("ts", 0) else: d.pop("ts", 0) d.pop("prev_state", None) d.update(json.loads(d.pop("unrecognized_keys"))) d["sender"] = d.pop("user_id") d["content"] = json.loads(d["content"]) if "age_ts" not in d: # For compatibility d["age_ts"] = d.get("origin_server_ts", 0) d.setdefault("unsigned", {})["age_ts"] = d.pop("age_ts") outlier = d.pop("outlier", False) # d.pop("membership", None) d.pop("state_hash", None) d.pop("replaces_state", None) b = EventBuilder(d) b.internal_metadata.outlier = outlier events.append(b) for i, ev in enumerate(events): signatures = self._get_event_signatures_txn( txn, ev.event_id, ) ev.signatures = { n: {k: encode_base64(v) for k, v in s.items()} for n, s in signatures.items() } hashes = self._get_event_content_hashes_txn( txn, ev.event_id, ) ev.hashes = {k: encode_base64(v) for k, v in hashes.items()} prevs = self._get_prev_events_and_state(txn, ev.event_id) ev.prev_events = [(e_id, h) for e_id, h, is_state in prevs if is_state == 0] # ev.auth_events = self._get_auth_events(txn, ev.event_id) hashes = dict(ev.auth_events) for e_id, hash in ev.prev_events: if e_id in hashes and not hash: hash.update(hashes[e_id]) # # if hasattr(ev, "state_key"): # ev.prev_state = [ # (e_id, h) # for e_id, h, is_state in prevs # if is_state == 1 # ] return [e.build() for e in events]