def test_wait_for_previous_lookups(self):
    """wait_for_previous_lookups blocks while a lookup for the same server is in flight."""
    kr = keyring.Keyring(self.hs)

    first_lookup = defer.Deferred()
    second_lookup = defer.Deferred()

    # run inside a logcontext so that the patched inlineCallbacks can check
    # it is doing the right thing with logcontexts.
    first_wait = run_in_context(
        kr.wait_for_previous_lookups, ["server1"], {"server1": first_lookup}
    )
    # no lookups were outstanding, so this resolves immediately
    self.successResultOf(first_wait)

    # a second wait must block: the first lookup hasn't completed yet
    second_wait = run_in_context(
        kr.wait_for_previous_lookups, ["server1"], {"server1": second_lookup}
    )
    self.assertFalse(second_wait.called)

    # complete the first lookup (in the sentinel context) ...
    first_lookup.callback(None)

    # ... which should release the second wait.
    self.successResultOf(second_wait)
def test_verify_json_dedupes_key_requests(self):
    """Two requests for the same key should be deduped."""
    key1 = signedjson.key.generate_signing_key(1)

    async def get_keys(keys_to_fetch):
        # there should only be one request object (with the max validity)
        self.assertEqual(keys_to_fetch, {"server1": {get_key_id(key1): 1500}})
        return {
            "server1": {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 1200)}
        }

    mock_fetcher = keyring.KeyFetcher()
    mock_fetcher.get_keys = Mock(side_effect=get_keys)
    kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))

    signed = {}
    signedjson.sign.sign_json(signed, "server1", key1)

    # first request (min validity 500) should succeed; the second (1500)
    # should fail because the fetched key expires at 1200.
    results = kr.verify_json_objects_for_server(
        [("server1", signed, 500, "test1"), ("server1", signed, 1500, "test2")]
    )
    self.assertEqual(len(results), 2)
    self.get_success(results[0])

    e = self.get_failure(results[1], SynapseError).value
    self.assertEqual(e.errcode, "M_UNAUTHORIZED")
    self.assertEqual(e.code, 401)

    # the two verifications should have been collapsed into one fetch
    mock_fetcher.get_keys.assert_called_once()
def test_verify_for_local_server_unknown_key(self):
    """Local keys that we no longer have should be fetched via the fetcher"""
    # the key we'll sign things with (nb, not known to the Keyring)
    signing_key = signedjson.key.generate_signing_key("2")

    # a mock fetcher which hands back the key when asked
    async def get_keys(
        server_name: str, key_ids: List[str], minimum_valid_until_ts: int
    ) -> Dict[str, FetchKeyResult]:
        self.assertEqual(server_name, self.hs.hostname)
        self.assertEqual(key_ids, [get_key_id(signing_key)])
        return {
            get_key_id(signing_key): FetchKeyResult(get_verify_key(signing_key), 1200)
        }

    mock_fetcher = Mock()
    mock_fetcher.get_keys = Mock(side_effect=get_keys)

    kr = keyring.Keyring(
        self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
    )

    # sign a payload with the key the Keyring doesn't know about ...
    payload = {}
    signedjson.sign.sign_json(payload, self.hs.hostname, signing_key)

    # ... and check we can verify it.
    self.get_success(kr.verify_json_for_server(self.hs.hostname, payload, 0))
def test_get_keys_from_server(self):
    """Fetch a key directly from the origin server and check it is stored.

    Exercises the full round trip: a mocked /_matrix/key/v2/server response,
    verification of the returned key, and persistence into the server_keys
    store (including canonical-json encoding and validity timestamps).
    """
    # arbitrarily advance the clock a bit
    self.reactor.advance(100)

    SERVER_NAME = "server2"
    kr = keyring.Keyring(self.hs)
    testkey = signedjson.key.generate_signing_key("ver1")
    testverifykey = signedjson.key.get_verify_key(testkey)
    testverifykey_id = "ed25519:ver1"
    VALID_UNTIL_TS = 1000

    # valid response
    response = {
        "server_name": SERVER_NAME,
        "old_verify_keys": {},
        "valid_until_ts": VALID_UNTIL_TS,
        "verify_keys": {
            testverifykey_id: {
                "key": signedjson.key.encode_verify_key_base64(testverifykey)
            }
        },
    }
    signedjson.sign.sign_json(response, SERVER_NAME, testkey)

    def get_json(destination, path, **kwargs):
        # the keyring should ask the origin server for the specific key
        self.assertEqual(destination, SERVER_NAME)
        self.assertEqual(path, "/_matrix/key/v2/server/key1")
        return response

    self.http_client.get_json.side_effect = get_json

    server_name_and_key_ids = [(SERVER_NAME, ("key1",))]
    keys = self.get_success(kr.get_keys_from_server(server_name_and_key_ids))
    k = keys[SERVER_NAME][testverifykey_id]
    self.assertEqual(k, testverifykey)
    self.assertEqual(k.alg, "ed25519")
    self.assertEqual(k.version, "ver1")

    # check that the perspectives store is correctly updated
    lookup_triplet = (SERVER_NAME, testverifykey_id, None)
    key_json = self.get_success(
        self.hs.get_datastore().get_server_keys_json([lookup_triplet])
    )
    res = key_json[lookup_triplet]
    self.assertEqual(len(res), 1)

    res = res[0]
    self.assertEqual(res["key_id"], testverifykey_id)
    self.assertEqual(res["from_server"], SERVER_NAME)
    self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
    self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)

    # we expect it to be encoded as canonical json *before* it hits the db
    self.assertEqual(
        bytes(res["key_json"]), canonicaljson.encode_canonical_json(response)
    )

    # change the server name: it should cause a rejection
    response["server_name"] = "OTHER_SERVER"
    self.get_failure(
        kr.get_keys_from_server(server_name_and_key_ids), KeyLookupError
    )
def test_verify_json_for_server(self):
    """Verify signed/unsigned json against a stored key, checking logcontexts.

    Fix: the original bound the result of ``verify_json_for_server`` to a
    local named ``defer``, shadowing the imported ``twisted.internet.defer``
    module within this function; renamed to ``d``.
    """
    kr = keyring.Keyring(self.hs)
    key1 = signedjson.key.generate_signing_key(1)
    yield self.hs.datastore.store_server_verify_key(
        "server9", "", time.time() * 1000, signedjson.key.get_verify_key(key1),
    )
    json1 = {}
    signedjson.sign.sign_json(json1, "server9", key1)

    sentinel_context = LoggingContext.current_context()

    with LoggingContext("one") as context_one:
        context_one.request = "one"

        # an unsigned object should fail, and should not leak the logcontext
        d = kr.verify_json_for_server("server9", {})
        try:
            yield d
            self.fail("should fail on unsigned json")
        except SynapseError:
            pass
        self.assertIs(LoggingContext.current_context(), context_one)

        # a correctly-signed object: while the verification is pending we
        # should have dropped back to the sentinel context ...
        d = kr.verify_json_for_server("server9", json1)
        self.assertFalse(d.called)
        self.assertIs(LoggingContext.current_context(), sentinel_context)
        yield d
        # ... and our own context should be restored once it completes.
        self.assertIs(LoggingContext.current_context(), context_one)
def test_verify_for_local_server_old_key(self):
    """Can also use keys in old_signing_keys for verification"""
    payload = {}
    signedjson.sign.sign_json(payload, self.hs.hostname, self.OLD_KEY)

    # an object signed with a retired local key should still verify
    kr = keyring.Keyring(self.hs)
    self.get_success(kr.verify_json_for_server(self.hs.hostname, payload, 0))
def test_verify_json_falls_back_to_other_fetchers(self):
    """If the first fetcher cannot provide a recent enough key, we fall back"""
    signing_key = signedjson.key.generate_signing_key(1)

    async def stale_fetch(
        server_name: str, key_ids: List[str], minimum_valid_until_ts: int
    ) -> Dict[str, FetchKeyResult]:
        # first fetcher: returns a key which expired at 800
        self.assertEqual(server_name, "server1")
        self.assertEqual(key_ids, [get_key_id(signing_key)])
        self.assertEqual(minimum_valid_until_ts, 1500)
        return {
            get_key_id(signing_key): FetchKeyResult(get_verify_key(signing_key), 800)
        }

    async def fresh_fetch(
        server_name: str, key_ids: List[str], minimum_valid_until_ts: int
    ) -> Dict[str, FetchKeyResult]:
        # second fetcher: returns a key valid until 1200
        self.assertEqual(server_name, "server1")
        self.assertEqual(key_ids, [get_key_id(signing_key)])
        self.assertEqual(minimum_valid_until_ts, 1500)
        return {
            get_key_id(signing_key): FetchKeyResult(get_verify_key(signing_key), 1200)
        }

    fetcher1 = Mock()
    fetcher1.get_keys = Mock(side_effect=stale_fetch)
    fetcher2 = Mock()
    fetcher2.get_keys = Mock(side_effect=fresh_fetch)
    kr = keyring.Keyring(self.hs, key_fetchers=(fetcher1, fetcher2))

    signed = {}
    signedjson.sign.sign_json(signed, "server1", signing_key)

    results = kr.verify_json_objects_for_server(
        [("server1", signed, 1200), ("server1", signed, 1500)]
    )
    self.assertEqual(len(results), 2)

    # the fallback key (valid until 1200) satisfies the first request ...
    self.get_success(results[0])

    # ... but not the second, which needs validity until 1500
    e = self.get_failure(results[1], SynapseError).value
    self.assertEqual(e.errcode, "M_UNAUTHORIZED")
    self.assertEqual(e.code, 401)

    # there should have been a single call to each fetcher
    fetcher1.get_keys.assert_called_once()
    fetcher2.get_keys.assert_called_once()
def test_verify_for_local_server(self):
    """Ensure that locally signed JSON can be verified without fetching keys
    over federation
    """
    kr = keyring.Keyring(self.hs)

    signed = {}
    signedjson.sign.sign_json(signed, self.hs.hostname, self.hs.signing_key)

    # verification of an object signed by ourselves should succeed
    self.get_success(kr.verify_json_for_server(self.hs.hostname, signed, 0))
def test_verify_json_falls_back_to_other_fetchers(self):
    """If the first fetcher cannot provide a recent enough key, we fall back"""
    signing_key = signedjson.key.generate_signing_key(1)

    def stale_fetch(keys_to_fetch):
        # first fetcher: hands back a key which expired at 800
        self.assertEqual(keys_to_fetch, {"server1": {get_key_id(signing_key): 1500}})
        return defer.succeed(
            {
                "server1": {
                    get_key_id(signing_key): FetchKeyResult(
                        get_verify_key(signing_key), 800
                    )
                }
            }
        )

    def fresh_fetch(keys_to_fetch):
        # second fetcher: hands back a key valid until 1200
        self.assertEqual(keys_to_fetch, {"server1": {get_key_id(signing_key): 1500}})
        return defer.succeed(
            {
                "server1": {
                    get_key_id(signing_key): FetchKeyResult(
                        get_verify_key(signing_key), 1200
                    )
                }
            }
        )

    fetcher1 = keyring.KeyFetcher()
    fetcher1.get_keys = Mock(side_effect=stale_fetch)
    fetcher2 = keyring.KeyFetcher()
    fetcher2.get_keys = Mock(side_effect=fresh_fetch)
    kr = keyring.Keyring(self.hs, key_fetchers=(fetcher1, fetcher2))

    signed = {}
    signedjson.sign.sign_json(signed, "server1", signing_key)

    results = kr.verify_json_objects_for_server(
        [("server1", signed, 1200, "test1"), ("server1", signed, 1500, "test2")]
    )
    self.assertEqual(len(results), 2)

    # the request needing validity until 1200 is satisfied by the fallback ...
    self.get_success(results[0])

    # ... but the one needing 1500 is not
    e = self.get_failure(results[1], SynapseError).value
    self.assertEqual(e.errcode, "M_UNAUTHORIZED")
    self.assertEqual(e.code, 401)

    # there should have been a single call to each fetcher
    fetcher1.get_keys.assert_called_once()
    fetcher2.get_keys.assert_called_once()
def test_verify_json_for_server(self):
    """Objects signed with a stored key verify; unsigned objects fail fast."""
    kr = keyring.Keyring(self.hs)
    signing_key = signedjson.key.generate_signing_key(1)
    self.get_success(
        self.hs.datastore.store_server_verify_key(
            "server9",
            "",
            time.time() * 1000,
            signedjson.key.get_verify_key(signing_key),
        )
    )

    signed = {}
    signedjson.sign.sign_json(signed, "server9", signing_key)

    # an unsigned object should be rejected immediately
    d = _verify_json_for_server(kr, "server9", {})
    self.failureResultOf(d, SynapseError)

    # the signed object completes asynchronously, but succeeds
    d = _verify_json_for_server(kr, "server9", signed)
    self.assertFalse(d.called)
    self.get_success(d)
def test_verify_json_for_server_with_null_valid_until_ms(self):
    """Tests that we correctly handle key requests for keys we've stored
    with a null `ts_valid_until_ms`
    """
    # a fetcher which never returns any keys, so any refetch attempt fails
    mock_fetcher = keyring.KeyFetcher()
    mock_fetcher.get_keys = Mock(return_value=defer.succeed({}))

    kr = keyring.Keyring(
        self.hs, key_fetchers=(StoreKeyFetcher(self.hs), mock_fetcher)
    )
    key1 = signedjson.key.generate_signing_key(1)
    # store the key with a null valid_until_ms
    r = self.hs.datastore.store_server_verify_keys(
        "server9",
        time.time() * 1000,
        [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), None))],
    )
    self.get_success(r)
    json1 = {}
    signedjson.sign.sign_json(json1, "server9", key1)

    # should fail immediately on an unsigned object
    d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
    self.failureResultOf(d, SynapseError)

    # should fail on a signed object with a non-zero minimum_valid_until_ms,
    # as it tries to refetch the keys and fails.
    d = _verify_json_for_server(
        kr, "server9", json1, 500, "test signed non-zero min"
    )
    self.get_failure(d, SynapseError)

    # We expect the keyring tried to refetch the key once.
    mock_fetcher.get_keys.assert_called_once_with(
        {"server9": {get_key_id(key1): 500}}
    )

    # should succeed on a signed object with a 0 minimum_valid_until_ms
    d = _verify_json_for_server(
        kr, "server9", json1, 0, "test signed with zero min"
    )
    self.get_success(d)
def test_wait_for_previous_lookups(self):
    """wait_for_previous_lookups blocks on in-flight lookups for the same
    server, and handles logcontexts correctly while doing so.
    """
    sentinel_context = LoggingContext.current_context()

    kr = keyring.Keyring(self.hs)

    lookup_1_deferred = defer.Deferred()
    lookup_2_deferred = defer.Deferred()

    with LoggingContext("one") as context_one:
        context_one.request = "one"

        wait_1_deferred = kr.wait_for_previous_lookups(
            ["server1"], {"server1": lookup_1_deferred},
        )

        # there were no previous lookups, so the deferred should be ready
        self.assertTrue(wait_1_deferred.called)
        # ... so we should have preserved the LoggingContext.
        self.assertIs(LoggingContext.current_context(), context_one)
        wait_1_deferred.addBoth(self.check_context, "one")

    with LoggingContext("two") as context_two:
        context_two.request = "two"

        # set off another wait. It should block because the first lookup
        # hasn't yet completed.
        wait_2_deferred = kr.wait_for_previous_lookups(
            ["server1"], {"server1": lookup_2_deferred},
        )
        self.assertFalse(wait_2_deferred.called)
        # ... so we should have reset the LoggingContext.
        self.assertIs(LoggingContext.current_context(), sentinel_context)
        wait_2_deferred.addBoth(self.check_context, "two")

        # let the first lookup complete (in the sentinel context)
        lookup_1_deferred.callback(None)

        # now the second wait should complete and restore our
        # loggingcontext.
        yield wait_2_deferred
def test_verify_json_for_server(self):
    """Objects signed with a known, stored key verify; unsigned objects are
    rejected immediately.

    Fixes: "suceed" typo and removal of a dead commented-out assertion.
    """
    kr = keyring.Keyring(self.hs)
    key1 = signedjson.key.generate_signing_key(1)
    r = self.hs.datastore.store_server_verify_keys(
        "server9",
        time.time() * 1000,
        [("server9", get_key_id(key1), FetchKeyResult(get_verify_key(key1), 1000))],
    )
    self.get_success(r)
    json1 = {}
    signedjson.sign.sign_json(json1, "server9", key1)

    # should fail immediately on an unsigned object
    d = _verify_json_for_server(kr, "server9", {}, 0, "test unsigned")
    self.failureResultOf(d, SynapseError)

    # should succeed on a signed object
    d = _verify_json_for_server(kr, "server9", json1, 500, "test signed")
    self.get_success(d)
def test_get_keys_from_perspectives(self):
    """Fetch a key via a perspectives (notary) server and check that the
    response is verified and persisted correctly.
    """
    # arbitrarily advance the clock a bit
    self.reactor.advance(100)

    SERVER_NAME = "server2"
    kr = keyring.Keyring(self.hs)
    testkey = signedjson.key.generate_signing_key("ver1")
    testverifykey = signedjson.key.get_verify_key(testkey)
    testverifykey_id = "ed25519:ver1"
    VALID_UNTIL_TS = 200 * 1000

    # valid response
    response = {
        "server_name": SERVER_NAME,
        "old_verify_keys": {},
        "valid_until_ts": VALID_UNTIL_TS,
        "verify_keys": {
            testverifykey_id: {
                "key": signedjson.key.encode_verify_key_base64(testverifykey)
            }
        },
    }

    persp_resp = {
        "server_keys": [self.mock_perspective_server.get_signed_response(response)]
    }

    def post_json(destination, path, data, **kwargs):
        self.assertEqual(destination, self.mock_perspective_server.server_name)
        self.assertEqual(path, "/_matrix/key/v2/query")

        # check that the request is for the expected key
        q = data["server_keys"]
        self.assertEqual(list(q[SERVER_NAME].keys()), ["key1"])
        return persp_resp

    self.http_client.post_json.side_effect = post_json

    server_name_and_key_ids = [(SERVER_NAME, ("key1",))]
    keys = self.get_success(
        kr.get_keys_from_perspectives(server_name_and_key_ids)
    )
    self.assertIn(SERVER_NAME, keys)
    k = keys[SERVER_NAME][testverifykey_id]
    self.assertEqual(k, testverifykey)
    self.assertEqual(k.alg, "ed25519")
    self.assertEqual(k.version, "ver1")

    # check that the perspectives store is correctly updated
    lookup_triplet = (SERVER_NAME, testverifykey_id, None)
    key_json = self.get_success(
        self.hs.get_datastore().get_server_keys_json([lookup_triplet])
    )
    res = key_json[lookup_triplet]
    self.assertEqual(len(res), 1)

    res = res[0]
    self.assertEqual(res["key_id"], testverifykey_id)
    self.assertEqual(res["from_server"], self.mock_perspective_server.server_name)
    self.assertEqual(res["ts_added_ms"], self.reactor.seconds() * 1000)
    self.assertEqual(res["ts_valid_until_ms"], VALID_UNTIL_TS)

    # the stored key_json should be the canonical-json encoding of the
    # perspective server's signed response
    self.assertEqual(
        bytes(res["key_json"]),
        canonicaljson.encode_canonical_json(persp_resp["server_keys"][0]),
    )
def test_verify_json_objects_for_server_awaits_previous_requests(self):
    """A second verification for a server with an outstanding key request
    should wait for that request rather than starting a second fetch.
    """
    key1 = signedjson.key.generate_signing_key(1)

    kr = keyring.Keyring(self.hs)
    json1 = {}
    signedjson.sign.sign_json(json1, "server10", key1)

    persp_resp = {
        "server_keys": [
            self.mock_perspective_server.get_signed_key(
                "server10", signedjson.key.get_verify_key(key1)
            )
        ]
    }
    # the mocked perspectives request blocks until this fires
    persp_deferred = defer.Deferred()

    @defer.inlineCallbacks
    def get_perspectives(**kwargs):
        self.assertEquals(LoggingContext.current_context().request, "11")
        with logcontext.PreserveLoggingContext():
            yield persp_deferred
        defer.returnValue(persp_resp)

    self.http_client.post_json.side_effect = get_perspectives

    # start off a first set of lookups
    @defer.inlineCallbacks
    def first_lookup():
        with LoggingContext("11") as context_11:
            context_11.request = "11"

            res_deferreds = kr.verify_json_objects_for_server(
                [("server10", json1), ("server11", {})]
            )

            # the unsigned json should be rejected pretty quickly
            self.assertTrue(res_deferreds[1].called)
            try:
                yield res_deferreds[1]
                self.assertFalse("unsigned json didn't cause a failure")
            except SynapseError:
                pass

            self.assertFalse(res_deferreds[0].called)
            res_deferreds[0].addBoth(self.check_context, None)

            yield logcontext.make_deferred_yieldable(res_deferreds[0])

            # let verify_json_objects_for_server finish its work before we kill the
            # logcontext
            yield self.clock.sleep(0)

    d0 = first_lookup()

    # wait a tick for it to send the request to the perspectives server
    # (it first tries the datastore)
    self.pump()
    self.http_client.post_json.assert_called_once()

    # a second request for a server with outstanding requests
    # should block rather than start a second call
    @defer.inlineCallbacks
    def second_lookup():
        with LoggingContext("12") as context_12:
            context_12.request = "12"

            self.http_client.post_json.reset_mock()
            self.http_client.post_json.return_value = defer.Deferred()

            res_deferreds_2 = kr.verify_json_objects_for_server(
                [("server10", json1)]
            )
            res_deferreds_2[0].addBoth(self.check_context, None)
            yield logcontext.make_deferred_yieldable(res_deferreds_2[0])

            # let verify_json_objects_for_server finish its work before we kill the
            # logcontext
            yield self.clock.sleep(0)

    d2 = second_lookup()

    self.pump()
    # the second lookup must not have hit the perspectives server
    self.http_client.post_json.assert_not_called()

    # complete the first request
    persp_deferred.callback(persp_resp)
    self.get_success(d0)
    self.get_success(d2)
def test_verify_json_objects_for_server_awaits_previous_requests(self):
    """A second verification for a server with an outstanding key request
    should wait for that request rather than triggering a second fetch.
    """
    mock_fetcher = Mock()
    mock_fetcher.get_keys = Mock()
    kr = keyring.Keyring(self.hs, key_fetchers=(mock_fetcher,))

    # a signed object that we are going to try to validate
    key1 = signedjson.key.generate_signing_key(1)
    json1 = {}
    signedjson.sign.sign_json(json1, "server10", key1)

    # start off a first set of lookups. We make the mock fetcher block until this
    # deferred completes.
    first_lookup_deferred = Deferred()

    async def first_lookup_fetch(
        server_name: str, key_ids: List[str], minimum_valid_until_ts: int
    ) -> Dict[str, FetchKeyResult]:
        # self.assertEquals(current_context().request.id, "context_11")
        self.assertEqual(server_name, "server10")
        self.assertEqual(key_ids, [get_key_id(key1)])
        self.assertEqual(minimum_valid_until_ts, 0)

        # block until the test lets us proceed
        await make_deferred_yieldable(first_lookup_deferred)
        return {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)}

    mock_fetcher.get_keys.side_effect = first_lookup_fetch

    async def first_lookup():
        with LoggingContext("context_11", request=FakeRequest("context_11")):
            res_deferreds = kr.verify_json_objects_for_server(
                [("server10", json1, 0), ("server11", {}, 0)]
            )

            # the unsigned json should be rejected pretty quickly
            self.assertTrue(res_deferreds[1].called)
            try:
                await res_deferreds[1]
                self.assertFalse("unsigned json didn't cause a failure")
            except SynapseError:
                pass

            self.assertFalse(res_deferreds[0].called)
            res_deferreds[0].addBoth(self.check_context, None)

            await make_deferred_yieldable(res_deferreds[0])

    d0 = ensureDeferred(first_lookup())

    self.pump()
    mock_fetcher.get_keys.assert_called_once()

    # a second request for a server with outstanding requests
    # should block rather than start a second call
    async def second_lookup_fetch(
        server_name: str, key_ids: List[str], minimum_valid_until_ts: int
    ) -> Dict[str, FetchKeyResult]:
        # self.assertEquals(current_context().request.id, "context_12")
        return {get_key_id(key1): FetchKeyResult(get_verify_key(key1), 100)}

    mock_fetcher.get_keys.reset_mock()
    mock_fetcher.get_keys.side_effect = second_lookup_fetch
    # tracks progress of the second lookup: 0=not started, 1=waiting, 2=done
    second_lookup_state = [0]

    async def second_lookup():
        with LoggingContext("context_12", request=FakeRequest("context_12")):
            res_deferreds_2 = kr.verify_json_objects_for_server(
                [("server10", json1, 0)]
            )
            res_deferreds_2[0].addBoth(self.check_context, None)
            second_lookup_state[0] = 1
            await make_deferred_yieldable(res_deferreds_2[0])
            second_lookup_state[0] = 2

    d2 = ensureDeferred(second_lookup())

    self.pump()
    # the second request should be pending, but the fetcher should not yet have been
    # called
    self.assertEqual(second_lookup_state[0], 1)
    mock_fetcher.get_keys.assert_not_called()

    # complete the first request
    first_lookup_deferred.callback(None)

    # and now both verifications should succeed.
    self.get_success(d0)
    self.get_success(d2)
def test_verify_json_objects_for_server_awaits_previous_requests(self):
    """A second verification for a server with an outstanding perspectives
    request should block on it rather than issuing a second HTTP call,
    and logcontexts must be handled correctly throughout.
    """
    clock = Clock(reactor)
    key1 = signedjson.key.generate_signing_key(1)

    kr = keyring.Keyring(self.hs)
    json1 = {}
    signedjson.sign.sign_json(json1, "server10", key1)

    persp_resp = {
        "server_keys": [
            self.mock_perspective_server.get_signed_key(
                "server10", signedjson.key.get_verify_key(key1)
            ),
        ]
    }
    # the mocked perspectives request blocks until this fires
    persp_deferred = defer.Deferred()

    @defer.inlineCallbacks
    def get_perspectives(**kwargs):
        self.assertEquals(
            LoggingContext.current_context().request, "11",
        )
        with logcontext.PreserveLoggingContext():
            yield persp_deferred
        defer.returnValue(persp_resp)

    self.http_client.post_json.side_effect = get_perspectives

    with LoggingContext("11") as context_11:
        context_11.request = "11"

        # start off a first set of lookups
        res_deferreds = kr.verify_json_objects_for_server(
            [("server10", json1), ("server11", {})]
        )

        # the unsigned json should be rejected pretty quickly
        self.assertTrue(res_deferreds[1].called)
        try:
            yield res_deferreds[1]
            self.assertFalse("unsigned json didn't cause a failure")
        except SynapseError:
            pass

        self.assertFalse(res_deferreds[0].called)
        res_deferreds[0].addBoth(self.check_context, None)

        # wait a tick for it to send the request to the perspectives server
        # (it first tries the datastore)
        yield clock.sleep(1)  # XXX find out why this takes so long!
        self.http_client.post_json.assert_called_once()

        self.assertIs(LoggingContext.current_context(), context_11)

        context_12 = LoggingContext("12")
        context_12.request = "12"
        with logcontext.PreserveLoggingContext(context_12):
            # a second request for a server with outstanding requests
            # should block rather than start a second call
            self.http_client.post_json.reset_mock()
            self.http_client.post_json.return_value = defer.Deferred()

            res_deferreds_2 = kr.verify_json_objects_for_server(
                [("server10", json1)],
            )
            yield clock.sleep(1)
            self.http_client.post_json.assert_not_called()
            res_deferreds_2[0].addBoth(self.check_context, None)

        # complete the first request
        with logcontext.PreserveLoggingContext():
            persp_deferred.callback(persp_resp)
        self.assertIs(LoggingContext.current_context(), context_11)

        with logcontext.PreserveLoggingContext():
            yield res_deferreds[0]
            yield res_deferreds_2[0]