def test_store_retrieve_unauthenticated(omq, random_sn, sk, exclude):
    """Attempts to retrieve messages without authentication. This should fail (as of HF19)."""
    swarm_members = ss.random_swarm_members(ss.get_swarm(omq, random_sn, sk), 2, exclude)
    store_conn = omq.connect_remote(sn_address(swarm_members[0]))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    session_id = '05' + sk.verify_key.encode().hex()

    # Store a message for myself (storing needs no signature):
    store_req = {
        "pubkey": session_id,
        "timestamp": ts,
        "ttl": ttl,
        "data": base64.b64encode(b"abc 123").decode(),
    }
    stored = omq.request_future(store_conn, 'storage.store',
                                [json.dumps(store_req).encode()]).get()
    assert len(stored) == 1
    stored = json.loads(stored[0])

    # Every swarm member's confirmation must carry the expected message hash,
    # i.e. unpadded-base64 blake2b( TS || EXP || PUBKEY_BYTES || DATA ):
    expected_hash = blake2b(
        "{}{}".format(ts, exp).encode() + b'\x05' + sk.verify_key.encode() + b'abc 123',
        encoder=Base64Encoder).decode().rstrip('=')
    assert all(v['hash'] == expected_hash for v in stored['swarm'].values())

    # Retrieving with no signature (from a different swarm member) must be rejected:
    retrieve_conn = omq.connect_remote(sn_address(swarm_members[1]))
    result = omq.request_future(
        retrieve_conn, 'storage.retrieve',
        [json.dumps({"pubkey": session_id}).encode()]).get()
    assert result == [b'401', b'retrieve: request signature required']
def big_store(omq, random_sn, exclude):
    """
    Stores 120 random messages (12 rounds of 10 pipelined stores) for a freshly generated
    '03'-prefixed account and returns the connection, keys and stored hashes.

    NOTE(review): presumably used as a pytest fixture by other tests — confirm.
    NOTE(review): `ttl` and `msg_size` are not defined in this function; assumed to be
    module-level constants elsewhere in the file — confirm.

    Returns a dict with keys: 'conn' (open connection), 'sk' (SigningKey),
    'pk' (hex pubkey with 03 prefix), 'hashes' (list of stored message hashes).
    """
    sk = SigningKey.generate()
    swarm = ss.get_swarm(omq, random_sn, sk)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    pk = '03' + sk.verify_key.encode().hex()
    hashes = []
    for x in range(12):
        # Fire off a batch of 10 store requests before collecting any replies:
        s = []
        for y in range(10):
            ts = int(time.time() * 1000)
            exp = ts + ttl  # computed but not sent; the server derives expiry from ttl
            msg = nacl.utils.random(msg_size)
            s.append(omq.request_future(conn, 'storage.store', [json.dumps({
                "pubkey": pk,
                "timestamp": ts,
                "ttl": ttl,
                "data": base64.b64encode(msg).decode()}).encode()]))
        # Now collect the 10 pipelined replies and record the assigned hashes:
        for si in s:
            si = si.get()
            assert len(si) == 1
            si = json.loads(si[0])
            assert 'hash' in si
            hashes.append(si['hash'])
    return {'conn': conn, 'sk': sk, 'pk': pk, 'hashes': hashes}
def test_non_session_no_ed25519(omq, random_sn, sk, exclude):
    """
    Test that the session key hack doesn't work for non-Session addresses (i.e. when not using the
    05 prefix).
    """
    # Derive the x25519 keypair from the ed25519 key and use a netid=4 (non-Session) address:
    xsk = sk.to_curve25519_private_key()
    xpk = xsk.public_key  # NOTE(review): unused below; kept as-is
    swarm = ss.get_swarm(omq, random_sn, xsk, netid=4)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    msgs = ss.store_n(omq, conn, xsk, b"omg123", 4)  # seed messages (not otherwise checked)
    my_ss_id = '04' + xsk.public_key.encode().hex()
    ts = int(time.time() * 1000)
    # Sign the delete_all with the *ed25519* key and pass pubkey_ed25519 — this delegation
    # is only allowed for 05-prefixed (Session) pubkeys, so it must be rejected:
    to_sign = "delete_all{}".format(ts).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = {
        "pubkey": my_ss_id,
        "pubkey_ed25519": sk.verify_key.encode().hex(),
        "timestamp": ts,
        "signature": sig
    }
    resp = omq.request_future(conn, 'storage.delete_all', [json.dumps(params).encode()]).get()
    assert resp == [
        b'400',
        b'invalid request: pubkey_ed25519 is only permitted for 05[...] pubkeys'
    ]
def test_stale_delete_all(omq, random_sn, sk, exclude):
    """
    Verifies that delete_all is rejected when the signed timestamp is too far from the
    server's current time — in either direction (too old and too new).
    """
    swarm = ss.get_swarm(omq, random_sn, sk)
    sn = ss.random_swarm_members(swarm, 2, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    msgs = ss.store_n(omq, conn, sk, b"omg123", 5)  # seed messages (not otherwise checked)
    my_ss_id = '05' + sk.verify_key.encode().hex()

    # Timestamp 120s in the past: outside the allowed tolerance window.
    ts = int((time.time() - 120) * 1000)
    to_sign = "delete_all{}".format(ts).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = {"pubkey": my_ss_id, "timestamp": ts, "signature": sig}
    resp_too_old = omq.request_future(conn, 'storage.delete_all', [json.dumps(params).encode()])

    # Timestamp 120s in the future: also outside the window.
    ts = int((time.time() + 120) * 1000)
    to_sign = "delete_all{}".format(ts).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    # Bug fix: update the timestamp along with the signature.  Previously only
    # "signature" was replaced, so this request re-sent the stale *past* timestamp and
    # the future-timestamp rejection path was never actually exercised.
    params["timestamp"] = ts
    params["signature"] = sig
    resp_too_new = omq.request_future(conn, 'storage.delete_all', [json.dumps(params).encode()])

    assert resp_too_old.get() == [
        b'406', b'delete_all timestamp too far from current time'
    ]
    assert resp_too_new.get() == [
        b'406', b'delete_all timestamp too far from current time'
    ]
def test_expire_all(omq, random_sn, sk, exclude):
    """
    Stores 5 messages, then issues expire_all with an expiry equal to message 2's expiry and
    verifies which messages get their expiries shortened, including each swarm member's
    signed confirmation.

    NOTE(review): this file contains a second `def test_expire_all` further down; if both
    remain, this definition is shadowed at import time and never collected — confirm naming.
    """
    swarm = ss.get_swarm(omq, random_sn, sk)
    sns = ss.random_swarm_members(swarm, 2, exclude)
    conns = [omq.connect_remote(sn_address(sn)) for sn in sns]
    msgs = ss.store_n(omq, conns[0], sk, b"omg123", 5)
    my_ss_id = '05' + sk.verify_key.encode().hex()
    # Use message 2's expiry as the new cap for everything:
    ts = msgs[2]['req']['expiry']
    to_sign = "expire_all{}".format(ts).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = json.dumps({
        "pubkey": my_ss_id,
        "expiry": ts,
        "signature": sig
    }).encode()
    # Send via the *other* swarm member to exercise swarm propagation:
    resp = omq.request_future(conns[1], 'storage.expire_all', [params]).get()
    assert len(resp) == 1
    r = json.loads(resp[0])
    assert set(
        r['swarm'].keys()) == {x['pubkey_ed25519'] for x in swarm['snodes']}
    # 0 and 1 have later expiries than 2, so they should get updated; 2's expiry is already the
    # given value, and 3/4 are <= so shouldn't get updated.
    msg_hashes = sorted(msgs[i]['hash'] for i in (0, 1))
    # signature of ( PUBKEY_HEX || EXPIRY || UPDATED[0] || ... || UPDATED[N] )
    expected_signed = "".join((my_ss_id, str(ts), *msg_hashes)).encode()
    for k, v in r['swarm'].items():
        assert v['updated'] == msg_hashes
        edpk = VerifyKey(k, encoder=HexEncoder)
        edpk.verify(expected_signed, base64.b64decode(v['signature']))
    # Retrieve everything back and confirm the resulting expirations:
    r = omq.request_future(conns[0], 'storage.retrieve', [
        json.dumps({
            "pubkey": my_ss_id,
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode()
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert len(r['messages']) == 5
    assert r['messages'][0]['expiration'] == ts
    assert r['messages'][1]['expiration'] == ts
    assert r['messages'][2]['expiration'] == ts
    assert r['messages'][3]['expiration'] == msgs[3]['req']['expiry']
    assert r['messages'][4]['expiration'] == msgs[4]['req']['expiry']
def test_retrieve_subkey(omq, random_sn, sk, exclude):
    """
    Stores a namespaced message with the master key, then retrieves it using a delegated
    ("subkey") authorization: a blinded signing token derived from the master key for a
    different user's key.
    """
    swarm = ss.get_swarm(omq, random_sn, sk, 3)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # Store a message for myself, using master key
    s = omq.request_future(conn, 'storage.store', [
        json.dumps({
            "pubkey": '03' + sk.verify_key.encode().hex(),
            'namespace': 42,
            "timestamp": ts,
            "ttl": ttl,
            "data": base64.b64encode(b"abc 123").decode(),
            # non-public namespace (42 % 10 != 0) requires a store signature:
            "signature": sk.sign(f"store42{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode(),
        }).encode()
    ]).get()
    assert len(s) == 1
    s = json.loads(s[0])
    # Expected hash includes the namespace ('42') between pubkey and data:
    hash = blake2b("{}{}".format(ts, exp).encode() + b'\x03' + sk.verify_key.encode() +
                   b'42' + b'abc 123',
                   encoder=Base64Encoder).decode().rstrip('=')
    for k, v in s['swarm'].items():
        assert hash == v['hash']
    # Retrieve it using a subkey
    dude_sk = SigningKey.generate()
    # make_subkey presumably returns (subkey token c, blinded scalar d, blinded pubkey D)
    # — TODO confirm against its definition elsewhere in the project.
    c, d, D = make_subkey(sk, dude_sk.verify_key)
    to_sign = f"retrieve42{ts}".encode()
    sig = blinded_ed25519_signature(to_sign, dude_sk, d, D)
    r = omq.request_future(conn, 'storage.retrieve', [
        json.dumps({
            "pubkey": '03' + sk.verify_key.encode().hex(),
            "namespace": 42,
            "timestamp": ts,
            "signature": base64.b64encode(sig).decode(),
            "subkey": base64.b64encode(c).decode(),
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert r["hf"] >= [19, 0]  # subkey auth only exists from HF19
    assert len(r["messages"]) == 1
    assert r["messages"][0]["hash"] == hash
def test_delete(omq, random_sn, sk, exclude):
    """
    Stores 5 messages on a netid=2 address, deletes two of them by hash (plus one bogus hash),
    and verifies the sorted delete confirmations, each swarm member's signature, and that the
    remaining 3 messages are still retrievable.
    """
    swarm = ss.get_swarm(omq, random_sn, sk, netid=2)
    sns = ss.random_swarm_members(swarm, 2, exclude)
    conns = [omq.connect_remote(sn_address(sn)) for sn in sns]
    msgs = ss.store_n(omq, conns[0], sk, b"omg123", 5, netid=2)
    my_ss_id = '02' + sk.verify_key.encode().hex()
    ts = int(time.time() * 1000)
    actual_del_msgs = sorted(msgs[i]['hash'] for i in (1, 4))
    # Deliberately mis-sort the requested hashes to verify that the return is sorted as expected
    del_msgs = sorted(actual_del_msgs +
                      ['bepQtTaYrzcuCXO3fZkmk/h3xkMQ3vCh94i5HzLmj3I'],
                      reverse=True)
    to_sign = ("delete" + "".join(del_msgs)).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = json.dumps({
        "pubkey": my_ss_id,
        "messages": del_msgs,
        "signature": sig
    }).encode()
    # Send the delete through the second swarm member:
    resp = omq.request_future(conns[1], 'storage.delete', [params]).get()
    assert len(resp) == 1
    r = json.loads(resp[0])
    assert set(
        r['swarm'].keys()) == {x['pubkey_ed25519'] for x in swarm['snodes']}
    # ( PUBKEY_HEX || RMSG[0] || ... || RMSG[N] || DMSG[0] || ... || DMSG[M] )
    expected_signed = "".join((my_ss_id, *del_msgs, *actual_del_msgs)).encode()
    for k, v in r['swarm'].items():
        # Only the two real hashes are reported deleted (the bogus one matched nothing):
        assert v['deleted'] == actual_del_msgs
        edpk = VerifyKey(k, encoder=HexEncoder)
        try:
            edpk.verify(expected_signed, base64.b64decode(v['signature']))
        except nacl.exceptions.BadSignatureError as e:
            print("Bad signature from swarm member {}".format(k))
            raise e
    # The other 3 messages must still be present:
    r = omq.request_future(conns[0], 'storage.retrieve', [
        json.dumps({
            "pubkey": my_ss_id,
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode()
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert len(r['messages']) == 3
def test_batch_bt(omq, random_sn, sk, exclude):
    """
    Exercises storage.batch with a bencoded (bt) request body: two namespaced stores in one
    batch, with raw-bytes data and raw (non-base64) signatures, then checks both sub-results.
    """
    swarm = ss.get_swarm(omq, random_sn, sk, 3)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # Store two messages for myself
    s = omq.request_future(conn, 'storage.batch', [
        bt_serialize({
            "requests": [
                {
                    "method": "store",
                    "params": {
                        "pubkey": '03' + sk.verify_key.encode().hex(),
                        'namespace': 42,
                        "timestamp": ts,
                        "ttl": ttl,
                        # bt requests take raw bytes, not base64:
                        "data": b"abc 123",
                        "signature": sk.sign(f"store42{ts}".encode()).signature,
                    },
                },
                {
                    "method": "store",
                    "params": {
                        "pubkey": '03' + sk.verify_key.encode().hex(),
                        'namespace': 42,
                        "timestamp": ts,
                        "ttl": ttl,
                        "data": b"xyz 123",
                        # Same signed string: the store signature doesn't cover the data
                        "signature": sk.sign(f"store42{ts}".encode()).signature,
                    },
                },
            ],
        })
    ]).get()
    assert len(s) == 1
    s = bt_deserialize(s[0])
    assert b"results" in s
    assert len(s[b"results"]) == 2
    assert s[b"results"][0][b"code"] == 200
    assert s[b"results"][1][b"code"] == 200
    # Expected hashes: blake2b( TS || EXP || PUBKEY_BYTES || NAMESPACE || DATA ), unpadded b64:
    hash0 = blake2b("{}{}".format(ts, exp).encode() + b'\x03' + sk.verify_key.encode() +
                    b'42' + b'abc 123',
                    encoder=Base64Encoder).rstrip(b'=')
    hash1 = blake2b("{}{}".format(ts, exp).encode() + b'\x03' + sk.verify_key.encode() +
                    b'42' + b'xyz 123',
                    encoder=Base64Encoder).rstrip(b'=')
    assert s[b"results"][0][b"body"][b"hash"] == hash0
    assert s[b"results"][1][b"body"][b"hash"] == hash1
def test_legacy_closed_ns(omq, random_sn, sk, exclude):
    """
    Tests the special no-auth namespace (-10) used for legacy closed group messages: both
    storing and retrieving work without any signature.
    """
    # For legacy closed groups the secret key is generated but then immediately discarded; it's only
    # used to generate a primary key storage address:
    swarm = ss.get_swarm(omq, random_sn, sk)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # namespace -10 is a special, no-auth namespace for legacy closed group messages.
    # Consistency fix: send the JSON body as bytes (`.encode()`), matching every other
    # request in this file; previously this one alone passed a str.
    sclosed = omq.request_future(conn, 'storage.store', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "ttl": ttl,
        "namespace": -10,
        "data": base64.b64encode("blah blah".encode()).decode()}).encode()])
    sclosed = json.loads(sclosed.get()[0])
    # Expected hash includes the namespace ('-10') between pubkey and data:
    hash = blake2b("{}{}".format(ts, exp).encode() + b'\x05' + sk.verify_key.encode() +
                   b'-10' + b'blah blah',
                   encoder=Base64Encoder).decode().rstrip('=')
    assert len(sclosed["swarm"]) == len(swarm['snodes'])
    edkeys = {x['pubkey_ed25519'] for x in swarm['snodes']}
    for k, v in sclosed['swarm'].items():
        assert k in edkeys
        assert hash == v['hash']
        # Each member signs the message hash itself:
        edpk = VerifyKey(k, encoder=HexEncoder)
        edpk.verify(v['hash'].encode(), base64.b64decode(v['signature']))
    # NB: assumes the test machine is reasonably time synced
    assert(ts - 30000 <= sclosed['t'] <= ts + 30000)
    # Now retrieve it: this is the only namespace we can access without authentication
    r = omq.request_future(conn, 'storage.retrieve', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "namespace": -10,
    }).encode()])
    r = r.get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert len(r['messages']) == 1
    msg = r['messages'][0]
    assert base64.b64decode(msg['data']) == b'blah blah'
    assert msg['timestamp'] == ts
    assert msg['expiration'] == exp
    assert msg['hash'] == hash
def test_delete_all(omq, random_sn, sk, exclude):
    """
    Stores 5 messages, issues a signed delete_all via a different swarm member, verifies every
    member's signed deletion confirmation, then confirms a retrieve comes back empty.
    """
    swarm = ss.get_swarm(omq, random_sn, sk)
    sns = ss.random_swarm_members(swarm, 2, exclude)
    conns = [omq.connect_remote(sn_address(sn)) for sn in sns]
    msgs = ss.store_n(omq, conns[0], sk, b"omg123", 5)
    my_ss_id = '05' + sk.verify_key.encode().hex()
    ts = int(time.time() * 1000)
    to_sign = "delete_all{}".format(ts).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = json.dumps({
        "pubkey": my_ss_id,
        "timestamp": ts,
        "signature": sig
    }).encode()
    # Send through the *other* swarm member to exercise swarm propagation:
    resp = omq.request_future(conns[1], 'storage.delete_all', [params]).get()
    assert len(resp) == 1
    r = json.loads(resp[0])
    assert set(
        r['swarm'].keys()) == {x['pubkey_ed25519'] for x in swarm['snodes']}
    msg_hashes = sorted(m['hash'] for m in msgs)
    # signature of ( PUBKEY_HEX || TIMESTAMP || DELETEDHASH[0] || ... || DELETEDHASH[N] )
    expected_signed = "".join((my_ss_id, str(ts), *msg_hashes)).encode()
    for k, v in r['swarm'].items():
        assert v['deleted'] == msg_hashes
        edpk = VerifyKey(k, encoder=HexEncoder)
        edpk.verify(expected_signed, base64.b64decode(v['signature']))
    # Nothing should remain:
    r = omq.request_future(conns[0], 'storage.retrieve', [
        json.dumps({
            "pubkey": my_ss_id,
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode()
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert not r['messages']
def test_store_invalid_ns(omq, random_sn, sk, exclude):
    """
    Verifies store rejections: missing signature for a non-public namespace, an out-of-range
    namespace value, and a signature made by the wrong key.
    """
    swarm = ss.get_swarm(omq, random_sn, sk)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl  # NOTE(review): unused below; kept as-is
    # Attempt to store a message without authentication in a non-public (% 10 != 0) namespace:
    s42 = omq.request_future(conn, 'storage.store', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "ttl": ttl,
        "namespace": 42,
        "data": base64.b64encode("abc 123".encode()).decode()}).encode()])
    # Attempt to store a message in a too-big/too-small namespace:
    s32k = omq.request_future(conn, 'storage.store', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "ttl": ttl,
        "namespace": 32768,
        "data": base64.b64encode("abc 123".encode()).decode()}).encode()])
    # Bad signature:
    dude_sk = SigningKey.generate()
    sdude = omq.request_future(conn, 'storage.store', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "ttl": ttl,
        "namespace": -32123,
        # correctly-formed signed string, but signed with an unrelated key:
        "signature": dude_sk.sign(f"store-32123{ts}".encode(),
                                  encoder=Base64Encoder).signature.decode(),
        "data": base64.b64encode("abc 123".encode()).decode()}).encode()])
    assert s42.get() == [
        b'401', b'store: signature required to store to namespace 42']
    assert s32k.get() == [
        b'400',
        b"invalid request: Invalid value given for 'namespace': value out of range"]
    assert sdude.get() == [b'401', b"store signature verification failed"]
def test_store(omq, random_sn, sk, exclude):
    """Stores one message and validates every swarm member's signed store confirmation."""
    swarm = ss.get_swarm(omq, random_sn, sk)
    target = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(target))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    session_id = '05' + sk.verify_key.encode().hex()

    # Store a message for myself
    store_params = {
        "pubkey": session_id,
        "timestamp": ts,
        "ttl": ttl,
        "data": base64.b64encode("abc 123".encode()).decode(),
    }
    resp = omq.request_future(conn, 'storage.store',
                              [json.dumps(store_params).encode()]).get()
    assert len(resp) == 1
    resp = json.loads(resp[0])

    # Expected hash: unpadded-base64 blake2b( TS || EXP || PUBKEY_BYTES || DATA )
    expected_hash = blake2b(
        "{}{}".format(ts, exp).encode() + b'\x05' + sk.verify_key.encode() + b'abc 123',
        encoder=Base64Encoder).decode().rstrip('=')

    edkeys = {x['pubkey_ed25519'] for x in swarm['snodes']}
    assert len(resp["swarm"]) == len(swarm['snodes'])
    for member, confirmation in resp['swarm'].items():
        assert member in edkeys
        assert expected_hash == confirmation['hash']
        VerifyKey(member, encoder=HexEncoder).verify(
            confirmation['hash'].encode(),
            base64.b64decode(confirmation['signature']))

    # NB: assumes the test machine is reasonably time synced
    assert (ts - 30000 <= resp['t'] <= ts + 30000)
def test_store_ns(omq, random_sn, sk, exclude):
    """
    Namespaced store/retrieve: a public namespace (divisible by 10) needs no store signature,
    a private one does; retrieving from either requires a signature.
    """
    swarm = ss.get_swarm(omq, random_sn, sk)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # Store a message (publicly depositable namespace, divisible by 10)
    spub = omq.request_future(conn, 'storage.store', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "ttl": ttl,
        "namespace": 40,
        "data": base64.b64encode("abc 123".encode()).decode()}).encode()])
    # Store a message for myself in a private namespace (not divisible by 10)
    spriv = omq.request_future(conn, 'storage.store', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "ttl": ttl,
        "namespace": -42,
        "data": base64.b64encode("abc 123".encode()).decode(),
        "signature": sk.sign(f"store-42{ts}".encode(),
                             encoder=Base64Encoder).signature.decode()}).encode()])
    spub = json.loads(spub.get()[0])
    # Expected hash includes the namespace ('40') between pubkey and data:
    hpub = blake2b("{}{}".format(ts, exp).encode() + b'\x05' + sk.verify_key.encode() +
                   b'40' + b'abc 123',
                   encoder=Base64Encoder).decode().rstrip('=')
    assert len(spub["swarm"]) == len(swarm['snodes'])
    edkeys = {x['pubkey_ed25519'] for x in swarm['snodes']}
    for k, v in spub['swarm'].items():
        assert k in edkeys
        assert hpub == v['hash']
        edpk = VerifyKey(k, encoder=HexEncoder)
        edpk.verify(v['hash'].encode(), base64.b64decode(v['signature']))
    # NB: assumes the test machine is reasonably time synced
    assert(ts - 30000 <= spub['t'] <= ts + 30000)
    spriv = json.loads(spriv.get()[0])
    hpriv = blake2b("{}{}".format(ts, exp).encode() + b'\x05' + sk.verify_key.encode() +
                    b'-42' + b'abc 123',
                    encoder=Base64Encoder).decode().rstrip('=')
    assert len(spriv["swarm"]) == len(swarm['snodes'])
    edkeys = {x['pubkey_ed25519'] for x in swarm['snodes']}
    for k, v in spriv['swarm'].items():
        assert k in edkeys
        assert hpriv == v['hash']
        edpk = VerifyKey(k, encoder=HexEncoder)
        edpk.verify(v['hash'].encode(), base64.b64decode(v['signature']))
    # NB: assumes the test machine is reasonably time synced
    assert(ts - 30000 <= spriv['t'] <= ts + 30000)
    # Retrieve from both namespaces with valid signatures:
    rpub = omq.request_future(conn, 'storage.retrieve', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "namespace": 40,
        "signature": sk.sign(f"retrieve40{ts}".encode(),
                             encoder=Base64Encoder).signature.decode()}).encode()])
    rpriv = omq.request_future(conn, 'storage.retrieve', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "namespace": -42,
        "signature": sk.sign(f"retrieve-42{ts}".encode(),
                             encoder=Base64Encoder).signature.decode()}).encode()])
    # Even the public-store namespace requires a signature for *retrieval*:
    rdenied = omq.request_future(conn, 'storage.retrieve', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "namespace": 40
    }).encode()])
    rpub = rpub.get()
    assert len(rpub) == 1
    rpub = json.loads(rpub[0])
    assert len(rpub["messages"]) == 1
    assert rpub["messages"][0]["hash"] == hpub
    rpriv = rpriv.get()
    assert len(rpriv) == 1
    rpriv = json.loads(rpriv[0])
    assert len(rpriv["messages"]) == 1
    assert rpriv["messages"][0]["hash"] == hpriv
    assert rdenied.get() == [
        b'400', b"invalid request: Required field 'signature' missing"]
def test_delete_before(omq, random_sn, sk, exclude):
    """
    Stores 10 messages (with decreasing timestamps) and exercises delete_before in four
    phases: delete the oldest two, a no-match delete, delete most of the rest, then delete
    the final message.  Each phase verifies the per-member signed confirmations and the
    remaining message count.  Phase order matters: each phase depends on the previous
    deletions having happened.
    """
    swarm = ss.get_swarm(omq, random_sn, sk)
    sns = ss.random_swarm_members(swarm, 2, exclude)
    conns = [omq.connect_remote(sn_address(sn)) for sn in sns]
    msgs = ss.store_n(omq, conns[0], sk, b"omg123", 10)
    # store_n submits msgs with decreasing timestamps:
    assert all(msgs[i]['req']['timestamp'] > msgs[i + 1]['req']['timestamp']
               for i in range(len(msgs) - 1))
    my_ss_id = '05' + sk.verify_key.encode().hex()
    # Delete the last couple messages:
    ts = msgs[8]['req']['timestamp']
    expected_del = sorted(msgs[i]['hash'] for i in range(8, len(msgs)))
    to_sign = ("delete_before" + str(ts)).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = json.dumps({
        "pubkey": my_ss_id,
        "before": ts,
        "signature": sig
    }).encode()
    resp = omq.request_future(conns[1], 'storage.delete_before', [params]).get()
    assert len(resp) == 1
    r = json.loads(resp[0])
    assert set(
        r['swarm'].keys()) == {x['pubkey_ed25519'] for x in swarm['snodes']}
    # ( PUBKEY_HEX || BEFORE || DELETEDHASH[0] || ... || DELETEDHASH[N] )
    expected_signed = "".join((my_ss_id, str(ts), *expected_del)).encode()
    for k, v in r['swarm'].items():
        assert v['deleted'] == expected_del
        edpk = VerifyKey(k, encoder=HexEncoder)
        try:
            edpk.verify(expected_signed, base64.b64decode(v['signature']))
        except nacl.exceptions.BadSignatureError as e:
            print("Bad signature from swarm member {}".format(k))
            raise e
    r = omq.request_future(conns[0], 'storage.retrieve', [
        json.dumps({
            "pubkey": my_ss_id,
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode()
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert len(r['messages']) == 8
    # Delete with no matches:
    ts = msgs[7]['req']['timestamp'] - 1
    to_sign = ("delete_before" + str(ts)).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = json.dumps({
        "pubkey": my_ss_id,
        "before": ts,
        "signature": sig
    }).encode()
    resp = omq.request_future(conns[0], 'storage.delete_before', [params]).get()
    assert len(resp) == 1
    r = json.loads(resp[0])
    assert set(
        r['swarm'].keys()) == {x['pubkey_ed25519'] for x in swarm['snodes']}
    # ( PUBKEY_HEX || BEFORE || DELETEDHASH[0] || ... || DELETEDHASH[N] )
    # (no deletions here, so the signed string is just pubkey + before value)
    expected_signed = "".join((my_ss_id, str(ts))).encode()
    for k, v in r['swarm'].items():
        assert not v['deleted']
        edpk = VerifyKey(k, encoder=HexEncoder)
        try:
            edpk.verify(expected_signed, base64.b64decode(v['signature']))
        except nacl.exceptions.BadSignatureError as e:
            print("Bad signature from swarm member {}".format(k))
            raise e
    r = omq.request_future(conns[0], 'storage.retrieve', [
        json.dumps({
            "pubkey": my_ss_id,
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode()
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert len(r['messages']) == 8
    # Delete most of the remaining:
    ts = msgs[1]['req']['timestamp']
    expected_del = sorted(msgs[i]['hash'] for i in range(1, 8))
    to_sign = ("delete_before" + str(ts)).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = json.dumps({
        "pubkey": my_ss_id,
        "before": ts,
        "signature": sig
    }).encode()
    resp = omq.request_future(conns[0], 'storage.delete_before', [params]).get()
    assert len(resp) == 1
    r = json.loads(resp[0])
    assert set(
        r['swarm'].keys()) == {x['pubkey_ed25519'] for x in swarm['snodes']}
    # ( PUBKEY_HEX || BEFORE || DELETEDHASH[0] || ... || DELETEDHASH[N] )
    expected_signed = "".join((my_ss_id, str(ts), *expected_del)).encode()
    for k, v in r['swarm'].items():
        assert v['deleted'] == expected_del
        edpk = VerifyKey(k, encoder=HexEncoder)
        try:
            edpk.verify(expected_signed, base64.b64decode(v['signature']))
        except nacl.exceptions.BadSignatureError as e:
            print("Bad signature from swarm member {}".format(k))
            raise e
    r = omq.request_future(conns[0], 'storage.retrieve', [
        json.dumps({
            "pubkey": my_ss_id,
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode()
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert len(r['messages']) == 1
    # Delete the last one
    ts = msgs[0]['req']['timestamp'] + 1
    expected_del = [msgs[0]['hash']]
    to_sign = ("delete_before" + str(ts)).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = json.dumps({
        "pubkey": my_ss_id,
        "before": ts,
        "signature": sig
    }).encode()
    resp = omq.request_future(conns[1], 'storage.delete_before', [params]).get()
    assert len(resp) == 1
    r = json.loads(resp[0])
    assert set(
        r['swarm'].keys()) == {x['pubkey_ed25519'] for x in swarm['snodes']}
    # ( PUBKEY_HEX || BEFORE || DELETEDHASH[0] || ... || DELETEDHASH[N] )
    expected_signed = "".join((my_ss_id, str(ts), *expected_del)).encode()
    for k, v in r['swarm'].items():
        assert v['deleted'] == expected_del
        edpk = VerifyKey(k, encoder=HexEncoder)
        try:
            edpk.verify(expected_signed, base64.b64decode(v['signature']))
        except nacl.exceptions.BadSignatureError as e:
            print("Bad signature from swarm member {}".format(k))
            raise e
    r = omq.request_future(conns[1], 'storage.retrieve', [
        json.dumps({
            "pubkey": my_ss_id,
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode()
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert not r['messages']
def test_store_retrieve_authenticated(omq, random_sn, sk, exclude):
    """
    Stores and retrieves messages under both auth modes — an '05' (x25519/Session) address
    with delegated ed25519 signing, and an '03' (ed25519) address with direct signing — and
    checks that bad or missing signatures are rejected.
    """
    xsk = sk.to_curve25519_private_key()
    xpk = xsk.public_key
    sn_x = ss.random_swarm_members(ss.get_swarm(omq, random_sn, xsk), 1,
                                   exclude)[0]
    sn_ed = ss.random_swarm_members(ss.get_swarm(omq, random_sn, sk), 1,
                                    exclude)[0]
    conn_x = omq.connect_remote(sn_address(sn_x))
    conn_ed = omq.connect_remote(sn_address(sn_ed))
    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # Store message for myself, using both my ed25519 key and x25519 key to test different auth
    # modes
    s1 = omq.request_future(conn_x, 'storage.store', [
        json.dumps({
            "pubkey": '05' + xpk.encode().hex(),
            "timestamp": ts,
            "ttl": ttl,
            "data": base64.b64encode(b"abc 123").decode()
        }).encode()
    ])
    s2 = omq.request_future(conn_ed, 'storage.store', [
        json.dumps({
            "pubkey": '03' + sk.verify_key.encode().hex(),
            "timestamp": ts,
            "ttl": ttl,
            "data": base64.b64encode(b"def 456").decode()
        }).encode()
    ])
    s1 = s1.get()
    assert len(s1) == 1
    s1 = json.loads(s1[0])
    hash1 = blake2b("{}{}".format(ts, exp).encode() + b'\x05' + xpk.encode() +
                    b'abc 123',
                    encoder=Base64Encoder).decode().rstrip('=')
    assert all(v['hash'] == hash1 for v in s1['swarm'].values())
    s2 = s2.get()
    assert len(s2) == 1
    s2 = json.loads(s2[0])
    hash2 = blake2b("{}{}".format(ts, exp).encode() + b'\x03' +
                    sk.verify_key.encode() + b'def 456',
                    encoder=Base64Encoder).decode().rstrip('=')
    # One retrieve signature works for both accounts (it covers "retrieve" + timestamp):
    to_sign = "retrieve{}".format(ts).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    # Corrupt one character to make a syntactically valid but wrong signature:
    badsig = sig[0:4] + ('z' if sig[4] != 'z' else 'a') + sig[5:]
    r_good1 = omq.request_future(conn_x, 'storage.retrieve', [
        json.dumps({
            "pubkey": '05' + xpk.encode().hex(),
            "timestamp": ts,
            "signature": sig,
            "pubkey_ed25519": sk.verify_key.encode().hex()
        }).encode()
    ])
    r_good2 = omq.request_future(conn_ed, 'storage.retrieve', [
        json.dumps({
            "pubkey": '03' + sk.verify_key.encode().hex(),
            "timestamp": ts,
            "signature": sig
        }).encode()
    ])
    r_bad1 = omq.request_future(
        conn_x, 'storage.retrieve', [
            json.dumps({
                "pubkey": '05' + xpk.encode().hex(),
                "timestamp": ts,
                "signature": badsig,  # invalid sig
                "pubkey_ed25519": sk.verify_key.encode().hex()
            }).encode()
        ])
    r_bad2 = omq.request_future(
        conn_ed, 'storage.retrieve', [
            json.dumps({
                "pubkey": '03' + sk.verify_key.encode().hex(),
                "timestamp": ts,
                "signature": badsig  # invalid sig
            }).encode()
        ])
    r_bad3 = omq.request_future(
        conn_ed, 'storage.retrieve', [
            json.dumps({
                "pubkey": '03' + sk.verify_key.encode().hex(),
                "timestamp": ts,
                #"signature": badsig # has timestamp but missing sig
            }).encode()
        ])
    r_good1 = json.loads(r_good1.get()[0])
    assert len(r_good1['messages']) == 1
    msg = r_good1['messages'][0]
    assert msg['data'] == base64.b64encode(b'abc 123').decode()
    assert msg['timestamp'] == ts
    assert msg['expiration'] == exp
    assert msg['hash'] == hash1
    r_good2 = json.loads(r_good2.get()[0])
    assert len(r_good2['messages']) == 1
    msg = r_good2['messages'][0]
    assert msg['data'] == base64.b64encode(b'def 456').decode()
    assert msg['timestamp'] == ts
    assert msg['expiration'] == exp
    assert msg['hash'] == hash2
    assert r_bad1.get() == [b'401', b'retrieve signature verification failed']
    assert r_bad2.get() == [b'401', b'retrieve signature verification failed']
    assert r_bad3.get() == [
        b'400', b"invalid request: Required field 'signature' missing"
    ]
def test_ifelse(omq, random_sn, sk, exclude):
    """
    Exercises storage.ifelse condition evaluation (hf_at_least/hf_before, height_before/
    height_at_least, v_at_least), including missing then/else branches and deep nesting.

    Renamed from a duplicate `test_expire_all`: this block tests storage.ifelse, and the
    duplicated name shadowed the real test_expire_all earlier in the file so pytest never
    ran it.

    NOTE(review): `m_yes`/`m_no`/`b64_m_yes`/`b64_m_no` are assumed to be module-level
    message constants (raw and base64 forms) defined elsewhere in this file — confirm.
    """
    swarm = ss.get_swarm(omq, random_sn, sk)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))
    ts = int(time.time() * 1000)
    ttl = 86_400_000
    my_ss_id = '05' + sk.verify_key.encode().hex()

    def store_action(msg, ts):
        # Builds a batch/ifelse sub-request storing `msg` at timestamp `ts`:
        return {
            'method': 'store',
            'params': {
                'pubkey': my_ss_id,
                'timestamp': ts,
                'ttl': ttl,
                'data': msg
            }
        }

    r = []
    # [0] absurdly high HF requirement -> condition false, 'else' store runs:
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_at_least': [2000000]},
                'then': store_action(b64_m_yes, ts),
                'else': store_action(b64_m_no, ts),
            })
        ]))
    # [1] HF satisfied but height_before 123456 fails -> 'else':
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_at_least': [19], 'height_before': 123456},
                'then': store_action(b64_m_yes, ts + 1),
                'else': store_action(b64_m_no, ts + 1),
            })
        ]))
    # [2] both conditions hold -> 'then':
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_at_least': [19], 'height_before': 123456789},
                'then': store_action(b64_m_yes, ts + 2),
                'else': store_action(b64_m_no, ts + 2),
            })
        ]))
    # [3] plain HF check -> 'then':
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_at_least': [19]},
                'then': store_action(b64_m_yes, ts + 3),
                'else': store_action(b64_m_no, ts + 3),
            })
        ]))
    # [4] two-part HF version, no 'else' branch:
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_at_least': [19, 1]},
                'then': store_action(b64_m_yes, ts + 4),
            })
        ]))
    # [5] false condition with only a 'then' -> nothing executes:
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_before': [19]},
                'then': store_action(b64_m_yes, ts + 5),
            })
        ]))
    # [6] true condition with only an 'else' -> nothing executes:
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_at_least': [19]},
                'else': store_action(b64_m_yes, ts + 6),
            })
        ]))
    # [7] false condition with an 'else' -> 'else' executes:
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_before': [19]},
                'else': store_action(b64_m_no, ts + 7),
            })
        ]))
    # [8] deeply nested ifelse chain ending in a 3-store batch:
    r.append(
        omq.request_future(conn, 'storage.ifelse', [
            json.dumps({
                'if': {'hf_at_least': [19]},
                'then': {'method': 'ifelse', 'params': {
                    'if': {'hf_at_least': [19]},
                    'then': {'method': 'ifelse', 'params': {
                        'if': {'height_at_least': 100},
                        'then': {'method': 'ifelse', 'params': {
                            'if': {'v_at_least': [2, 2]},
                            'then': {'method': 'ifelse', 'params': {
                                'if': {'hf_before': [99999, 99]},
                                'then': {'method': 'batch', 'params': {
                                    'requests': [
                                        store_action(b64_m_yes, ts + 8),
                                        store_action(b64_m_yes, ts + 9),
                                        store_action(b64_m_yes, ts + 10)
                                    ]
                                }}
                            }}
                        }}
                    }}
                }}
            })
        ]))

    # An ifelse is not allowed as a batch sub-request.
    # TODO(review): this future is never awaited or asserted — the expected error response
    # should be checked once confirmed against the server's batch validation.
    bad = omq.request_future(conn, 'storage.batch', [
        json.dumps({
            'requests': [{
                'method': 'ifelse',
                'params': {
                    'if': {'hf_at_least': [19]},
                    'then': {'method': 'info', 'params': {}},
                    'else': {'method': 'info', 'params': {}}
                }
            }]
        })
    ])

    def hash(body, ts):
        # Expected store hash: unpadded-base64 blake2b( TS || EXP || PUBKEY_BYTES || DATA )
        return blake2b(f"{ts}{ts+ttl}".encode() + b'\x05' +
                       sk.verify_key.encode() + body,
                       encoder=Base64Encoder).decode().rstrip('=')

    for i in range(len(r)):
        r[i] = r[i].get()
        print(r[i])
        assert len(r[i]) == 1
        r[i] = json.loads(r[i][0])
        # Cases 5 and 6 execute no branch, so they carry no 'result':
        if i not in (5, 6):
            assert 'result' in r[i] and r[i]['result'][
                'code'] == 200 and 'body' in r[i]['result']

    assert not r[0]['condition']
    assert r[0]['result']['body']['hash'] == hash(m_no, ts)
    assert not r[1]['condition']
    assert r[1]['result']['body']['hash'] == hash(m_no, ts + 1)
    assert r[2]['condition']
    assert r[2]['result']['body']['hash'] == hash(m_yes, ts + 2)
    assert r[3]['condition']
    assert r[3]['result']['body']['hash'] == hash(m_yes, ts + 3)
    assert r[4]['condition']
    assert r[4]['result']['body']['hash'] == hash(m_yes, ts + 4)
    assert not r[5]['condition']
    assert 'result' not in r[5]
    assert r[6]['condition']
    assert 'result' not in r[6]
    assert not r[7]['condition']
    assert r[7]['result']['body']['hash'] == hash(m_no, ts + 7)

    # Walk down the nested ifelse results of case 8, layer by layer:
    x = r[8]
    assert x['condition']  # hf >= 19
    assert x['result']['code'] == 200
    x = x['result']['body']
    assert x['condition']  # hf >= 19
    assert x['result']['code'] == 200
    x = x['result']['body']
    assert x['condition']  # height >= 100
    assert x['result']['code'] == 200
    x = x['result']['body']
    assert x['condition']  # v >= 2.2
    assert x['result']['code'] == 200
    x = x['result']['body']
    assert x['condition']  # hf < 99999.99
    assert x['result']['code'] == 200
    x = x['result']['body']
    x = x['results']
    assert len(x) == 3
    assert [y['code'] for y in x] == [200, 200, 200]
    assert x[0]['body']['hash'] == hash(m_yes, ts + 8)
    assert x[1]['body']['hash'] == hash(m_yes, ts + 9)
    assert x[2]['body']['hash'] == hash(m_yes, ts + 10)
def test_expire_extend(omq, random_sn, sk, exclude):
    """Stores 10 messages, extends expiries via a storage.sequence (one batch
    to +5min, one batch beyond the max TTL), and verifies the resulting
    expirations through a follow-up retrieve."""
    swarm = ss.get_swarm(omq, random_sn, sk)
    snode = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(snode))

    msgs = ss.store_n(omq, conn, sk, b"omg123", 10)

    now = int(time.time() * 1000)
    my_ss_id = '05' + sk.verify_key.encode().hex()

    # Everything we just stored should expire within the next minute:
    for msg in msgs:
        assert msg["req"]["expiry"] < now + 60_000

    exp_5min = now + 5 * 60 * 1000
    # Beyond max TTL, should get shortened to now + max TTL
    exp_long = now + 15 * 24 * 60 * 60 * 1000

    short_batch, long_batch = msgs[0:8], msgs[8:]

    def expire_request(batch, expiry):
        # Builds an 'expire' sub-request; signs "expire{expiry}{hash0}...{hashN}".
        sig = sk.sign(
            f"expire{expiry}{''.join(m['hash'] for m in batch)}".encode(),
            encoder=Base64Encoder).signature.decode()
        return {
            'method': 'expire',
            'params': {
                "pubkey": my_ss_id,
                "messages": [m["hash"] for m in batch],
                "expiry": expiry,
                "signature": sig,
            }
        }

    retrieve_request = {
        'method': 'retrieve',
        'params': {
            'pubkey': my_ss_id,
            'timestamp': now,
            'signature': sk.sign(f"retrieve{now}".encode(),
                                 encoder=Base64Encoder).signature.decode(),
        }
    }

    resp = omq.request_future(conn, 'storage.sequence', [
        json.dumps({
            'requests': [
                expire_request(short_batch, exp_5min),
                expire_request(long_batch, exp_long),
                retrieve_request,
            ]
        })
    ]).get()

    assert len(resp) == 1
    resp = json.loads(resp[0])
    assert [x['code'] for x in resp['results']] == [200, 200, 200]
    bodies = [x['body'] for x in resp['results']]

    # First expire: plain extension to now + 5 minutes.
    assert 5 <= len(bodies[0]['swarm']) <= 10
    for entry in bodies[0]['swarm'].values():
        assert entry['expiry'] == exp_5min
        assert entry['updated'] == sorted(m["hash"] for m in short_batch)

    # Second expire: requested expiry exceeded the max TTL.
    assert 5 <= len(bodies[1]['swarm']) <= 10
    for entry in bodies[1]['swarm'].values():
        # expiry should have been shortened to now + max TTL:
        assert entry['expiry'] < exp_long
        assert abs(entry['expiry']
                   - 1000 * (time.time() + 14 * 24 * 60 * 60)) <= 5000
        assert entry['updated'] == sorted(m["hash"] for m in long_batch)

    # The retrieve should reflect the new expirations on every message:
    retrieved = bodies[2]['messages']
    assert set(m['hash'] for m in retrieved) == set(m['hash'] for m in msgs)
    expirations = {m['hash']: m['expiration'] for m in retrieved}
    timestamps = {m['hash']: m['timestamp'] for m in retrieved}
    for m in msgs:
        assert timestamps[m['hash']] == m['req']['timestamp']
    for m in short_batch:
        assert expirations[m['hash']] == exp_5min
    for m in long_batch:
        assert abs(expirations[m['hash']]
                   - 1000 * (time.time() + 14 * 24 * 60 * 60)) <= 5000
def test_sequence(omq, random_sn, sk, exclude):
    """Runs a five-step storage.sequence (store / retrieve / store /
    delete_all / retrieve) and checks each intermediate result."""
    swarm = ss.get_swarm(omq, random_sn, sk, 3)
    snode = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(snode))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    pubkey = '05' + sk.verify_key.encode().hex()

    def signed(prefix):
        # Base64 signature over "{prefix}{ts}", as the authenticated RPCs expect.
        return sk.sign(f"{prefix}{ts}".encode(),
                       encoder=Base64Encoder).signature.decode()

    requests = [
        {
            "method": "store",
            "params": {
                "pubkey": pubkey,
                "timestamp": ts,
                "ttl": ttl,
                "data": base64.b64encode(b"abc 123").decode(),
            },
        },
        {
            "method": "retrieve",
            "params": {
                "pubkey": pubkey,
                "timestamp": ts,
                "signature": signed("retrieve"),
            },
        },
        {
            "method": "store",
            "params": {
                "pubkey": pubkey,
                "timestamp": ts,
                "ttl": ttl,
                "data": base64.b64encode(b"xyz 123").decode(),
            },
        },
        {
            "method": "delete_all",
            "params": {
                "pubkey": pubkey,
                "timestamp": ts,
                "signature": signed("delete_all"),
            },
        },
        {
            "method": "retrieve",
            "params": {
                "pubkey": pubkey,
                "timestamp": ts,
                "signature": signed("retrieve"),
            },
        },
    ]

    # Sequence some commands:
    reply = omq.request_future(
        conn, 'storage.sequence',
        [json.dumps({"requests": requests}).encode()]).get()

    assert len(reply) == 1
    reply = json.loads(reply[0])
    assert "results" in reply
    assert len(reply["results"]) == 5
    results = reply["results"]

    def msg_hash(body):
        # Message hash: blake2b(ts || exp || 0x05-prefixed pubkey || body), unpadded b64.
        return blake2b("{}{}".format(ts, exp).encode() + b'\x05'
                       + sk.verify_key.encode() + body,
                       encoder=Base64Encoder).decode().rstrip('=')

    h0 = msg_hash(b'abc 123')
    h1 = msg_hash(b'xyz 123')

    assert results[0]["body"]["hash"] == h0
    assert results[1]["body"]["messages"] == [{
        "data": "YWJjIDEyMw==",
        "expiration": ts + ttl,
        "hash": h0,
        "timestamp": ts,
    }]
    assert results[2]["body"]["hash"] == h1
    deletions = results[3]["body"]["swarm"]
    assert len(deletions) > 0
    for entry in deletions.values():
        assert set(entry["deleted"]) == {h0, h1}
    # After delete_all the final retrieve must come back empty:
    assert results[4]["body"]["messages"] == []
def test_session_auth(omq, random_sn, sk, exclude):
    """
    Session IDs follow Signal's messy convention of exposing the derived
    x25519 pubkey instead of the underlying ed25519 pubkey, so authenticated
    requests must ship the ed25519 key alongside.  This exercises SS's
    handling of that: missing key fails, mismatched key fails, correct key
    succeeds.
    """
    xsk = sk.to_curve25519_private_key()
    xpk = xsk.public_key
    swarm = ss.get_swarm(omq, random_sn, xsk)
    snode = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(snode))

    msgs = ss.store_n(omq, conn, xsk, b"omg123", 5)

    my_ss_id = '05' + xsk.public_key.encode().hex()

    ts = int(time.time() * 1000)
    to_sign = "delete_all{}".format(ts).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()

    def fetch_all():
        # Properly authenticated retrieve of everything under the session ID.
        reply = omq.request_future(conn, 'storage.retrieve', [
            json.dumps({
                "pubkey": my_ss_id,
                "pubkey_ed25519": sk.verify_key.encode().hex(),
                "timestamp": ts,
                "signature": sk.sign(f"retrieve{ts}".encode(),
                                     encoder=Base64Encoder).signature.decode(),
            }).encode()
        ]).get()
        assert len(reply) == 1
        return json.loads(reply[0])['messages']

    params = {"pubkey": my_ss_id, "timestamp": ts, "signature": sig}
    resp = omq.request_future(conn, 'storage.delete_all',
                              [json.dumps(params).encode()]).get()
    # Expect this to fail because we didn't pass the Ed25519 key
    assert resp == [b'401', b'delete_all signature verification failed']
    # Make sure nothing was actually deleted:
    assert len(fetch_all()) == 5

    # Try signing with some *other* ed25519 key, which should be detected as not corresponding to
    # the x25519 pubkey and thus still fail
    fake_sk = SigningKey.generate()
    fake_sig = fake_sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params['pubkey_ed25519'] = fake_sk.verify_key.encode().hex()
    params['signature'] = fake_sig
    resp = omq.request_future(conn, 'storage.delete_all',
                              [json.dumps(params).encode()]).get()
    assert resp == [b'401', b'delete_all signature verification failed']
    # Make sure nothing was actually deleted:
    assert len(fetch_all()) == 5

    # Now send along the correct ed pubkey to make it work
    params['pubkey_ed25519'] = sk.verify_key.encode().hex()
    params['signature'] = sig
    resp = omq.request_future(conn, 'storage.delete_all',
                              [json.dumps(params).encode()]).get()
    assert len(resp) == 1
    result = json.loads(resp[0])

    # Make sure SS is using the correct pubkey for the signatures (i.e. the session x25519 key)
    msg_hashes = sorted(m['hash'] for m in msgs)
    expected_signed = "".join((my_ss_id, str(ts), *msg_hashes)).encode()
    for snode_pk, info in result['swarm'].items():
        assert info['deleted'] == msg_hashes
        VerifyKey(snode_pk, encoder=HexEncoder).verify(
            expected_signed, base64.b64decode(info['signature']))

    # Verify deletion
    assert not fetch_all()
def test_failing_sequence(omq, random_sn, sk, exclude):
    """A failing step aborts the rest of a storage.sequence, while the same
    requests issued as a storage.batch still run every step."""
    swarm = ss.get_swarm(omq, random_sn, sk, 3)
    snode = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(snode))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    pubkey = '05' + sk.verify_key.encode().hex()

    commands = {
        "requests": [
            {
                "method": "store",
                "params": {
                    "pubkey": pubkey,
                    "timestamp": ts,
                    "namespace": 33,  # will fail because no auth
                    "ttl": ttl,
                    "data": base64.b64encode(b"abc 123").decode(),
                },
            },
            {
                "method": "retrieve",
                "params": {
                    "pubkey": pubkey,
                    "timestamp": ts,
                    "signature": sk.sign(f"retrieve{ts}".encode(),
                                         encoder=Base64Encoder).signature.decode(),
                },
            },
        ],
    }

    # Fire off the same payload as both a sequence and a batch:
    payload = json.dumps(commands).encode()
    seq_future = omq.request_future(conn, 'storage.sequence', [payload])
    batch_future = omq.request_future(conn, 'storage.batch', [payload])
    seq_resp = seq_future.get()
    batch_resp = batch_future.get()

    # The sequence should fail the store, and thus not attempt the retrieve:
    assert len(seq_resp) == 1
    seq_resp = json.loads(seq_resp[0])
    assert "results" in seq_resp
    assert len(seq_resp["results"]) == 1
    assert seq_resp["results"][0]["code"] == 401
    assert seq_resp["results"][0][
        "body"] == "store: signature required to store to namespace 33"

    # The same thing as a batch should fail but also do the retrieve:
    assert len(batch_resp) == 1
    batch_resp = json.loads(batch_resp[0])
    assert "results" in batch_resp
    assert len(batch_resp["results"]) == 2
    assert batch_resp["results"][0]["code"] == 401
    assert batch_resp["results"][0][
        "body"] == "store: signature required to store to namespace 33"
    assert batch_resp["results"][1]["code"] == 200
    assert batch_resp["results"][1]["body"]["messages"] == []
def test_expire(omq, random_sn, sk, exclude):
    """Stores 10 messages, then issues a storage.expire for a subset (plus one
    unknown hash) and verifies that:
      - every swarm member reports the same set of actually-updated hashes,
      - each member's signature over the update is valid,
      - a subsequent retrieve shows the new expiry only on the affected
        messages.
    """
    swarm = ss.get_swarm(omq, random_sn, sk)
    sns = ss.random_swarm_members(swarm, 2, exclude)
    conns = [omq.connect_remote(sn_address(sn)) for sn in sns]

    msgs = ss.store_n(omq, conns[0], sk, b"omg123", 10)

    my_ss_id = '05' + sk.verify_key.encode().hex()

    # Target expiry is message 6's own expiry: 0, 1, 5 get extended to it;
    # 6 already has it; 7 and 9 and the bogus hash are requested but not
    # updated (presumably because their expiries are already later — confirm
    # against storage-server expire semantics).
    ts = msgs[6]['req']['expiry']
    hashes = [msgs[i]['hash'] for i in (0, 1, 5, 6, 7, 9)
              ] + ['bepQtTaYrzcuCXO3fZkmk/h3xkMQ3vCh94i5HzLmj3I']
    # Hashes whose expiry actually changes:
    actual_update_msgs = sorted(msgs[i]['hash'] for i in (0, 1, 5))

    # Sort in reverse to make sure request ordering doesn't matter:
    hashes = sorted(hashes, reverse=True)
    to_sign = ("expire" + str(ts) + "".join(hashes)).encode()
    sig = sk.sign(to_sign, encoder=Base64Encoder).signature.decode()
    params = json.dumps({
        "pubkey": my_ss_id,
        "messages": hashes,
        "expiry": ts,
        "signature": sig
    }).encode()

    resp = omq.request_future(conns[1], 'storage.expire', [params]).get()

    assert len(resp) == 1
    r = json.loads(resp[0])
    # Every member of the swarm should have replied:
    assert set(
        r['swarm'].keys()) == {x['pubkey_ed25519'] for x in swarm['snodes']}

    # Each snode signs:
    # ( PUBKEY_HEX || EXPIRY || RMSG[0] || ... || RMSG[N] || UMSG[0] || ... || UMSG[M] )
    expected_signed = "".join(
        (my_ss_id, str(ts), *hashes, *actual_update_msgs)).encode()
    for k, v in r['swarm'].items():
        assert v['updated'] == actual_update_msgs
        edpk = VerifyKey(k, encoder=HexEncoder)
        try:
            edpk.verify(expected_signed, base64.b64decode(v['signature']))
        except nacl.exceptions.BadSignatureError as e:
            print("Bad signature from swarm member {}".format(k))
            raise e

    r = omq.request_future(conns[0], 'storage.retrieve', [
        json.dumps({
            "pubkey": my_ss_id,
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode()
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert len(r['messages']) == 10
    for i in range(10):
        # BUG FIX: the original `assert x == ts if cond else expiry` parsed as
        # `(x == ts) if cond else expiry` because `==` binds tighter than the
        # conditional expression, so the else branch just evaluated a truthy
        # int and asserted nothing.  Parenthesize so both branches actually
        # compare the expiration.
        expected_exp = ts if i in (0, 1, 5, 6) else msgs[i]['req']['expiry']
        assert r['messages'][i]['expiration'] == expected_exp
def test_store_retrieve_multiple(omq, random_sn, sk, exclude):
    """Stores two batches of messages through different swarm members and
    checks plain retrieval, last_hash-limited retrieval, and retrieval with
    an unknown last_hash (which should return everything)."""
    sns = ss.random_swarm_members(ss.get_swarm(omq, random_sn, sk), 2, exclude)
    conn1 = omq.connect_remote(sn_address(sns[0]))

    basemsg = b"This is my message \x00<--that's a null, this is invalid utf8: \x80\xff"

    # Store 5 messages
    msgs = ss.store_n(omq, conn1, sk, basemsg, 5)

    conn2 = omq.connect_remote(sn_address(sns[1]))
    ts = int(time.time() * 1000)

    def retrieve(conn, **extra):
        # Authenticated retrieve, with optional extra params (e.g. last_hash).
        req = {
            "pubkey": '05' + sk.verify_key.encode().hex(),
            "timestamp": ts,
            "signature": sk.sign(f"retrieve{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode(),
        }
        req.update(extra)
        reply = omq.request_future(conn, 'storage.retrieve',
                                   [json.dumps(req).encode()]).get()
        assert len(reply) == 1
        return json.loads(reply[0])['messages']

    def match_stored(retrieved, stored):
        # Each retrieved message must exactly match its stored counterpart.
        for m in retrieved:
            source = next(x for x in stored if x['hash'] == m['hash'])
            assert source['data'] == base64.b64decode(m['data'])
            assert source['req']['timestamp'] == m['timestamp']
            assert source['req']['expiry'] == m['expiration']

    # Retrieve all messages from the swarm (should give back the 5 we just stored):
    retrieved = retrieve(conn2)
    assert len(retrieved) == 5
    match_stored(retrieved, msgs)

    # Store 6 more messages
    basemsg = b'another msg'
    new_msgs = ss.store_n(omq, conn2, sk, basemsg, 6, 1)

    # Retrieve using a last_hash so that we should get back only the 6:
    retrieved = retrieve(conn1, last_hash=msgs[4]['hash'])
    assert len(retrieved) == 6
    match_stored(retrieved, new_msgs)

    # Give an unknown hash which should retrieve all:
    retrieved = retrieve(
        conn2, last_hash="0123456789012345678901234567890123456789123")
    assert len(retrieved) == 11