def cert_gen(self, fn):
    from . import selfsigned
    a, b, c = selfsigned.generate_selfsigned_cert("nonsense.example")
    # This is NOT meant to be anyone's primary means of security! It is only for a very
    # basic layer that keeps casual remote attackers out. The password can also be reused
    # to put a bit of freetext in the domain. That is how insecure it is!!
    # Nonetheless it will probably stop most people IRL.
    # Also, this is not really base32; it's random freetext, and the user could use any
    # length he wants.
    self.password = base64.b32encode(os.urandom(8)).decode().replace("=", '').lower()
    with open(fn, "wt") as f:
        f.write(a.decode("utf-8"))
    os.chmod(fn, stat.S_IRWXU)
    with open(fn + '.private', "wt") as f:
        f.write(b.decode("utf-8"))
    os.chmod(fn + '.private', stat.S_IRWXU)
    # Hash the cert key once, then reuse it for both the attribute and the file.
    self.keyhash = blake2b(c, encoder=nacl.encoding.RawEncoder())[:20]
    with open(fn + '.hash', "wt") as f:
        f.write(self.password + '-' + self.keyhash.hex())
    os.chmod(fn + '.hash', stat.S_IRWXU)

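# Hedged sketch (not from the original source): the '.hash' file written above
# holds "<password>-<hex keyhash>". The discovery code later in this section
# (register, calcRollingCode) splits on '-' and keeps only the digest part, so
# the password never gets broadcast. 'fn' is the same path passed to cert_gen.
with open(fn + '.hash') as f:
    fullhash = f.read()
digest_hex = fullhash.split('-')[-1]   # the only part used for network lookups
assert len(bytes.fromhex(digest_hex)) == 20
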
def test_batch_bt(omq, random_sn, sk, exclude):
    swarm = ss.get_swarm(omq, random_sn, sk, 3)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # Store two messages for myself
    s = omq.request_future(conn, 'storage.batch', [
        bt_serialize({
            "requests": [
                {
                    "method": "store",
                    "params": {
                        "pubkey": '03' + sk.verify_key.encode().hex(),
                        'namespace': 42,
                        "timestamp": ts,
                        "ttl": ttl,
                        "data": b"abc 123",
                        "signature": sk.sign(f"store42{ts}".encode()).signature,
                    },
                },
                {
                    "method": "store",
                    "params": {
                        "pubkey": '03' + sk.verify_key.encode().hex(),
                        'namespace': 42,
                        "timestamp": ts,
                        "ttl": ttl,
                        "data": b"xyz 123",
                        "signature": sk.sign(f"store42{ts}".encode()).signature,
                    },
                },
            ],
        })
    ]).get()
    assert len(s) == 1
    s = bt_deserialize(s[0])
    assert b"results" in s
    assert len(s[b"results"]) == 2
    assert s[b"results"][0][b"code"] == 200
    assert s[b"results"][1][b"code"] == 200

    hash0 = blake2b("{}{}".format(ts, exp).encode() + b'\x03' + sk.verify_key.encode()
                    + b'42' + b'abc 123',
                    encoder=Base64Encoder).rstrip(b'=')
    hash1 = blake2b("{}{}".format(ts, exp).encode() + b'\x03' + sk.verify_key.encode()
                    + b'42' + b'xyz 123',
                    encoder=Base64Encoder).rstrip(b'=')
    assert s[b"results"][0][b"body"][b"hash"] == hash0
    assert s[b"results"][1][b"body"][b"hash"] == hash1

def _negotiate_encryption(self, server_pub_key, server_auth=None):
    six.print_('server public_key: %s' % server_pub_key.hex())
    self.server_box = None
    if blake2b(server_pub_key + get_cas_secret(), encoder=RawEncoder) != server_auth:
        raise RuntimeError('Bad Server Auth! %s' % server_auth)
    self.private_key = PrivateKey.generate()
    my_pub_key = bytes(self.private_key.public_key)
    six.print_('client public_key: %s' % my_pub_key.hex())
    auth = blake2b(my_pub_key + get_cas_secret(), encoder=RawEncoder)
    self._send(b'N' + my_pub_key + auth)
    resp = self.rfile.read(2)
    if resp == b'OK':
        self.server_box = Box(self.private_key, PublicKey(server_pub_key))
    else:
        six.print_(resp)

def welcome(self, remote_addr):
    self.stats['total-conn'] += 1
    self.stats['open-conn'] += 1
    if self.debug:
        print('CONNECT %s %s' % (self.stats['open-conn'], remote_addr))
    auth = blake2b(self.public_key + get_cas_secret(), encoder=RawEncoder)
    return self.public_key + auth

def run(self):
    # sleep at the beginning
    time.sleep(self.interval)
    while True:
        imagpath = self.orders_queue.get()
        logging.info(f"process image {imagpath}")
        with open(imagpath, 'rb') as fp:
            # calculate the digest
            hexdigest = blake2b(data=fp.read(), digest_size=64,
                                encoder=nacl.encoding.HexEncoder).decode("utf-8")
        logging.info(f"digest for {imagpath} is {hexdigest}")
        row = self.db.select("select caption from captions where hexdigest = ?",
                             (hexdigest,))
        if row is not None:
            # image exists
            caption = row.get('caption')
            logging.info(f"image {imagpath}, caption {caption}, already processed")
        else:
            logging.info(f"generate caption for {imagpath} started")
            try:
                # generate caption
                caption, p = self.ai.image_caption(imagpath)
            except Exception as e:
                logging.error(f"caption for {imagpath} error: {e}")
                # skip this image: caption/p are undefined if image_caption failed
                continue
            self.db.execute(
                "insert into captions(hexdigest,file_name,caption,probability) values (?,?,?,?)",
                (hexdigest, os.path.basename(imagpath), caption, p))
            logging.info(f"image {imagpath}, caption {caption}, {p}")

def read_key(password):
    password_raw = password.encode('utf8')
    # read the private.key file
    with open('keystore/private.key', 'r') as private_file:
        private_key = private_file.read()
    # read the salt file
    with open('keystore/salt.txt', 'r') as salt_file:
        salt_clean = salt_file.read()
    # convert the salt back to byte format
    derivation_salt = base64.b64decode(salt_clean)
    private_key = base64.b64decode(private_key)
    # recreate the blake2b key
    derived_key = blake2b(password_raw, salt=derivation_salt,
                          encoder=nacl.encoding.RawEncoder)
    # decrypt the private key
    box = nacl.secret.SecretBox(derived_key)
    private_key_decrypted = box.decrypt(private_key)
    return private_key_decrypted.decode('utf8')

def transfer() -> str:
    request_data = j.data.serializers.json.loads(request.body.read())
    destination = request_data["destination"]
    distributor_wallet = j.clients.stellar.get(FAUCET_WALLET)
    asset = distributor_wallet.get_asset("TFT")  # asset.code:asset.issuer
    import nacl
    from nacl import hash
    hashed_wallet = hash.blake2b(destination.encode("utf-8"),
                                 encoder=nacl.encoding.RawEncoder)
    try:
        distributor_wallet.transfer(
            destination_address=destination,
            amount=TRANSFER_AMOUNT,
            asset=f"{asset.code}:{asset.issuer}",
            memo_hash=hashed_wallet,
        )
    except Exception as e:
        raise j.exceptions.Base(e)
    return j.data.serializers.json.dumps({"data": "Transfer complete"})

def is_valid_hash(self, ballot_hash, ballot):
    """Tests that a ballot hash is correct."""
    ballot_data = json.dumps(ballot, ensure_ascii=False).encode('utf-8')
    new_hash = blake2b(ballot_data, encoder=Base64Encoder)
    return ballot_hash == new_hash

def test_store_retrieve_unauthenticated(omq, random_sn, sk, exclude):
    """Attempts to retrieve messages without authentication. This should fail (as of HF19)."""
    sns = ss.random_swarm_members(ss.get_swarm(omq, random_sn, sk), 2, exclude)
    conn1 = omq.connect_remote(sn_address(sns[0]))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # Store a message for myself
    s = omq.request_future(conn1, 'storage.store', [
        json.dumps({
            "pubkey": '05' + sk.verify_key.encode().hex(),
            "timestamp": ts,
            "ttl": ttl,
            "data": base64.b64encode(b"abc 123").decode()
        }).encode()
    ]).get()
    assert len(s) == 1
    s = json.loads(s[0])

    hash = blake2b("{}{}".format(ts, exp).encode() + b'\x05' + sk.verify_key.encode()
                   + b'abc 123',
                   encoder=Base64Encoder).decode().rstrip('=')
    assert all(v['hash'] == hash for v in s['swarm'].values())

    conn2 = omq.connect_remote(sn_address(sns[1]))
    r = omq.request_future(conn2, 'storage.retrieve', [
        json.dumps({
            "pubkey": '05' + sk.verify_key.encode().hex()
        }).encode()
    ]).get()
    assert r == [b'401', b'retrieve: request signature required']

def make_subkey(sk, subuser_pk: VerifyKey):
    # Typically we'll do this, though in theory we can generate any old 32-byte value for c:
    a = sodium.crypto_sign_ed25519_sk_to_curve25519(sk.encode() + sk.verify_key.encode())
    c = blake2b(sk.verify_key.encode() + subuser_pk.encode(), digest_size=32,
                encoder=RawEncoder)
    d = sodium.crypto_core_ed25519_scalar_mul(
        a,
        sodium.crypto_core_ed25519_scalar_add(
            c,
            blake2b(c + sk.verify_key.encode(), key=b'OxenSSSubkey',
                    digest_size=32, encoder=RawEncoder)))
    D = sodium.crypto_scalarmult_ed25519_base_noclamp(d)
    return c, d, D

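# A minimal hedged sketch (not from the original source) exercising make_subkey;
# it assumes 'sodium' is nacl.bindings, as the function above implies, and that
# crypto_core_ed25519_is_valid_point is available in this libsodium build.
from nacl.signing import SigningKey

master = SigningKey.generate()
device = SigningKey.generate()
c, d, D = make_subkey(master, device.verify_key)
assert len(c) == len(d) == len(D) == 32
assert sodium.crypto_core_ed25519_is_valid_point(D)  # D = d*G is a usable point
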
def do_unseal_vote(self, commitment_key, vote):
    commitment = blake2b(obj2bytes(vote), key=commitment_key)
    user_token = self._sealed_vote_cache.get(commitment)
    if user_token is None:
        raise Exception('VoteTallier: unsealed commitment not found in sealed votes')
    else:
        self.unsealed_votes[user_token] = (commitment_key, vote)

def test_retrieve_subkey(omq, random_sn, sk, exclude):
    swarm = ss.get_swarm(omq, random_sn, sk, 3)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # Store a message for myself, using the master key
    s = omq.request_future(conn, 'storage.store', [
        json.dumps({
            "pubkey": '03' + sk.verify_key.encode().hex(),
            'namespace': 42,
            "timestamp": ts,
            "ttl": ttl,
            "data": base64.b64encode(b"abc 123").decode(),
            "signature": sk.sign(f"store42{ts}".encode(),
                                 encoder=Base64Encoder).signature.decode(),
        }).encode()
    ]).get()
    assert len(s) == 1
    s = json.loads(s[0])
    hash = blake2b("{}{}".format(ts, exp).encode() + b'\x03' + sk.verify_key.encode()
                   + b'42' + b'abc 123',
                   encoder=Base64Encoder).decode().rstrip('=')
    for k, v in s['swarm'].items():
        assert hash == v['hash']

    # Retrieve it using a subkey
    dude_sk = SigningKey.generate()
    c, d, D = make_subkey(sk, dude_sk.verify_key)
    to_sign = f"retrieve42{ts}".encode()
    sig = blinded_ed25519_signature(to_sign, dude_sk, d, D)

    r = omq.request_future(conn, 'storage.retrieve', [
        json.dumps({
            "pubkey": '03' + sk.verify_key.encode().hex(),
            "namespace": 42,
            "timestamp": ts,
            "signature": base64.b64encode(sig).decode(),
            "subkey": base64.b64encode(c).decode(),
        }).encode()
    ]).get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert r["hf"] >= [19, 0]
    assert len(r["messages"]) == 1
    assert r["messages"][0]["hash"] == hash

def test_redis_kvs(self):
    """
    js_shell 'j.tools.memusagetest.test_redis_kvs()'
    """
    memusage_start = j.application.getMemoryUsage()
    print("MEM USAGE START:%s" % memusage_start)
    j.clients.redis.core_stop()
    r_classic = j.clients.redis.core_get()
    s = r_classic.register_script("%s/test.lua" % self._dirpath)
    rest = r_classic.evalsha(s.sha, 2, "aaa", "bbb")
    j.shell()
    r = j.data.bcdb.redis
    pids = j.sal.process.getPidsByFilter("redis-server")
    assert len(pids) == 1
    p = j.sal.process.getProcessObject(pids[0])
    info = p.memory_full_info()
    redis_mem_start = info.uss
    j.tools.timer.start("memusage")

    # pyblake2's blake2b is used here (it would shadow nacl.hash.blake2b anyway,
    # and unlike it exposes a .digest() method)
    from pyblake2 import blake2b

    nritems = 100000
    for item in range(nritems):
        hash = blake2b(str(item).encode(), digest_size=8).digest()
        # can do 900k per second
        try:
            r.execute("HSET", hash[0:2], hash[2:], b"aaaa")
        except:
            j.shell()

    j.tools.timer.stop(nritems)
    memusage_stop = j.application.getMemoryUsage()
    print("MEM USAGE STOP:%s" % memusage_stop)
    print("MEM USAGE difference in KB:%s" % (memusage_stop - memusage_start))
    # p = j.sal.process.getProcessObject(pids[0])
    info = p.memory_full_info()
    redis_mem_stop = info.uss
    print("REDIS MEM USAGE difference in KB:%s" %
          ((redis_mem_stop - redis_mem_start) / 1024))

def hmac_generation(password, key):
    """Returns a keyed-hash message authentication code given a message (password)
    and a secret key (key)."""
    # original:
    # return hmac.new(password, key, digestmod=hashlib.sha256).digest()

    # nacl hash integrity check without a key:
    # return hash.sha256(password, encoder=encoding.HexEncoder)

    # blake2b keys are limited to 64 bytes
    key = key[:64]
    return hash.blake2b(password, key=key, encoder=encoding.HexEncoder)

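# Hedged companion sketch (not from the original source): since the blake2b
# output above serves as an authentication tag, comparisons against a stored
# tag should be constant-time; hmac.compare_digest from the standard library
# does that. 'hmac_verify' is a hypothetical name.
import hmac

def hmac_verify(password, key, expected_tag):
    # compare_digest avoids leaking how many leading bytes of the tag matched
    return hmac.compare_digest(hmac_generation(password, key), expected_tag)
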
def print_encrypted_ccnumbers(ccnumbers, max_count):
    for i in range(0, max_count):
        random.shuffle(ccnumbers)
        for c in ccnumbers:
            if random.random() < 0.5:
                # keyed blake2b tag; HexEncoder returns bytes, so decode for printing
                h = blake2b(c, key=auth_key, encoder=nacl.encoding.HexEncoder)[0:8]
                print("\\\\x" + h.decode() + binascii.hexlify(box.encrypt(c)).decode()
                      + "\t" + c.decode())

def doDHTLookup(self):
    """Perform a DHT lookup using the public OpenDHT proxy service.

    We don't cache the result of this, we just rate limit, and let the connection
    thread cache the same data that it will get via the server. This is pretty
    separate from the normal caching.
    """
    # Lock is needed mostly to avoid confusion in the ratelimit logic when debugging
    with dhtlock:
        import requests
        if self.lastTriedDHT > (time.time() - 60):
            # Rate limit queries to the public DHT proxy to one per minute
            return []
        self.lastTriedDHT = time.time()

        # The rolling code changes the DHT key every 24 hours, ensuring that we don't
        # heavily load down any particular DHT node for more than a day, if there is
        # somehow an incredibly popular site.
        # It also gives less information to people who don't know the unhashed ID, who
        # may want to spy on when your service is up, or some crap like that.
        timePeriod = struct.pack("<Q", int(time.time() / (3600 * 24)))
        rollingCode = blake2b(bytes.fromhex(self.infohash) + timePeriod,
                              encoder=nacl.encoding.RawEncoder())[:20]

        # Use SHA1 here as that is the OpenDHT custom
        k = hashlib.sha1(rollingCode.hex().encode()).digest()[:20].hex()

        r = None
        lines = []
        # Prioritized DHT proxies list
        for i in getDHTProxies():
            logger.info("Trying DHT Proxy request to: " + i + k)
            try:
                r = requests.get(i + k, timeout=20, stream=True)
                for j in r.iter_lines():
                    if j:
                        lines.append(j)
                        break
                break
            except:
                logger.info(traceback.format_exc())
                logger.info("DHT Proxy request to: " + i + " failed for " + k)

        if lines:
            # This only tries one item, which is a little too easy to DoS, but that's
            # also part of the inherent problem with DHTs.
            # By randomizing, we allow for some very basic load balancing, although
            # nodes will stay pinned to their chosen node until failure.
            d = base64.b64decode(
                json.loads(random.choice(lines).strip())['data']).decode()
            # Return a list of candidates to try
            return parseHostsList(d)
        return []

def test_legacy_closed_ns(omq, random_sn, sk, exclude):
    # For legacy closed groups the secret key is generated but then immediately
    # discarded; it's only used to generate a primary key storage address:
    swarm = ss.get_swarm(omq, random_sn, sk)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # namespace -10 is a special, no-auth namespace for legacy closed group messages.
    sclosed = omq.request_future(conn, 'storage.store', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "timestamp": ts,
        "ttl": ttl,
        "namespace": -10,
        "data": base64.b64encode("blah blah".encode()).decode()})])

    sclosed = json.loads(sclosed.get()[0])

    hash = blake2b("{}{}".format(ts, exp).encode() + b'\x05' + sk.verify_key.encode()
                   + b'-10' + b'blah blah',
                   encoder=Base64Encoder).decode().rstrip('=')

    assert len(sclosed["swarm"]) == len(swarm['snodes'])
    edkeys = {x['pubkey_ed25519'] for x in swarm['snodes']}
    for k, v in sclosed['swarm'].items():
        assert k in edkeys
        assert hash == v['hash']

        edpk = VerifyKey(k, encoder=HexEncoder)
        edpk.verify(v['hash'].encode(), base64.b64decode(v['signature']))

    # NB: assumes the test machine is reasonably time synced
    assert(ts - 30000 <= sclosed['t'] <= ts + 30000)

    # Now retrieve it: this is the only namespace we can access without authentication
    r = omq.request_future(conn, 'storage.retrieve', [json.dumps({
        "pubkey": '05' + sk.verify_key.encode().hex(),
        "namespace": -10,
    }).encode()])

    r = r.get()
    assert len(r) == 1
    r = json.loads(r[0])
    assert len(r['messages']) == 1
    msg = r['messages'][0]
    assert base64.b64decode(msg['data']) == b'blah blah'
    assert msg['timestamp'] == ts
    assert msg['expiration'] == exp
    assert msg['hash'] == hash

def tally_votes():
    tally = {}
    for token, (commitment_key, vote) in vote_tallier.unsealed_votes.items():
        commitment = blake2b(obj2bytes(vote), key=commitment_key)
        ledger_commitment = vote_tallier.sealed_votes[token][0]
        assert ledger_commitment == commitment
        tally[vote] = tally.get(vote, 0) + 1
    return tally

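# Hedged standalone walk-through (not from the original source) of the
# commit/reveal scheme used by do_unseal_vote and tally_votes above, assuming
# the same obj2bytes helper is in scope.
import nacl.utils

commitment_key = nacl.utils.random(32)                    # secret until reveal
sealed = blake2b(obj2bytes("yes"), key=commitment_key)    # published commitment
# ... later the voter reveals (commitment_key, "yes") and anyone can re-check:
assert blake2b(obj2bytes("yes"), key=commitment_key) == sealed
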
def handle_N(self, message):
    """encryption key negotiation"""
    pub_key = PublicKey(message[:32])
    auth = message[32:64]
    if blake2b(bytes(pub_key) + get_cas_secret(), encoder=RawEncoder) != auth:
        raise RuntimeError('Bad Client Auth! %s' % auth)
    self.client_box = Box(self.private_key, pub_key)
    if self.debug:
        print('server public_key: %s' % self.public_key.hex())
        print('client public_key: %s' % bytes(pub_key).hex())
    return [b'OK']

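# For reference, a hedged sketch (not from the original source) of the frame
# that _negotiate_encryption sends and handle_N parses: a b'N' tag, a 32-byte
# Curve25519 public key, and a 32-byte blake2b tag over (pubkey || CAS secret).
from nacl.public import PrivateKey
from nacl.hash import blake2b
from nacl.encoding import RawEncoder

priv = PrivateKey.generate()
pub = bytes(priv.public_key)                                # 32 bytes
tag = blake2b(pub + get_cas_secret(), encoder=RawEncoder)   # 32 bytes (default size)
frame = b'N' + pub + tag
assert len(frame) == 1 + 32 + 32
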
def generate(sent, recv, incoming=False):
    if incoming:
        init_msg = recv
        resp_msg = sent
    else:
        init_msg = sent
        resp_msg = recv

    nonce_init_to_resp = blake2b(init_msg + resp_msg + b"Init -> Resp",
                                 encoder=RawEncoder)[0:Nonce.SIZE]
    nonce_resp_to_init = blake2b(init_msg + resp_msg + b"Resp -> Init",
                                 encoder=RawEncoder)[0:Nonce.SIZE]
    a = Nonce.from_bin(nonce_init_to_resp, 'big')
    b = Nonce.from_bin(nonce_resp_to_init, 'big')
    return {'local': a, 'remote': b} if incoming else {'local': b, 'remote': a}

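# Hedged symmetry check (not from the original source), assuming the Nonce
# helper above: both peers hash the same (init, resp) message pair, so each
# side's sending nonce is the other side's receiving nonce.
initiator = generate(sent=b"hello", recv=b"world", incoming=False)
responder = generate(sent=b"world", recv=b"hello", incoming=True)
assert initiator['local'] == responder['remote']
assert initiator['remote'] == responder['local']
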
def generate_token(self, email):
    token_uuid = uuid4().hex
    token_hash = blake2b(token_uuid.encode(), key=self.hmac_key)
    expires_in = datetime.now() + timedelta(hours=2)
    expires_in = mktime(expires_in.utctimetuple())
    token = {
        'token': token_hash.decode('utf-8'),
        'expiresIn': expires_in,
    }
    yield self.db.users.update_one({'email': email}, {'$set': token})
    return token

def unregister(self, hash):
    hash = hash.split("-")[-1]
    h = bytes.fromhex(hash)
    doublehash = blake2b(h, encoder=nacl.encoding.RawEncoder())[:20].hex()
    # Lookup by hash or rollingCode, store by fullhash.
    try:
        del self.activeHashes[hash]
    except KeyError:
        pass
    try:
        del self.activeHashes[doublehash]
    except KeyError:
        pass

def dhtPublish(self):
    # Publish this service to the DHT for WAN discovery.
    if not self.useDHT:
        return
    tryDHTConnect()

    timePeriod = struct.pack("<Q", int(time.time() / (3600 * 24)))
    rollingCode = blake2b(self.keyhash + timePeriod,
                          encoder=nacl.encoding.RawEncoder())[:20]

    # We never actually know if this will be available on the platform or not
    if dhtContainer[0]:
        try:
            import opendht as dht
            with dhtlock:
                dhtContainer[0].put(
                    dht.InfoHash.get(rollingCode.hex()),
                    dht.Value(getWanHostsString().encode()))
        except Exception:
            logger.info("Could not use local DHT node")
            logger.info(traceback.format_exc())
        return

    # Using a DHT proxy we can host a site without actually using the DHT directly.
    # This is for future direct-from-android hosting.
    for i in getDHTProxies():
        import requests
        try:
            data = {
                "data": base64.b64encode(getWanHostsString().encode()).decode(),
                "id": "id 1",
                "seq": 0,
                "type": 3
            }
            url = i + hashlib.sha1(rollingCode.hex().encode()).digest()[:20].hex()
            r = requests.post(url, data=json.dumps(data))
            r.raise_for_status()
            break
        except Exception:
            logger.info(traceback.format_exc())

def verify_endpoint():
    """Verifies if a JWT is valid."""
    if request.method == 'POST':
        if not app.config["JWT"]:
            return jsonify(message="JWT verification is not enabled"), 501

        request_json = request.get_json(force=True, cache=False)
        token = request_json.get('jwt', None)
        if token is None:
            return jsonify(message="No JWT provided"), 400
        try:
            claims = jwt.get_unverified_claims(token)
        except:
            return jsonify(message="Invalid JWT"), 400

        master_key = app.config["JWT_MASTER_KEY"]
        algorithm = app.config["JWT_ALGORITHM"]
        issuer = app.config['APP_NAME']
        try:
            subject = claims["sub"].encode('utf-8')
            salt = claims["x"].encode('utf-8')
        except KeyError:
            return jsonify(message="Invalid claims in JWT", valid=False), 401

        # jwt.decode() requires secret_key to be a str, so it must be decoded
        secret_key = blake2b(b'', key=master_key, salt=salt,
                             person=subject).decode('utf-8')

        # exception is raised if token has expired, signature verification fails, etc.
        try:
            jwt.decode(token=token, key=secret_key, algorithms=algorithm, issuer=issuer)
        except Exception as err:
            structured_log(level='info', msg="Failed to verify JWT", error=err)
            return jsonify(message="Failed to verify JWT", valid=False), 401

        statsd.client.incr("jwt_verified")
        structured_log(level='info', msg="JWT successfully verified",
                       user=f"'{subject}'")
        return jsonify(message="JWT successfully verified", valid=True), 200

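# Hedged sketch (not from the original source) of the issuing side implied by
# verify_endpoint: derive the same per-user secret and sign with it. 'issue_jwt'
# and the salt choice are hypothetical; note PyNaCl caps blake2b salt and person
# at 16 bytes each, so long subjects would need shortening or pre-hashing.
from uuid import uuid4

def issue_jwt(master_key, subject, algorithm, issuer):
    salt = uuid4().hex[:16]                    # illustrative per-token salt
    secret_key = blake2b(b'', key=master_key, salt=salt.encode('utf-8'),
                         person=subject.encode('utf-8')).decode('utf-8')
    claims = {"sub": subject, "x": salt, "iss": issuer}  # what verify_endpoint reads
    return jwt.encode(claims, secret_key, algorithm=algorithm)
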
def calcRollingCode(self, hash):
    # The password isn't part of discovery at all
    hash = hash.split("-")[-1]

    # Use double hashes for lookups, because we might be doing a lookup in public on
    # someone else's wifi.
    h = bytes.fromhex(hash)

    # New clients use the rolling code method. This is so that whenever you are on a
    # public network that is not the same network as the server, we don't reveal much
    # information about what sites we are looking for, which would allow
    # fingerprinting-based tracking.
    # This limits your trackability time because the code changes.
    # Note that because of traffic sniffing of the actual server connection, this is
    # basically meaningless except on networks with isolation between clients and
    # where the attacker is not the network operator.
    # It's really just a slight bit of protection done opportunistically because it
    # is so easy to implement.
    timePeriod = struct.pack("<Q", int(time.time() / (3600 * 24)))
    return blake2b(h + timePeriod,
                   encoder=nacl.encoding.RawEncoder())[:20].hex().lower()

def register(self, hash, port, info, addr=None, n=1):
    # Port must be a list where the first item is the actual port!!!
    # This is so it can be mutable and changed later.

    # OK, so the client should never broadcast the full hash; he might be in a coffee
    # shop or something, roaming where attackers are. So the lookup part of discovery
    # only uses the certificate digest part of the hash.
    # However, for discovery listing purposes, the server must broadcast the full hash,
    # everything needed to connect. This is safer, because servers roam less, and we
    # are only trying to provide opportunistic protection for the names.
    # The reason we use double hashes is for WPA3. If you roam to a WPA3 coffee shop,
    # you will not reveal the hash to anyone except the router owner, because the
    # unhashed digest in the TLS SNI is unicast, which WPA3 keeps secret from other
    # guests.
    fullhash = hash
    hash = hash.split("-")[-1]
    h = bytes.fromhex(hash)
    doublehash = blake2b(h, encoder=nacl.encoding.RawEncoder())[:20].hex()

    # Lookup by hash or rollingCode, store by fullhash.
    self.activeHashes[hash] = (port, info, fullhash)
    self.activeHashes[doublehash] = (port, info, fullhash)

def generate_keypair(password):
    # convert the password string to a bytes sequence
    password_raw = password.encode('utf8')

    # generate a secret key based on the password input (blake2b will hash the
    # password data)
    derivation_salt = nacl.utils.random(16)
    derived_key = blake2b(password_raw, salt=derivation_salt,
                          encoder=nacl.encoding.RawEncoder)

    # save the salt to the salt.txt file
    salt_clean = base64.b64encode(derivation_salt).decode('utf8')
    with open('keystore/salt.txt', 'w') as salt_file:
        salt_file.write(salt_clean)

    # generate a keypair
    private_key = nacl.signing.SigningKey.generate()
    # convert the key to base64 format
    private_key = private_key.encode(Base64Encoder).decode('utf8')

    # encrypt the private key
    box = nacl.secret.SecretBox(derived_key)
    private_key_encrypted = box.encrypt(private_key.encode('utf8'))
    # convert to base64 again lol
    private_key_encrypted = base64.b64encode(private_key_encrypted).decode("utf8")

    # save the key to the keystore files
    with open('keystore/private.key', 'w') as private_file:
        private_file.write(private_key_encrypted)

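# Hedged round-trip check (not from the original source) pairing this with
# read_key above; it assumes a keystore/ directory exists next to the script.
generate_keypair("hunter2")
recovered_b64 = read_key("hunter2")                # base64 of the signing-key seed
assert len(base64.b64decode(recovered_b64)) == 32  # a 32-byte Ed25519 seed
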
def test_store(omq, random_sn, sk, exclude):
    swarm = ss.get_swarm(omq, random_sn, sk)
    sn = ss.random_swarm_members(swarm, 1, exclude)[0]
    conn = omq.connect_remote(sn_address(sn))

    ts = int(time.time() * 1000)
    ttl = 86400000
    exp = ts + ttl
    # Store a message for myself
    s = omq.request_future(conn, 'storage.store', [
        json.dumps({
            "pubkey": '05' + sk.verify_key.encode().hex(),
            "timestamp": ts,
            "ttl": ttl,
            "data": base64.b64encode("abc 123".encode()).decode()
        }).encode()
    ]).get()
    assert len(s) == 1
    s = json.loads(s[0])

    hash = blake2b("{}{}".format(ts, exp).encode() + b'\x05' + sk.verify_key.encode()
                   + b'abc 123',
                   encoder=Base64Encoder).decode().rstrip('=')

    assert len(s["swarm"]) == len(swarm['snodes'])
    edkeys = {x['pubkey_ed25519'] for x in swarm['snodes']}
    for k, v in s['swarm'].items():
        assert k in edkeys
        assert hash == v['hash']

        edpk = VerifyKey(k, encoder=HexEncoder)
        edpk.verify(v['hash'].encode(), base64.b64decode(v['signature']))

    # NB: assumes the test machine is reasonably time synced
    assert (ts - 30000 <= s['t'] <= ts + 30000)

def store_n(omq, conn, sk, basemsg, n, offset=0, netid=5):
    msgs = []
    pubkey = chr(netid).encode() + (sk.verify_key if isinstance(sk, SigningKey)
                                    else sk.public_key).encode()
    for i in range(n):
        data = basemsg + f"{i}".encode()
        ts = int((time.time() - i) * 1000)
        exp = int((time.time() - i + 30) * 1000)
        msgs.append({
            "data": data,
            "req": {
                "pubkey": pubkey.hex(),
                "timestamp": ts,
                "expiry": exp,
                "data": base64.b64encode(data).decode()
            }
        })
        msgs[-1]['future'] = omq.request_future(
            conn, "storage.store", [json.dumps(msgs[-1]['req']).encode()])
        msgs[-1]['hash'] = blake2b("{}{}".format(ts, exp).encode() + pubkey
                                   + msgs[-1]['data'],
                                   encoder=Base64Encoder).decode().rstrip('=')

    assert len({m['hash'] for m in msgs}) == len(msgs)

    for m in msgs:
        resp = m['future'].get()
        assert len(resp) == 1
        m['store'] = json.loads(resp[0].decode())
        assert len(m['store']['swarm']) >= 5
        assert not any('failed' in v for v in m['store']['swarm'].values())
        assert all(v['hash'] == m['hash'] for v in m['store']['swarm'].values())
    return msgs

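# The storage tests above all recompute the server's message hash the same way:
# blake2b over the decimal timestamp and expiry, the network-prefixed pubkey,
# the namespace digits (when non-default), and the raw payload, base64-encoded
# with '=' padding stripped. A hedged helper distilling that recipe
# ('expected_msg_hash' is a hypothetical name, not from the original source):
def expected_msg_hash(ts, exp, pubkey_bytes, data, namespace=b''):
    # e.g. namespace=b'42' for namespace 42, b'' for the default namespace
    return blake2b(f"{ts}{exp}".encode() + pubkey_bytes + namespace + data,
                   encoder=Base64Encoder).decode().rstrip('=')
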
def _blacke2b_digest(data):
    """Create a blake2b 32-byte raw-encoded digest."""
    return blake2b(data=data, digest_size=32, encoder=nacl.encoding.RawEncoder)