def test_keyless_generichash(self):
    """Unkeyed generichash of two distinct messages yields distinct digests."""
    plain_a = b'Are you suggesting coconuts migrate?'
    plain_b = b'Not at all, they could be carried.'
    digest_a = libnacl.crypto_generichash(plain_a)
    digest_b = libnacl.crypto_generichash(plain_b)
    # A digest must never equal its own input, and the two digests must differ.
    self.assertNotEqual(plain_a, digest_a)
    self.assertNotEqual(plain_b, digest_b)
    self.assertNotEqual(digest_b, digest_a)
def validateAuthenticator(self, message2, message, token, key):
    """Verify *token* as a onetimeauth over *message2* under generichash(key, message).

    Returns False when the hash is unavailable; otherwise returns the
    result of crypto_onetimeauth_verify.
    """
    digest = libnacl.crypto_generichash(key, message)
    if digest is None:
        return False
    return libnacl.crypto_onetimeauth_verify(token, message2, digest)
def test_key_generichash(self):
    """Keyed generichash: digest depends on both message and key, and is deterministic."""
    text_a = b'Are you suggesting coconuts migrate?'
    text_b = b'Not at all, they could be carried.'
    secret_a = libnacl.utils.rand_nonce()
    secret_b = libnacl.utils.rand_nonce()
    h_a_ka = libnacl.crypto_generichash(text_a, secret_a)
    h_a_ka_again = libnacl.crypto_generichash(text_a, secret_a)
    h_a_kb = libnacl.crypto_generichash(text_a, secret_b)
    h_b_ka = libnacl.crypto_generichash(text_b, secret_a)
    h_b_kb = libnacl.crypto_generichash(text_b, secret_b)
    # No digest equals its plaintext input.
    for plain, digest in ((text_a, h_a_ka), (text_a, h_a_kb),
                          (text_b, h_b_ka), (text_b, h_b_kb)):
        self.assertNotEqual(plain, digest)
    # Changing the key changes the digest.
    self.assertNotEqual(h_a_ka, h_a_kb)
    self.assertNotEqual(h_b_ka, h_b_kb)
    # Changing the message changes the digest.
    self.assertNotEqual(h_a_ka, h_b_ka)
    self.assertNotEqual(h_a_kb, h_b_kb)
    # Same message + same key is deterministic.
    self.assertEqual(h_a_ka, h_a_ka_again)
def generateOTP(strVar=None, randKey=None):
    """Return the blake2 (libnacl generichash) digest of *strVar*.

    libnacl.crypto_generichash() uses the blake2 hash function:
    http://libnacl.readthedocs.io/en/latest/topics/raw_generichash.html
    There's a version of blake2 called blake2X which outputs longer hashes,
    however blake2X does not seem to be included in libnacl.

    strVar: the string to hash (required; prints a message and returns 1
        when missing).
    randKey: optional key for a keyed hash.

    Example:
        generateOTP("quick brown fox")
        generateOTP("quick brown fox", "lazy dog")
    """
    if not strVar:
        print("String required to generate OTP.")
        return 1
    # NOTE(review): crypto_generichash generally expects bytes; passing
    # str(strVar) / a str key may raise in libnacl — confirm with callers.
    if randKey:
        return libnacl.crypto_generichash(str(strVar), randKey)
    return libnacl.crypto_generichash(str(strVar))
def createBinaryWriteCall(self, r, sig=None):
    """Build a binary command representing a request to insert record *r*.

    Requires either the configured sync writePassword or a caller-supplied
    signature *sig*; raises RuntimeError when neither is available.
    """
    writePw = self.config.get('sync', 'writePassword', fallback=None)
    if not writePw and not sig:
        raise RuntimeError(
            "You do not have the writePassword and this record is unsigned"
        )
    payload = {
        "writePassword": libnacl.crypto_generichash(writePw),
        "insertDocuments": [r, sig]
    }
    return encodeMessage(payload)
def _runner(self):
    """Block until a worker finds a matching address, then rebuild and return it.

    Polls once per second (printing stats) until self.sync[2] is set
    nonzero by a worker thread, then re-derives the winning address from
    the counters the worker published in self.sync[0] / self.sync[1].
    Returns (packed_counter_bytes, zbase32_address).
    """
    # Wait for a worker to signal success through the shared sync array.
    while self.sync[2] == 0:
        time.sleep(1)
        self._print_stats()
    self._joinall()
    fd = BytesIO()
    enc = bencode.BCodec(fd)
    hi = self.sync[0]
    lo = self.sync[1]
    si = _gen_si(self._keys)
    # 'x' carries the 16-byte big-endian (lo, hi) counter pair the worker found.
    si['x'] = struct.pack('>QQ', lo, hi)
    enc.encode(si)
    pub = bytes(fd.getbuffer())
    # The address is the zbase32 encoding of the generichash of the bencoded struct.
    addr = zb32_encode(libnacl.crypto_generichash(pub))
    return si['x'], addr
def encodeMessage(self, d, useWritePassword=False):
    """Given a JSON message, encode it so as to be suitable to send to another node.

    Wire layout: generichash(key)[:16] (key hint) + localNodeVK
    + crypto_sign(timestamp + secretbox(json), localNodeSK),
    where the 8-byte microsecond timestamp doubles as the secretbox
    nonce (zero-padded to 24 bytes).
    Raises RuntimeError when useWritePassword is requested but absent.
    """
    if useWritePassword and not self.writePassword:
        raise RuntimeError("You don't have a write password")
    # Pick the shared secret to encrypt under; pad to the 32-byte box key size.
    pw = self.writePassword if useWritePassword else self.syncKey
    pw = pw.encode("utf8").ljust(32, b'\0')
    r = jsonEncode(d).encode('utf8')
    # Microsecond wall-clock timestamp — also the nonce prefix, so each
    # message gets a fresh nonce.
    t = struct.pack("<Q", int(time.time() * 1000000))
    r = libnacl.crypto_secretbox(r, t + b'\0' * 16, pw)
    # Prefix a 16-byte key hint so the receiver can pick the right key
    # without trial decryption (see handleBinaryAPICall).
    return libnacl.crypto_generichash(
        pw)[:16] + self.localNodeVK + libnacl.crypto_sign(
            t + r, self.localNodeSK)
def makeAuthenticator(self, message, key):
    """Return publicKey + key + onetimeauth(publicKey) as a 64-byte bytearray.

    The onetimeauth key is generichash(message, key). Raises Error when
    the hash is unavailable or the assembled size is not 64 bytes.
    """
    digest = libnacl.crypto_generichash(message, key)
    if digest is None:
        raise Error('generic hash undefined')
    token = libnacl.crypto_onetimeauth(self.publicKey, digest)
    if len(self.publicKey) + len(key) + len(token) != 64:
        raise Error('Unexpected token size')
    out = bytearray(self.publicKey)
    out += key
    out += token
    return out
def apiCall(self, a):
    """Process one incoming binary API message and return the reply dict.

    Wire layout: 8-byte little-endian microsecond timestamp (also the
    secretbox nonce prefix) + 32-byte key hash + secretbox ciphertext.
    Returns {} for stale messages, None for an unrecognized key hash,
    otherwise a dict of records for the peer.
    """
    # Get timestamp which is also the nonce
    t = a[:8]
    # BUG FIX: struct.unpack returns a tuple; take the scalar so the age
    # check compares numbers rather than tuple-vs-float (TypeError).
    t = struct.unpack("<Q", t)[0]
    # reject very old stuff (more than one hour old)
    if t < (time.time() - 3600) * 1000000:
        return {}
    # Get the key ID, which is just the hash of the key.
    k = a[8:40]
    # Get the data
    d = a[40:]
    if not k == libnacl.crypto_generichash(self.syncKey):
        return
    # BUG FIX: the 24-byte secretbox nonce is the timestamp padded with 16
    # zero BYTES (was a[:8] + '\0' + 16 — str+bytes and int addition).
    d = libnacl.crypto_secretbox_open(d, a[:8] + b'\0' * 16, self.secretKey)
    d = json.loads(d)
    writePassword = d.get("writePassword", '')
    if writePassword and not writePassword == self.config.get(
            'sync', 'writePassword', fallback=None):
        raise RuntimeError("Bad Password")
    r = {'records': []}
    if "getNewArrivals" in d:
        cur = self.conn.cursor()
        cur.execute(
            "SELECT json,signature FROM document WHERE json_extract(json,'$._time')>?",
            (d['getNewArrivals'], ))
        for i in cur:
            r['records'].append([i])
    if "insertDocuments" in d:
        # BUG FIX: iterate the submitted documents, not the characters of
        # the literal string 'insertDocuments'.
        for i in d['insertDocuments']:
            if writePassword:
                self.setDocument(i[0], i[1])
    return r
def _gen_addr_tick(self, prefix, lo, hi, si):
    """Worker loop: brute-force (lo, hi) counters until the derived
    zbase32 address starts with *prefix*, then publish the result.

    Stops as soon as any worker sets self.sync[2]. Each worker advances
    hi by self._inc so workers cover disjoint counter values.
    """
    print(prefix)
    fd = BytesIO()
    addr = ''
    enc = bencode.BCodec(fd)
    while self.sync[2] == 0:
        si['x'] = struct.pack('>QQ', lo, hi)
        # Rewind and re-encode the candidate structure in place.
        fd.seek(0, 0)
        enc.encode(si)
        pub = bytes(fd.getbuffer())
        addr = zb32_encode(libnacl.crypto_generichash(pub))
        if addr.startswith(prefix):
            # Publish the winning counters and signal all workers to stop.
            self.sync[2] = 1
            self.sync[0] = hi
            self.sync[1] = lo
            return
        hi += self._inc
        # On 64-bit wraparound, carry into the low word.
        if hi == 0:
            lo += 1
        # Shared attempt counter for progress statistics.
        self._c.value += 1
def keyedhash(self,d,k):
    """Return the keyed generic hash (blake2) of data *d* under key *k*."""
    digest = libnacl.crypto_generichash(d, k)
    return digest
def __init__(self, filename):
    """Open (creating if necessary) the document-store database at *filename*.

    Side effects: connects to SQLite, loads optional <filename>.conf and
    <filename>.keys, creates the document/meta tables, FTS index and
    sync triggers, generates and records a signing keypair plus sync
    credentials on first run, and registers this instance in
    databaseBySyncKey.
    """
    self.filename = os.path.abspath(filename)
    self.conn = sqlite3.connect(filename)
    # Per-database settings live in a sibling .conf file.
    self.config = configparser.ConfigParser()
    if os.path.exists(filename + ".conf"):
        self.config.read(filename + ".conf")
    self.conn.row_factory = sqlite3.Row
    # Self.conn.execute("PRAGMA wal_checkpoint=FULL")
    self.conn.execute("PRAGMA secure_delete = off")
    # Yep, we're really just gonna use it as a document store like this.
    self.conn.execute(
        '''CREATE TABLE IF NOT EXISTS document (rowid integer primary key, json text, signature text, localinfo text)'''
    )
    self.conn.execute('''CREATE TABLE IF NOT EXISTS meta (key text primary key, value text)''')
    # To keep indexing simple and universal, it only works on three properties. _tags, _description and _body.
    self.conn.execute('''
        CREATE VIRTUAL TABLE IF NOT EXISTS search USING fts5(tags, description, body, content='')'''
    )
    # Partial/expression indexes over the JSON properties we query on.
    self.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_parent ON document(json_extract(json,"$._parent")) WHERE json_extract(json,"$._parent") IS NOT null '''
    )
    self.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_link ON document(json_extract(json,"$._link")) WHERE json_extract(json,"$._link") IS NOT null'''
    )
    self.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_name ON document(json_extract(json,"$._name"))'''
    )
    self.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_id ON document(json_extract(json,"$._id"))'''
    )
    self.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_type ON document(json_extract(json,"$._type"))'''
    )
    # Triggers keep the external-content FTS table in sync with document.
    self.conn.execute("""
        CREATE TRIGGER IF NOT EXISTS search_index AFTER INSERT ON document BEGIN
        INSERT INTO search(rowid, tags, description, body)
        VALUES (new.rowid,
            IFNULL(json_extract(new.json,"$._tags"), ""),
            IFNULL(json_extract(new.json,"$._description"), ""),
            IFNULL(json_extract(new.json,"$._body"), ""));
        END;
        """)
    self.conn.execute("""
        CREATE TRIGGER IF NOT EXISTS search_delete AFTER DELETE ON document BEGIN
        INSERT INTO search(search, rowid, tags, description, body)
        VALUES ('delete', old.rowid,
            IFNULL(json_extract(old.json,"$._tags"), ""),
            IFNULL(json_extract(old.json,"$._description"), ""),
            IFNULL(json_extract(old.json,"$._body"), ""));
        END;""")
    self.conn.execute("""
        CREATE TRIGGER IF NOT EXISTS search_update AFTER UPDATE ON document BEGIN
        INSERT INTO search(search, rowid, tags, description, body)
        VALUES ('delete', old.rowid,
            IFNULL(json_extract(old.json,"$._tags"), ""),
            IFNULL(json_extract(old.json,"$._description"), ""),
            IFNULL(json_extract(old.json,"$._body"), ""));
        INSERT INTO search(rowid, tags, description, body)
        VALUES (new.rowid,
            IFNULL(json_extract(new.json,"$._tags"), ""),
            IFNULL(json_extract(new.json,"$._description"), ""),
            IFNULL(json_extract(new.json,"$._body"), ""));
        END;
        """)
    # The node's signing keypair is persisted in a sibling .keys file.
    self.keys = configparser.ConfigParser()
    if os.path.exists(filename + ".keys"):
        self.keys.read(filename + ".keys")
    pk = base64.b64decode(self.keys.get('key', 'public', fallback=''))
    sk = base64.b64decode(self.keys.get('key', 'secret', fallback=''))
    # Generate a keypair for this particular node.
    if not (pk and sk):
        pk, sk = libnacl.crypto_sign_keypair()
        try:
            self.keys.add_section("key")
        except:
            pass
        self.keys.set('key', 'public', base64.b64encode(pk).decode('utf8'))
        self.keys.set('key', 'secret', base64.b64encode(sk).decode('utf8'))
        # NOTE(review): the .keys file does not appear to be written back
        # here — confirm persistence happens elsewhere.
    # Add our new key to the approved list, for our local copy.
    if 'approved' not in self.config:
        self.config.add_section('approved')
        self.config.set('approved', 'autogenerated',
                        base64.b64encode(pk).decode())
        self.saveConfig()
    self.publicKey = pk
    self.secretKey = sk
    # First run: mint random sync credentials and persist them.
    if 'sync' not in self.config:
        self.config.add_section('sync')
        self.config.set('sync', 'syncKey', str(uuid.uuid4()))
        self.config.set('sync', 'writePassword', str(uuid.uuid4()))
        self.saveConfig()
    self.syncKey = self.config.get('sync', 'syncKey', fallback=None)
    if self.syncKey:
        # NOTE(review): syncKey is a str here; crypto_generichash normally
        # takes bytes — confirm libnacl accepts this.
        databaseBySyncKey[libnacl.crypto_generichash(self.syncKey)] = self
    self.approvedPublicKeys = {}
    if 'approved' in self.config:
        for i in self.config['approved']:
            # Reverse lookup: map base64 public key -> config entry name.
            self.approvedPublicKeys[self.config['approved'][i]] = i
def __init__(self, msg, key=None):
    """Hash *msg* immediately (keyed when *key* is given) and record the digest.

    Exposes: msg, key, raw_digest (the generichash bytes) and
    digest_size (its length).
    """
    self.msg = msg
    self.key = key
    digest = libnacl.crypto_generichash(msg, key)
    self.raw_digest = digest
    self.digest_size = len(digest)
def __init__(self, address:Tuple[str,int]=('255.255.255.255',DEFAULT_PORT),clientID=None,psk=None,cipher=1,server=None,keypair=None, serverkey=None, handle=None,daemon=None):
    """Represents a Pavillion client that can both initiate and respond to requests.

    address: (host, port) of the server or multicast group.
    clientID: 16-byte identity; derived from the public key when absent.
    psk: pre-shared key (run through preprocessKey); cipher: index into ciphers.
    keypair: crypto_box keypair, or "guest" to generate one on the fly.
    handle: optional object whose garbage collection closes this client.
    daemon: daemon flag for the network thread (defaults to pavillion.daemon).
    Side effects: binds UDP sockets (plus a multicast socket when the
    address is multicast) and starts the network and keepalive threads.
    """
    if daemon is None:
        daemon=pavillion.daemon
    #Last time we were known to be connected
    #We're trying to pretend to be connectionless, so
    #this is really just a guess of if there's at least one
    #server connected
    self.connectedAt =0
    #The address of our associated server
    self.server_address = address
    #The default timeout.
    self.timeout = 2
    #Used for optimizing the response timing
    self.fastestOverallCallResponse = 0.05
    #Average response time for each type of call we know about
    #Listed by the RPC number
    self.averageResponseTimes = {}
    self.stdDevResponseTimes = {}
    #Used to keeo track of the optimization where some broadcasts are converted to unicasts.
    #We occasionally send real broadcasts for new server discovery.
    self.lastActualBroadcast = 0
    #Our message counter
    self.counter = random.randint(1024,1000000000)
    self.server_counter = 0
    self.cipher= ciphers[cipher]
    self.keypair = keypair
    self.server_pubkey = serverkey
    #Clients can be associated with a server
    self.server = server
    psk = preprocessKey(psk)
    self.psk = psk
    self.clientID = clientID
    self.lastChangedChallenge = time.time()
    self.challengelock = threading.Lock()
    self.targetslock = threading.Lock()
    self.lock = threading.Lock()
    self.nonce = os.urandom(32)
    self.challenge = os.urandom(16)
    self.usedServerNonces = {}
    #Conceptually, there is exactly one server, but in the case of multicast there's
    #multiple machines even if they all have the same key.
    self.max_servers = 128
    #Known servers, indexed by (addr,port)
    self.known_servers = {}
    #Last sent message that was sent to the default address
    self._keepalive_time = time.time()
    self.skey = None
    self.messageTargets = {}
    # "guest" means: generate a throwaway keypair for this session.
    if self.keypair == "guest":
        self.keypair = libnacl.crypto_box_keypair()
    # Derive the session key from whichever credential we have.
    if self.psk:
        self.key = self.cipher.keyedhash(self.nonce,psk)
        self.sessionID = os.urandom(16)
    elif self.keypair:
        self.key = os.urandom(32)
        self.sessionID = os.urandom(16)
    else:
        self.key= None
        self.sessionID = os.urandom(16)
    # Default clientID is the first 16 bytes of the public key's hash.
    if not self.clientID:
        if self.keypair:
            self.clientID = libnacl.crypto_generichash(self.keypair[0])[:16]
    self_address = ('', 0)
    self.lock=threading.Lock()
    # Create the socket
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    # Bind to the server address
    self.sock.bind(self_address)
    self.sock.settimeout(1)
    if is_multicast(address[0]):
        # Create the socket
        self.msock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.msock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
        self.msock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.msock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        # Bind to the server address
        self.msock.bind((self_address[0],self.server_address[1]))
        self.msock.settimeout(1)
        # Join the multicast group on all interfaces.
        group = socket.inet_aton(address[0])
        mreq = struct.pack('4sL', group, socket.INADDR_ANY)
        self.msock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
    else:
        self.msock = False
    self.running = True
    def cl(*args):
        # Weakref finalizer: stop this client when the handle is collected.
        self.close()
    self.clfun = cl
    #If we have a handle, make it so that if the handle gets collected the server thread stops
    if handle:
        self.handle = weakref.ref(handle,cl)
    #lastseen time dicts indexed by the name of what you are subscribing to, then indexed by subscriber IP
    #This is a list of *other* machines that are subscribing.
    # A "subscription" can be implicit, a response to a multicast packet.
    #Or it can be an explicit subscribe message
    #If we get less responses than usual, we know we should retry.
    self.knownSubscribers = {}
    self.subslock = threading.Lock()
    self.waitingForAck = weakref.WeakValueDictionary()
    self.backoff_until = time.time()
    # Main network loop thread.
    t = threading.Thread(target=self.loop)
    t.daemon = daemon
    t.name+=":PavillionClient"
    t.start()
    self._kathread = threading.Thread(target=self._keepAliveLoop)
    self._kathread.daemon = True
    self._kathread.name+=":PavillionClientKeepalive"
    self._kathread.start()
    #Attempt to connect. The protocol has reconnection built in,
    #But this lets us connect in advance
    if self.psk and self.clientID:
        pass
        self.sendNonceRequest()
    elif self.keypair:
        self.sendNonceRequest()
        pass
    else:
        # No credentials: fall back to the plaintext setup handshake,
        # retrying a few times until a server confirms.
        self.synced = False
        counter = 8
        while not self.synced and counter:
            # NOTE(review): self.clientID may still be None on this path —
            # confirm callers always pass clientID when no key is used.
            self.sendSetup(0, 1, struct.pack("<B",self.cipher.id)+self.clientID+self.challenge)
            time.sleep(0.05)
            counter-=1
def keyedhash(self, d, k):
    """Keyed blake2 generic hash of data *d* with key *k*."""
    data, hashkey = d, k
    return libnacl.crypto_generichash(data, hashkey)
def setDocument(self, doc, signature=b''):
    """Insert or update one document, verifying or generating its signature.

    doc: the document as an exact JSON string (required form when a
        signature is supplied, since the signature covers the exact text).
    signature: "base64(publicKey):base64(detachedSig)" from a peer, or
        falsy to sign locally with our own secretKey.
    Returns the document id. Raises ValueError on bad arguments and
    RuntimeError when the signing key is not in the approved list.
    """
    if not signature and not self.secretKey:
        raise ValueError(
            "Cannot sign any new documents, you do not have the keys:" +
            str(self.secretKey))
    if signature:
        if not isinstance(doc, str):
            # BUG FIX: corrected typo in message ("Doc ,ust be").
            raise ValueError(
                "Doc must be an exact JSON string when providing a signature"
            )
        # BUG FIX: take the key half of "key:sig" — the original kept the
        # whole split list, so the approved-key lookup could never succeed.
        key = signature.split(":")[0]
        if not key in self.approvedPublicKeys:
            raise RuntimeError("Message was signed with a bad key")
        # BUG FIX: re-nested the call — the hash and key were accidentally
        # passed as extra arguments to b64decode instead of to the verifier.
        libnacl.crypto_sign_verify_detached(
            base64.b64decode(signature.split(":")[1]),
            libnacl.crypto_generichash(doc.encode('utf8')),
            base64.b64decode(key))
    doc = json.loads(doc)
    # Fill in bookkeeping defaults (microsecond timestamps, identity).
    doc['time'] = doc.get('time', time.time() * 1000000)
    doc['arrival'] = doc.get('arrival', time.time() * 1000000)
    doc['id'] = doc.get('id', str(uuid.uuid4()))
    doc['name'] = doc.get('name', doc['id'])
    doc['type'] = doc.get('type', '')
    # If a UUID has been supplied, we want to erase any old record bearing that name.
    cur = self.threadLocal.conn.cursor()
    cur.execute(
        'SELECT json_extract(json,"$.time") FROM document WHERE json_extract(json,"$.id")=?',
        (doc['id'], ))
    x = cur.fetchone()
    d = jsonEncode(doc)
    # If we are generating a new message, sign it automatically.
    if not signature:
        signature = libnacl.crypto_sign(
            libnacl.crypto_generichash(d.encode('utf8')), self.secretKey)
    if x:
        # Check that record we are trying to insert is newer, else ignore
        if x[0] < doc['time']:
            self.threadLocal.conn.execute(
                "UPDATE document SET json=?, signature=? WHERE json_extract(json,'$.id')=?",
                (d, signature, doc['id']))
            # If we are marking this as deleted, we can ditch everything that depends on it.
            # We don't even have to just set them as deleted, we can relly delete them, the deleted parent record
            # is enough for other nodes to know this shouldn't exist anymore.
            if doc['type'] == "null":
                self.threadLocal.conn.execute(
                    "DELETE FROM document WHERE json_extract(json,'$.id')=?",
                    (doc['id'], ))
            return doc['id']
        else:
            return doc['id']
    self.threadLocal.conn.execute(
        "INSERT INTO document VALUES (null,?,?,?)", (d, signature, ''))
    return doc['id']
def handleBinaryAPICall(self, a, sessionObject=None):
    """Process one incoming binary API message; a session object enables
    stateful features such as the initial resync.

    Wire layout (see encodeMessage): 16-byte key hint + 32-byte sender
    verify key + crypto_sign(timestamp + secretbox(json)).
    Returns the encoded reply message.
    """
    #Get the key hint
    k = a[:16]
    a = a[16:]
    # Sender's public signing key.
    remoteNodeID = a[:32]
    a = a[32:]
    #Verify that it is from who we think
    a = libnacl.crypto_sign_open(a, remoteNodeID)
    tbytes = a[:8]
    t = struct.unpack("<Q", tbytes)[0]
    # reject very old stuff
    if t < (time.time() - 3600) * 1000000:
        return {}
    # Get the data
    d = a[8:]
    #We can use either the real key, or the write password, which is only used for "centralized mode"
    if k == libnacl.crypto_generichash(
            self.syncKey.encode("utf8").ljust(32, b'\0'))[:16]:
        openingKey = self.syncKey
        writePassword = False
    elif k == libnacl.crypto_generichash(
            self.writePassword.encode("utf8").ljust(32, b'\0'))[:16]:
        openingKey = self.writePassword
        writePassword = True
    else:
        raise RuntimeError("Bad key hint")
    openingKey = openingKey.encode("utf8").ljust(32, b'\0')
    # Timestamp doubles as the secretbox nonce, padded to 24 bytes.
    d = libnacl.crypto_secretbox_open(d, a[:8] + b'\0' * 16, openingKey)
    d = json.loads(d)
    r = {'records': []}
    # First message of a session: ask for everything newer than what we
    # last saw from this peer.
    if sessionObject and not sessionObject.alreadyDidInitialSync:
        cur = self.threadLocal.conn.cursor()
        cur.execute("SELECT lastArrival FROM peers WHERE peerID=?",
                    (remoteNodeID, ))
        c = cur.fetchone()
        if c:
            c = c[0]
        else:
            c = 0
        #No falsy value allowed, that would mean don't get new arrivals
        r['getNewArrivals'] = c or 1
        sessionObject.alreadyDidInitialSync = True
    if "getNewArrivals" in d:
        cur = self.threadLocal.conn.cursor()
        #Avoid dumping way too much at once
        cur.execute(
            "SELECT json,signature FROM document WHERE json_extract(json,'$.arrival')>? LIMIT 100",
            (d['getNewArrivals'], ))
        #Declares that there are no records left out in between this time and the first time we actually send
        r['recordsStartFrom'] = d['getNewArrivals']
        for i in cur:
            if not 'records' in r:
                r['records'] = []
            print(i)
            r['records'].append([i[0], base64.b64encode(i[1]).decode()])
            sessionObject.lastResyncFlushTime = max(
                sessionObject.lastResyncFlushTime,
                json.loads(i[0])['arrival'])
    if "records" in d and d['records']:
        for i in d['records']:
            #No need sig verify, if we are using PW verification.
            #Set a flag to request that the server send us any records that came after the last one,
            self.setDocument(i[0], None if writePassword else i[1])
            # NOTE(review): i[0] is the JSON *text* elsewhere in this
            # protocol; i[0]['arrival'] indexing looks suspect — confirm
            # whether incoming records carry decoded dicts here.
            r['getNewArrivals'] = i[0]['arrival']
        #Set a flag saying that
        cur = self.threadLocal.conn.cursor()
        #If the recorded lastArrival is less than the incoming recordsStartFrom, it would mean that there is a gap in which records
        #That we don't know about could be hiding. Don't update the timestamp in that case, as the chain is broken.
        #We can still accept new records, but we will need to request everything all over again starting at the breakpoint to fix this.
        cur.execute(
            "UPDATE peers SET lastArrival=? WHERE peerID=? AND lastArrival !=? AND lastArrival>=?",
            (r['getNewArrivals'], remoteNodeID, r['getNewArrivals'],
             r.get("recordsStartFrom")))
    return self.encodeMessage(r)
def __init__(self, filename):
    """Open (creating if necessary) the synchronized document store at *filename*.

    Side effects: derives a deterministic per-file signing keypair from
    the filename and nodeID, connects SQLite via thread-local storage,
    creates document/meta/peers tables plus the FTS index and triggers,
    generates and persists a node keypair and sync credentials on first
    run, and registers this instance in databaseBySyncKeyHash under the
    16-byte hashes of both the syncKey and the writePassword.
    """
    self.filename = os.path.abspath(filename)
    # SQLite connections are per-thread; see dbConnect().
    self.threadLocal = threading.local()
    #Deterministically generate a keypair that we will use to sign all correspondance
    self.localNodeVK, self.localNodeSK = libnacl.crypto_sign_seed_keypair(
        libnacl.crypto_generichash(
            os.path.basename(filename).encode("utf8"), nodeID))
    print(len(self.localNodeVK), self.localNodeVK, 90)
    #Websockets that are subscribing to us.
    self.subscribers = weakref.WeakValueDictionary()
    self.dbConnect()
    self.config = configparser.ConfigParser()
    if os.path.exists(filename + ".conf"):
        self.config.read(filename + ".conf")
    self.threadLocal.conn.row_factory = sqlite3.Row
    # self.threadLocal.conn.execute("PRAGMA wal_checkpoint=FULL")
    self.threadLocal.conn.execute("PRAGMA secure_delete = off")
    # Yep, we're really just gonna use it as a document store like this.
    self.threadLocal.conn.execute(
        '''CREATE TABLE IF NOT EXISTS document (rowid integer primary key, json text, signature text, localinfo text)'''
    )
    self.threadLocal.conn.execute('''CREATE TABLE IF NOT EXISTS meta (key text primary key, value text)''')
    # Tracks the newest record we have successfully received from each peer.
    self.threadLocal.conn.execute('''CREATE TABLE IF NOT EXISTS peers (peerID text primary key, lastArrival integer, info text)''')
    # To keep indexing simple and universal, it only works on three properties. _tags, _description and _body.
    self.threadLocal.conn.execute('''
        CREATE VIRTUAL TABLE IF NOT EXISTS search USING fts5(tags, description, body, content='')'''
    )
    # Partial/expression indexes over the JSON properties we query on.
    self.threadLocal.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_parent ON document(json_extract(json,"$.parent")) WHERE json_extract(json,"$.parent") IS NOT null '''
    )
    self.threadLocal.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_link ON document(json_extract(json,"$.link")) WHERE json_extract(json,"$.link") IS NOT null'''
    )
    self.threadLocal.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_name ON document(json_extract(json,"$.name"))'''
    )
    self.threadLocal.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_id ON document(json_extract(json,"$.id"))'''
    )
    self.threadLocal.conn.execute(
        '''CREATE INDEX IF NOT EXISTS document_type ON document(json_extract(json,"$.type"))'''
    )
    # Triggers keep the external-content FTS table in sync with document.
    self.threadLocal.conn.execute("""
        CREATE TRIGGER IF NOT EXISTS search_index AFTER INSERT ON document BEGIN
        INSERT INTO search(rowid, tags, description, body)
        VALUES (new.rowid,
            IFNULL(json_extract(new.json,"$.tags"), ""),
            IFNULL(json_extract(new.json,"$.description"), ""),
            IFNULL(json_extract(new.json,"$.body"), ""));
        END;
        """)
    self.threadLocal.conn.execute("""
        CREATE TRIGGER IF NOT EXISTS search_delete AFTER DELETE ON document BEGIN
        INSERT INTO search(search, rowid, tags, description, body)
        VALUES ('delete', old.rowid,
            IFNULL(json_extract(old.json,"$.tags"), ""),
            IFNULL(json_extract(old.json,"$.description"), ""),
            IFNULL(json_extract(old.json,"$.body"), ""));
        END;""")
    self.threadLocal.conn.execute("""
        CREATE TRIGGER IF NOT EXISTS search_update AFTER UPDATE ON document BEGIN
        INSERT INTO search(search, rowid, tags, description, body)
        VALUES ('delete', old.rowid,
            IFNULL(json_extract(old.json,"$.tags"), ""),
            IFNULL(json_extract(old.json,"$.description"), ""),
            IFNULL(json_extract(old.json,"$.body"), ""));
        INSERT INTO search(rowid, tags, description, body)
        VALUES (new.rowid,
            IFNULL(json_extract(new.json,"$.tags"), ""),
            IFNULL(json_extract(new.json,"$.description"), ""),
            IFNULL(json_extract(new.json,"$.body"), ""));
        END;
        """)
    # The node's signing keypair is persisted in a sibling .keys file.
    self.keys = configparser.ConfigParser()
    if os.path.exists(filename + ".keys"):
        self.keys.read(filename + ".keys")
    pk = base64.b64decode(self.keys.get('key', 'public', fallback=''))
    sk = base64.b64decode(self.keys.get('key', 'secret', fallback=''))
    # Generate a keypair for this particular node.
    if not (pk and sk):
        pk, sk = libnacl.crypto_sign_keypair()
        try:
            self.keys.add_section("key")
        except:
            pass
        self.keys.set('key', 'public', base64.b64encode(pk).decode('utf8'))
        self.keys.set('key', 'secret', base64.b64encode(sk).decode('utf8'))
        # NOTE(review): the .keys file does not appear to be written back
        # here — confirm persistence happens elsewhere.
    # Add our new key to the approved list, for our local copy.
    if 'approved' not in self.config:
        self.config.add_section('approved')
        self.config.set('approved', 'autogenerated',
                        base64.b64encode(pk).decode())
        self.saveConfig()
    self.publicKey = pk
    self.secretKey = sk
    # First run: mint random sync credentials and persist them.
    if 'sync' not in self.config:
        self.config.add_section('sync')
        self.config.set('sync', 'syncKey',
                        base64.b64encode(os.urandom(24)).decode('utf8'))
        self.config.set('sync', 'writePassword',
                        base64.b64encode(os.urandom(24)).decode('utf8'))
        self.saveConfig()
    self.syncKey = self.config.get('sync', 'syncKey', fallback=None)
    self.writePassword = self.config.get('sync', 'writePassword', fallback='')
    # Register under 16-byte key hints so handleBinaryAPICall can route
    # incoming messages (see the key-hint prefix in encodeMessage).
    if self.syncKey:
        databaseBySyncKeyHash[libnacl.crypto_generichash(
            self.syncKey.encode("utf8"))[:16]] = self
    if self.writePassword:
        databaseBySyncKeyHash[libnacl.crypto_generichash(
            self.writePassword.encode("utf8"))[:16]] = self
    print(list(databaseBySyncKeyHash.keys()))
    self.approvedPublicKeys = {}
    if 'approved' in self.config:
        for i in self.config['approved']:
            # Reverse lookup: map base64 public key -> config entry name.
            self.approvedPublicKeys[self.config['approved'][i]] = i