def encrypt(self): # peer, data
    """Interactively encrypt a message to a peer given on the command line.

    Reads the peer public key from sys.argv[2], an optional 'false'
    signing flag from sys.argv[3], and the plaintext from stdin
    (terminated by EOF or a lone '-q' line). Prints the encrypted
    payload wrapped in ONIONR ENCRYPTED DATA markers.
    """
    plaintext = ""
    encrypted = ""
    # detect if signing is enabled (default on; argv[3] == 'false' disables)
    sign = True
    try:
        if sys.argv[3].lower() == 'false':
            sign = False
    except IndexError:
        pass
    try:
        if not stringvalidators.validate_pub_key(sys.argv[2]):
            raise onionrexceptions.InvalidPubkey
    except (ValueError, IndexError) as e:
        logger.error("Peer public key not specified", terminal=True)
    except onionrexceptions.InvalidPubkey:
        logger.error("Invalid public key", terminal=True)
    else:
        pubkey = sys.argv[2]
        # Encrypt if public key is valid
        logger.info("Please enter your message (ctrl-d or -q to stop):",
                    terminal=True)
        try:
            # Accumulate stdin lines until EOF or an explicit '-q' sentinel
            for line in sys.stdin:
                if line == '-q\n':
                    break
                plaintext += line
        except KeyboardInterrupt:
            sys.exit(1)
        # Build Message to encrypt
        data = {}
        myPub = keypair[0]
        if sign:
            # Sign the plaintext and embed signature + signer in the payload
            data['sig'] = signing.ed_sign(
                plaintext, key=keypair[1], encodeResult=True)
            data['sig'] = bytesconverter.bytes_to_str(data['sig'])
            data['signer'] = myPub
        data['data'] = plaintext
        # Serialize the whole envelope; the JSON becomes the new plaintext
        data = json.dumps(data)
        plaintext = data
        encrypted = encryption.pub_key_encrypt(
            plaintext, pubkey, encodedData=True)
        encrypted = bytesconverter.bytes_to_str(encrypted)
        logger.info(
            'Encrypted Message: \n\nONIONR ENCRYPTED DATA %s END ENCRYPTED DATA' %
            (encrypted, ), terminal=True)
def addKey(self, pubKey=None, privKey=None):
    '''Add a new key pair, either specified or none to generate a new pair automatically

    Returns the (pubKey, privKey) tuple as strings.
    Raises ValueError if the public key is already stored.
    '''
    # Idiom fix: original used `type(x) is type(None)`; compare to None
    # with `is` instead.
    if pubKey is None and privKey is None:
        pubKey, privKey = generate.generate_pub_key()
    pubKey = bytesconverter.bytes_to_str(pubKey)
    privKey = bytesconverter.bytes_to_str(privKey)
    try:
        if pubKey in self.getPubkeyList():
            raise ValueError('Pubkey already in list: %s' % (pubKey, ))
    except FileNotFoundError:
        # No key file yet; nothing to be a duplicate of
        pass
    with open(self.keyFile, "a") as keyFile:
        keyFile.write(pubKey + ',' + privKey + '\n')
    return (pubKey, privKey)
def client_api_insert_block():
    """Create an Onionr block from a client-supplied JSON request body.

    Expects at minimum a 'message' key; optional keys: 'encrypt', 'to',
    'sign', 'type', 'meta', 'forward'. Insertion runs in a background
    thread; returns 'success' immediately, or 400 on bad input.
    """
    insert_data: JSONSerializable = request.get_json(force=True)
    message = insert_data['message']
    # Detect if message (block body) is not specified.
    # BUG FIX: the original tested `type(message) is None`, which is never
    # true (type() always returns a type object), so None messages were
    # never rejected. Also moved the check before hashing, which would
    # fail on None anyway. Removed the unused `encrypt` local.
    if message is None:
        return 'failure due to unspecified message', 400
    message_hash = bytesconverter.bytes_to_str(hashers.sha3_hash(message))
    kv: 'DeadSimpleKV' = g.too_many.get_by_string('DeadSimpleKV')
    # Detect if block with same message is already being inserted
    if message_hash in kv.get('generating_blocks'):
        return 'failure due to duplicate insert', 400
    else:
        kv.get('generating_blocks').append(message_hash)
    encrypt_type = ''
    sign = True
    meta = {}
    to = ''
    try:
        if insert_data['encrypt']:
            to = insert_data['to'].strip()
            if "-" in to:
                # Mnemonic form of the recipient key; convert to base32
                to = mnemonickeys.get_base32(to)
            encrypt_type = 'asym'
    except KeyError:
        pass
    try:
        if not insert_data['sign']:
            sign = False
    except KeyError:
        pass
    try:
        bType = insert_data['type']
    except KeyError:
        bType = 'bin'
    try:
        meta = json.loads(insert_data['meta'])
    except KeyError:
        pass
    try:
        # The setting in the UI is for if forward secrecy is enabled, not disabled
        disable_forward_secrecy = not insert_data['forward']
    except KeyError:
        disable_forward_secrecy = False
    # Insert asynchronously so the API call returns immediately
    threading.Thread(
        target=onionrblocks.insert, args=(message, ),
        kwargs={'header': bType,
                'encryptType': encrypt_type,
                'sign': sign,
                'asymPeer': to,
                'meta': meta,
                'disableForward': disable_forward_secrecy}).start()
    return Response('success')
def get_block_data(self, bHash, decrypt=False, raw=False, headerOnly=False):
    """Return a block's data as JSON (or raw bytes).

    bHash: hex hash of the block to load.
    decrypt: attempt decryption; raises ValueError if it fails.
    raw: return the unparsed block bytes instead of JSON.
    headerOnly: omit content; include signature validity in the header.
    Raises onionrexceptions.InvalidHexHash for malformed hashes.
    """
    if not stringvalidators.validate_hash(bHash):
        raise onionrexceptions.InvalidHexHash(
            "block hash not valid hash format")
    bl = onionrblockapi.Block(bHash)
    if decrypt:
        bl.decrypt()
        # Decryption requested but failed: refuse to serve ciphertext
        if bl.isEncrypted and not bl.decrypted:
            raise ValueError
    if not raw:
        if not headerOnly:
            retData = {'meta':bl.bheader, 'metadata': bl.bmetadata,
                       'content': bl.bcontent}
            # Decode bytes fields to str where possible (AttributeError
            # means the value is already a str/dict and is left as-is)
            for x in list(retData.keys()):
                try:
                    retData[x] = retData[x].decode()
                except AttributeError:
                    pass
        else:
            # Header-only view: report whether the signature checks out
            validSig = False
            signer = bytesconverter.bytes_to_str(bl.signer)
            if bl.isSigned() and stringvalidators.validate_pub_key(signer) and bl.isSigner(signer):
                validSig = True
            bl.bheader['validSig'] = validSig
            bl.bheader['meta'] = ''
            retData = {'meta': bl.bheader, 'metadata': bl.bmetadata}
        return json.dumps(retData)
    else:
        return bl.raw
def addForwardKey(self, newKey, expire=DEFAULT_KEY_EXPIRE):
    """Register a forward-secrecy public key for this peer.

    Returns False if the key is already recorded, True after inserting.
    Raises onionrexceptions.InvalidPubkey if the key is malformed.
    """
    newKey = bytesconverter.bytes_to_str(
        unpaddedbase32.repad(bytesconverter.str_to_bytes(newKey)))
    if not stringvalidators.validate_pub_key(newKey):
        # Do not add if something went wrong with the key
        raise onionrexceptions.InvalidPubkey(newKey)
    # Get the time we're inserting the key at
    timeInsert = epoch.get_epoch()
    # Look at our current keys for duplicate key data or time.
    # BUG FIX: the original opened the sqlite connection before this scan
    # and leaked it on the early `return False`; the connection is now
    # opened only once we know an insert will happen, and always closed.
    for entry in self._getForwardKeys():
        if entry[0] == newKey:
            return False
        if entry[1] == timeInsert:
            timeInsert += 1
            # Sleep if our time is the same to prevent dupe time records
            time.sleep(1)
    conn = sqlite3.connect(dbfiles.user_id_info_db,
                           timeout=DATABASE_LOCK_TIMEOUT)
    c = conn.cursor()
    try:
        # Add a forward secrecy key for the peer
        # Prepare the insert
        command = (self.publicKey, newKey, timeInsert, timeInsert + expire)
        c.execute("INSERT INTO forwardKeys VALUES(?, ?, ?, ?);", command)
        conn.commit()
    finally:
        conn.close()
    return True
def showOutput(self):
    """Continuously print new 'brd' (board) blocks for the current channel.

    Blocks until a channel is selected, then polls the block database
    every 5 seconds, printing each unseen block once. Stops when
    self.flowRunning goes False or on Ctrl-C.
    """
    # Wait for a channel to be chosen before reading anything
    while type(self.channel) is type(None) and self.flowRunning:
        time.sleep(1)
    try:
        while self.flowRunning:
            for block in blockmetadb.get_blocks_by_type('brd'):
                if block in self.alreadyOutputed:
                    continue
                block = Block(block)
                b_hash = bytesconverter.bytes_to_str(block.getHash())
                # Skip blocks for other channels
                if block.getMetadata('ch') != self.channel:
                    continue
                if not self.flowRunning:
                    break
                logger.info('\n------------------------',
                            prompt=False, terminal=True)
                content = block.getContent()
                # Escape new lines, remove trailing whitespace, and escape ansi sequences
                content = escapeansi.escape_ANSI(content.replace(
                    b'\n', b'\\n').replace(b'\r', b'\\r').strip().decode('utf-8'))
                logger.info(block.getDate().strftime(
                    "%m/%d %H:%M") + ' - ' + logger.colors.reset + content,
                    prompt=False, terminal=True)
                # Remember this block so it is only printed once
                self.alreadyOutputed.append(b_hash)
            time.sleep(5)
    except KeyboardInterrupt:
        self.flowRunning = False
def do_pow(self, pipe):
    """find partial hash colision generating nonce for a block

    Increments a counter nonce in metadata['c'] until the sha3 hash of
    the serialized payload starts with self.puzzle (self.difficulty hex
    chars). Sends the winning payload back through `pipe`; stops early
    on a 'shutdown' message or KeyboardInterrupt.
    """
    nonce = 0
    data = self.data
    metadata = self.metadata
    # Randomize 'n' so parallel workers explore disjoint payload spaces
    metadata['n'] = secrets.randbits(16)
    puzzle = self.puzzle
    difficulty = self.difficulty
    try:
        while True:
            # Break if shutdown received
            try:
                if pipe.poll() and pipe.recv() == 'shutdown':
                    break
            except KeyboardInterrupt:
                break
            # Load nonce into block metadata
            metadata['c'] = nonce
            # Serialize metadata, combine with block data
            payload = json.dumps(metadata).encode() + b'\n' + data
            # Check sha3_256 hash of block, compare to puzzle
            # Send payload if puzzle finished
            token = sha3_hash(payload)
            # ensure token is string
            token = bytesconverter.bytes_to_str(token)
            if puzzle == token[0:difficulty]:
                pipe.send(payload)
                break
            nonce += 1
    except KeyboardInterrupt:
        pass
def get_session(
        self,
        block_hash: Union[str, bytes]) -> session.UploadSession:
    """Return the existing upload session for a block hash.

    Accepts str or bytes hashes (normalized before comparison).
    Raises KeyError when no session exists for the hash.
    """
    # BUG FIX: annotation was `Union(str, bytes)` (call syntax), which
    # raises TypeError when the module is imported; Union must be
    # subscripted with [].
    block_hash = reconstructhash.deconstruct_hash(
        bytesconverter.bytes_to_str(block_hash))
    # Renamed the loop variable: the original used `session`, shadowing
    # the imported `session` module within this function.
    for upload_session in self.sessions:
        if upload_session.block_hash == block_hash:
            return upload_session
    raise KeyError
def on_processblocks(api, data=None):
    """Show a desktop notification when a new mail ('pm') block arrives.

    Resolves the sender's display name via the contact manager and
    honors the mail.notification* config settings (sound, stranger
    filtering).
    """
    if data['type'] != 'pm':
        return
    notification_func = notifier.notify
    data['block'].decrypt()
    metadata = data['block'].bmetadata
    signer = bytesconverter.bytes_to_str(data['block'].signer)
    user = contactmanager.ContactManager(signer, saveUser=False)
    name = user.get_info("name")
    # Idiom fix: compare to None with `is not None` (was `!= None`)
    if name != 'anonymous' and name is not None:
        signer = name.title()
    else:
        # Unknown sender: show only a short key prefix
        signer = signer[:5]
    if data['block'].decrypted:
        config.reload()
        if config.get('mail.notificationSound', True):
            notification_func = notifier.notification_with_sound
        if config.get('mail.notificationSetting', True):
            if not config.get('mail.strangersNotification', True):
                if not user.isFriend():
                    return
            notification_func(
                title="Onionr Mail - New Message",
                message="From: %s\n\nSubject: %s" % (signer,
                                                     metadata['subject']))
def get_block_metadata_from_data(block_data):
    """
    accepts block contents as string, returns a tuple of
    metadata, meta (meta being internal metadata, which will be
    returned as an encrypted base64 string if it is encrypted, dict if not),
    and data (the block body after the metadata line).
    """
    meta = {}
    metadata = {}
    data = block_data
    # Normalize to bytes; str input is encoded, bytes passes through
    try:
        block_data = block_data.encode()
    except AttributeError:
        pass
    # The first line of a block is its JSON metadata
    try:
        metadata = json.loads(
            bytesconverter.bytes_to_str(block_data[:block_data.find(b'\n')]))
    except JSONDecodeError:
        # Not JSON: leave metadata/meta empty and data as the full input.
        # NOTE: JSONDecodeError is a ValueError subclass, so this clause
        # must stay before the ValueError one below.
        pass
    except ValueError:
        logger.warn("Could not get metadata from:", terminal=True)
        logger.warn(block_data, terminal=True)
    else:
        # Successful parse: body is everything after the first newline
        data = block_data[block_data.find(b'\n'):]
        meta = metadata['meta']
    return (metadata, meta, data)
def handle_announce(request):
    """accept announcement posts, validating POW

    clientAPI should be an instance of the clientAPI server running,
    request is a instance of a flask request
    """
    resp = 'failure'
    newNode = ''
    try:
        newNode = request.form['node'].encode()
    except KeyError:
        logger.warn('No node specified for upload')
    else:
        newNode = bytesconverter.bytes_to_str(newNode)
        announce_queue = deadsimplekv.DeadSimpleKV(filepaths.announce_cache)
        announce_queue_list = announce_queue.get('new_peers')
        if announce_queue_list is None:
            announce_queue_list = []
        else:
            # Blank the node out if the queue is already full
            if len(announce_queue_list) >= onionrvalues.MAX_NEW_PEER_QUEUE:
                newNode = ''
        if stringvalidators.validate_transport(newNode) and \
                newNode not in announce_queue_list:
            g.shared_state.get(
                deadsimplekv.DeadSimpleKV).get('newPeers').append(newNode)
            # BUG FIX: the original stored the return value of
            # list.append() (always None) in the KV store; append first,
            # then persist the list itself.
            announce_queue_list.append(newNode)
            announce_queue.put('new_peers', announce_queue_list)
            announce_queue.flush()
            resp = 'Success'
    # BUG FIX: the original wrapped resp in a Response and then compared
    # the Response object to 'failure' (always False), so the 406 error
    # path could never trigger. Compare the string first.
    if resp == 'failure':
        return Response(resp), 406
    return Response(resp)
def do_pow(self, pipe):
    """Search for a proof-of-work nonce for this block.

    Repeatedly serializes the block metadata (with the candidate nonce
    in the 'pow' field) plus the block body, hashing until the digest's
    prefix matches the puzzle. The winning payload is sent back over
    `pipe`; a 'shutdown' message on the pipe aborts the search.
    """
    candidate = 0
    body = self.data
    meta = self.metadata
    target = self.puzzle
    prefix_len = self.difficulty
    while True:
        # Abort if the parent asked us to stop
        if pipe.poll() and pipe.recv() == 'shutdown':
            break
        # Embed the candidate nonce and rebuild the serialized payload
        meta['pow'] = candidate
        payload = json.dumps(meta).encode() + b'\n' + body
        digest = bytesconverter.bytes_to_str(
            crypto.hashers.sha3_hash(payload))
        # A matching prefix means the puzzle is solved
        if digest[0:prefix_len] == target:
            pipe.send(payload)
            break
        candidate += 1
def addToDB(self, data, dataType=0, expire=0):
    '''Add to the blacklist. Intended to be block hash, block data, peers, or transport addresses
    0=block
    1=peer
    2=pubkey

    Raises ValueError for oversized/non-alphanumeric hashes or
    non-integer dataType/expire values.
    '''
    # we hash the data so we can remove data entirely from our node's disk
    hashed = bytesconverter.bytes_to_str(
        onionrcrypto.hashers.sha3_hash(data))
    # Fix: raise ValueError instead of bare Exception (still caught by
    # any `except Exception` caller); removed the unused `insert` local.
    if len(hashed) > 64:
        raise ValueError("Hashed data is too large")
    if not hashed.isalnum():
        raise ValueError("Hashed data is not alpha numeric")
    try:
        int(dataType)
    except ValueError:
        raise ValueError("dataType is not int")
    try:
        int(expire)
    except ValueError:
        raise ValueError("expire is not int")
    # Already blacklisted: nothing to do
    if self.inBlacklist(hashed):
        return
    blacklistDate = epoch.get_epoch()
    try:
        self._dbExecute(
            "INSERT INTO blacklist (hash, dataType, blacklistDate, expire) VALUES(?, ?, ?, ?);",
            (str(hashed), dataType, blacklistDate, expire))
    except sqlite3.IntegrityError:
        # Raced with another insert of the same hash; harmless
        pass
def add_ID():
    """Command to create a new user ID key pair."""
    key_manager = keymanager.KeyManager()
    pw = ""
    # Deterministic generation is requested by passing 'true' as argv[2]
    try:
        deterministic = sys.argv[2].lower() == 'true'
    except IndexError:
        deterministic = False
    if not deterministic:
        newID = key_manager.addKey()[0]
    else:
        # Derive the key pair from a generated passphrase
        pw = "-".join(niceware.generate_passphrase(32))
        newID, privKey = onionrcrypto.generate_deterministic(pw)
        try:
            key_manager.addKey(pubKey=newID, privKey=privKey)
        except ValueError:
            logger.error(
                'That ID is already available, you can change to it ' +
                'with the change-id command.', terminal=True)
            return
    if pw:
        print("Phrase to restore ID:", pw)
    logger.info(
        'Added ID: %s' %
        (bytesconverter.bytes_to_str(newID.replace('=', '')), ),
        terminal=True)
def announce_node(daemon):
    '''Announce our node to our peers

    Picks one online peer (preferring peers not yet announced to),
    attaches a proof-of-work value, and POSTs our transport address to
    the peer's /announce endpoint. Returns True on success.
    '''
    ret_data = False
    announce_fail = False
    # Do not let announceCache get too large
    if len(daemon.announceCache) >= 10000:
        daemon.announceCache.popitem()
    # Only announce on the lowest security level
    if daemon.config.get('general.security_level', 0) == 0:
        # Announce to random online peers
        for i in daemon.onlinePeers:
            if not i in daemon.announceCache and not i in daemon.announceProgress:
                peer = i
                break
        else:
            peer = onlinepeers.pick_online_peer(daemon)
        # Single-pass loop used so `break` can abort early
        for x in range(1):
            try:
                ourID = gettransports.get()[0]
            except IndexError:
                break
            url = 'http://' + peer + '/announce'
            data = {'node': ourID}
            combinedNodes = ourID + peer
            # NOTE(review): ourID is a transport string, so `ourID != 1`
            # is always True — presumably a leftover sentinel check;
            # confirm intent before removing.
            if ourID != 1:
                existingRand = bytesconverter.bytes_to_str(
                    keydb.transportinfo.get_address_info(peer, 'powValue'))
                # Reset existingRand if it no longer meets the minimum POW
                if type(existingRand) is type(None) or not existingRand.endswith('0' * onionrvalues.ANNOUNCE_POW):
                    existingRand = ''
            # Reuse a cached/stored proof when available, else compute one
            if peer in daemon.announceCache:
                data['random'] = daemon.announceCache[peer]
            elif len(existingRand) > 0:
                data['random'] = existingRand
            else:
                daemon.announceProgress[peer] = True
                proof = onionrproofs.DataPOW(
                    combinedNodes, minDifficulty=onionrvalues.ANNOUNCE_POW)
                del daemon.announceProgress[peer]
                try:
                    data['random'] = base64.b64encode(proof.waitForResult()[1])
                except TypeError:
                    # Happens when we failed to produce a proof
                    logger.error("Failed to produce a pow for announcing to " + peer)
                    announce_fail = True
                else:
                    daemon.announceCache[peer] = data['random']
            if not announce_fail:
                logger.info('Announcing node to ' + url)
                if basicrequests.do_post_request(url, data, port=daemon.shared_state.get(NetController).socksPort) == 'Success':
                    logger.info('Successfully introduced node to ' + peer,
                                terminal=True)
                    ret_data = True
                    keydb.transportinfo.set_address_info(peer, 'introduced', 1)
                    keydb.transportinfo.set_address_info(peer, 'powValue',
                                                         data['random'])
    daemon.decrementThreadCount('announce_node')
    return ret_data
def generateForwardKey(self, expire=DEFAULT_KEY_EXPIRE):
    """Generate and store a new forward-secrecy key pair for this peer.

    Inserts (peer, pub, priv, created, expires) into myForwardKeys and
    returns the new public key as a string.
    """
    # Generate a forward secrecy key for the peer
    conn = sqlite3.connect(dbfiles.forward_keys_db, timeout=10)
    c = conn.cursor()
    # Prepare the insert.
    # Fix: the original named this local `time`, shadowing the stdlib
    # `time` module name; renamed for clarity.
    key_time = epoch.get_epoch()
    newKeys = onionrcrypto.generate()
    newPub = bytesconverter.bytes_to_str(newKeys[0])
    newPriv = bytesconverter.bytes_to_str(newKeys[1])
    command = (self.publicKey, newPub, newPriv, key_time, expire + key_time)
    c.execute("INSERT INTO myForwardKeys VALUES(?, ?, ?, ?, ?);", command)
    conn.commit()
    conn.close()
    return newPub
def process_block_metadata(blockHash: str):
    """
    Read metadata from a block and cache it to the block database.

    blockHash -> sha3_256 hex formatted hash of Onionr block
    """
    curTime = epoch.get_rounded_epoch(roundS=60)
    myBlock = onionrblockapi.Block(blockHash)
    if myBlock.isEncrypted:
        myBlock.decrypt()
    # Only process plaintext blocks or successfully decrypted ones
    if (myBlock.isEncrypted and myBlock.decrypted) or (not myBlock.isEncrypted):
        blockType = myBlock.getMetadata(
            'type'
        )  # we would use myBlock.getType() here, but it is bugged with encrypted blocks
        signer = bytesconverter.bytes_to_str(myBlock.signer)
        valid = myBlock.verifySig()
        if valid:
            # Signed blocks may carry a new forward-secrecy key to adopt
            if myBlock.getMetadata('newFSKey') is not None:
                try:
                    onionrusers.OnionrUser(signer).addForwardKey(
                        myBlock.getMetadata('newFSKey'))
                except onionrexceptions.InvalidPubkey:
                    logger.warn(
                        '%s has invalid forward secrecy key to add: %s' %
                        (signer, myBlock.getMetadata('newFSKey')))
        try:
            # TypeError here means blockType is None (missing metadata)
            if len(blockType) <= onionrvalues.MAX_BLOCK_TYPE_LENGTH:
                blockmetadb.update_block_info(blockHash, 'dataType', blockType)
        except TypeError:
            logger.warn("Missing block information")
            pass
        # Set block expire time if specified
        try:
            expireTime = int(myBlock.getHeader('expire'))
            # test that expire time is an integer of sane length (for epoch)
            # doesn't matter if its too large because of the min() func below
            if not len(str(expireTime)) < 20:
                raise ValueError('timestamp invalid')
        except (ValueError, TypeError) as e:
            expireTime = onionrvalues.DEFAULT_EXPIRE + curTime
        finally:
            # Clamp to at most DEFAULT_EXPIRE from now, then persist
            expireTime = min(expireTime, curTime + onionrvalues.DEFAULT_EXPIRE)
            blockmetadb.update_block_info(blockHash, 'expire', expireTime)
        if blockType == 'update':
            updater.update_event(myBlock)
        onionrevents.event('processblocks', data={
            'block': myBlock,
            'type': blockType,
            'signer': signer,
            'validSig': valid
        })
def client_api_insert_block():
    """Insert a new block from a client-supplied JSON request body.

    Expects at minimum a 'message' key; optional keys: 'encrypt', 'to',
    'sign', 'type', 'meta'. Insertion runs in a background thread;
    returns 'success' immediately, or 400 on bad input.
    """
    bData = request.get_json(force=True)
    message = bData['message']
    # Detect if message (block body) is not specified.
    # BUG FIX: the original tested `type(message) is None`, which is never
    # true (type() always returns a type object), so None messages were
    # never rejected. Check moved before hashing, which would fail on
    # None anyway. Removed the unused `subject` and `encrypt` locals.
    if message is None:
        return 'failure due to unspecified message', 400
    message_hash = bytesconverter.bytes_to_str(hashers.sha3_hash(message))
    # Detect if block with same message is already being inserted
    if message_hash in g.too_many.get_by_string(
            "OnionrCommunicatorDaemon").generating_blocks:
        return 'failure due to duplicate insert', 400
    else:
        g.too_many.get_by_string(
            "OnionrCommunicatorDaemon").generating_blocks.append(message_hash)
    encryptType = ''
    sign = True
    meta = {}
    to = ''
    try:
        if bData['encrypt']:
            to = bData['to'].strip()
            if "-" in to:
                # Mnemonic form of the recipient key; convert to base32
                to = mnemonickeys.get_base32(to)
            encryptType = 'asym'
    except KeyError:
        pass
    try:
        if not bData['sign']:
            sign = False
    except KeyError:
        pass
    try:
        bType = bData['type']
    except KeyError:
        bType = 'bin'
    try:
        meta = json.loads(bData['meta'])
    except KeyError:
        pass
    # Insert asynchronously so the API call returns immediately
    threading.Thread(
        target=onionrblocks.insert, args=(message, ),
        kwargs={'header': bType,
                'encryptType': encryptType,
                'sign': sign,
                'asymPeer': to,
                'meta': meta}).start()
    return Response('success')
def service_creator(daemon):
    """Spawn direct-connection servers for unseen 'con' blocks.

    Scans connection blocks, validates the embedded onion address and
    signer key, and starts a service server for each new valid pair.
    """
    assert isinstance(daemon, communicator.OnionrCommunicatorDaemon)
    # Find socket connection blocks
    # TODO cache blocks and only look at recently received ones
    for block_hash in blockmetadb.get_blocks_by_type('con'):
        # Skip blocks we already acted on
        if block_hash in daemon.active_services:
            continue
        bl = onionrblockapi.Block(block_hash, decrypt=True)
        address = bytesconverter.bytes_to_str(bl.bcontent) + '.onion'
        if server_exists(bl.signer):
            continue
        key_ok = stringvalidators.validate_pub_key(bl.signer)
        addr_ok = stringvalidators.validate_transport(address)
        if key_ok and addr_ok:
            signer = bytesconverter.bytes_to_str(bl.signer)
            daemon.active_services.append(block_hash)
            daemon.active_services.append(signer)
            # Roll back the bookkeeping if the server failed to start
            if not daemon.services.create_server(signer, address, daemon):
                daemon.active_services.remove(block_hash)
                daemon.active_services.remove(signer)
    daemon.decrementThreadCount('service_creator')
def __init__(self, block_hash: Union[str, bytes]):
    """Track per-peer upload outcomes for a single block.

    Accepts the block hash as str or bytes; raises ValueError when the
    normalized hash is not a valid hash string.
    """
    normalized = reconstructhash.reconstruct_hash(
        bytesconverter.bytes_to_str(block_hash))
    if not stringvalidators.validate_hash(normalized):
        raise ValueError
    # When the session started (epoch seconds)
    self.start_time = epoch.get_epoch()
    # Stored in the shortened (deconstructed) form
    self.block_hash = reconstructhash.deconstruct_hash(normalized)
    # Aggregate counters across all peers
    self.total_fail_count: int = 0
    self.total_success_count: int = 0
    # Per-peer bookkeeping maps
    self.peer_fails = {}
    self.peer_exists = {}
def handle_announce(request):
    '''
    accept announcement posts, validating POW
    clientAPI should be an instance of the clientAPI server running, request is a instance of a flask request
    '''
    resp = 'failure'
    powHash = ''
    randomData = ''
    newNode = ''
    try:
        newNode = request.form['node'].encode()
    except KeyError:
        logger.warn('No node specified for upload')
    else:
        try:
            randomData = request.form['random']
            randomData = base64.b64decode(randomData)
        except KeyError:
            logger.warn('No random data specified for upload')
        else:
            # Verify the proof of work: blake2b over the announcer's
            # random data plus the hash of both transport addresses
            nodes = newNode + bytesconverter.str_to_bytes(
                gettransports.get()[0])
            nodes = crypto.hashers.blake2b_hash(nodes)
            powHash = crypto.hashers.blake2b_hash(randomData + nodes)
            try:
                powHash = powHash.decode()
            except AttributeError:
                pass
            if powHash.startswith('0' * onionrvalues.ANNOUNCE_POW):
                newNode = bytesconverter.bytes_to_str(newNode)
                announce_queue = deadsimplekv.DeadSimpleKV(
                    filepaths.announce_cache)
                announce_queue_list = announce_queue.get('new_peers')
                if announce_queue_list is None:
                    announce_queue_list = []
                if stringvalidators.validate_transport(
                        newNode) and newNode not in announce_queue_list:
                    g.shared_state.get(
                        OnionrCommunicatorDaemon).newPeers.append(newNode)
                    # BUG FIX: the original stored the return value of
                    # list.append() (always None) in the KV store; append
                    # first, then persist the list itself.
                    announce_queue_list.append(newNode)
                    announce_queue.put('new_peers', announce_queue_list)
                    announce_queue.flush()
                    resp = 'Success'
            else:
                logger.warn(newNode.decode() + ' failed to meet POW: ' + powHash)
    # BUG FIX: the original wrapped resp in a Response and then compared
    # the Response object to 'failure' (always False), so the 406 error
    # path could never trigger. Compare the string first.
    if resp == 'failure':
        return Response(resp), 406
    return Response(resp)
def get_motd()->Response:
    """Return the newest validly-signed MOTD block's content as plain text.

    NOTE(review): `signer` is not defined in this function — presumably a
    module-level constant holding the trusted MOTD signer key; confirm
    against the enclosing module.
    """
    motds = blockmetadb.get_blocks_by_type("motd")
    newest_time = 0
    message = "No MOTD currently present."
    for x in motds:
        bl = onionrblocks.onionrblockapi.Block(x)
        # Only accept blocks signed by the expected signer key
        if not bl.verifySig() or bl.signer != bytesconverter.bytes_to_str(unpaddedbase32.repad(bytesconverter.str_to_bytes(signer))):
            continue
        if not bl.isSigner(signer):
            continue
        # Keep the most recent MOTD by claimed timestamp
        if bl.claimedTime > newest_time:
            newest_time = bl.claimedTime
            message = bl.bcontent
    return Response(message, headers={"Content-Type": "text/plain"})
def set_by_key(key):
    """Overwrite/set only 1 config key.

    The request body is JSON (presumably of the form {'data': data} per
    the original inline note — confirm against callers); the parsed
    value is stored under `key` and persisted.
    """
    # Fix: the original had a second bare string literal after the
    # docstring ("{ 'data': data }"), a no-op statement; folded it into
    # the docstring above.
    try:
        data = json.loads(bytes_to_str(request.data))
    except (JSONDecodeError, KeyError):
        abort(400)
    config.set(key, data, True)
    return Response('success')
def inBlacklist(self, data):
    """Return True if the sha3 hash of `data` is in the blacklist table."""
    digest = bytesconverter.bytes_to_str(
        onionrcrypto.hashers.sha3_hash(data))
    # Sanity-check the hash before using it in a query
    if not digest.isalnum():
        raise Exception("Hashed data is not alpha numeric")
    if len(digest) > 64:
        raise Exception("Hashed data is too large")
    rows = self._dbExecute(
        "SELECT * FROM blacklist WHERE hash = ?", (digest,))
    # Any row at all means the hash is blacklisted
    for _ in rows:
        return True
    return False
def add_ID():
    """Command to create a new user ID key pair.

    With 'true' as argv[2], derives the pair deterministically from a
    user-entered passphrase (confirmed twice via getpass); otherwise a
    random pair is generated. Exits on passphrase mismatch or weakness.
    """
    key_manager = keymanager.KeyManager()
    try:
        sys.argv[2]  # pylint: disable=W0104
        # Anything other than 'true' falls through to random generation
        if not sys.argv[2].lower() == 'true':
            raise ValueError
    except (IndexError, ValueError):
        newID = key_manager.addKey()[0]
    else:
        logger.warn('Deterministic keys require random and long passphrases.',
                    terminal=True)
        logger.warn(
            'If a good passphrase is not used, your key can be easily stolen.',
            terminal=True)
        logger.warn('You should use a series of hard to guess words, ' +
                    'see this for reference: https://www.xkcd.com/936/',
                    terminal=True)
        try:
            pass1 = getpass.getpass(prompt='Enter at least %s characters: ' %
                                    (DETERMINISTIC_REQUIREMENT, ))
            pass2 = getpass.getpass(prompt='Confirm entry: ')
        except KeyboardInterrupt:
            sys.exit(42)
        if onionrcrypto.cryptoutils.safe_compare(pass1, pass2):
            try:
                logger.info(
                    'Generating deterministic key. This can take a while.',
                    terminal=True)
                newID, privKey = onionrcrypto.generate_deterministic(pass1)
            except onionrexceptions.PasswordStrengthError:
                logger.error('Passphrase must use at least %s characters.' %
                             (DETERMINISTIC_REQUIREMENT, ), terminal=True)
                sys.exit(1)
        else:
            logger.error('Passwords do not match.', terminal=True)
            sys.exit(1)
        try:
            key_manager.addKey(pubKey=newID, privKey=privKey)
        except ValueError:
            logger.error(
                'That ID is already available, you can change to it ' +
                'with the change-id command.', terminal=True)
            return
    logger.info('Added ID: %s' % (bytesconverter.bytes_to_str(newID), ),
                terminal=True)
def getGeneratedForwardKeys(self, genNew=True):
    """Return the (public, private) forward keys we generated for this peer.

    If none exist and genNew is True, generates one and re-reads.
    """
    # Fetch the keys we generated for the peer, that are still around
    conn = sqlite3.connect(dbfiles.forward_keys_db, timeout=10)
    cursor = conn.cursor()
    peer_key = bytesconverter.bytes_to_str(self.publicKey)
    key_pairs = [
        (row[1], row[2])
        for row in cursor.execute(
            "SELECT * FROM myForwardKeys WHERE peer = ?", (peer_key, ))
    ]
    if not key_pairs and genNew:
        # Nothing stored yet: create one and fetch the fresh list
        self.generateForwardKey()
        key_pairs = self.getGeneratedForwardKeys()
    return list(key_pairs)
def on_processblocks(api, data=None):
    """Show a desktop notification when a new mail ('pm') block arrives."""
    if data['type'] != 'pm':
        return
    data['block'].decrypt()
    block_meta = data['block'].bmetadata
    sender = bytesconverter.bytes_to_str(data['block'].signer)
    contact = contactmanager.ContactManager(sender, saveUser=False)
    display_name = contact.get_info("name")
    # Prefer the contact's stored name; fall back to a short key prefix
    if display_name not in ('anonymous', None):
        sender = display_name.title()
    else:
        sender = sender[:5]
    if data['block'].decrypted:
        notifier.notify(
            title="Onionr Mail - New Message",
            message="From: %s\n\nSubject: %s" % (sender,
                                                 block_meta['subject']))
def getData(bHash):
    """Return the raw data for a block hash, from disk or the database.

    Raises ValueError for an invalid hash and
    onionrexceptions.NoDataAvailable if the block is unknown.
    """
    if not stringvalidators.validate_hash(bHash):
        raise ValueError
    bHash = bytesconverter.bytes_to_str(bHash)
    # First check disk for a data file by hash
    # if no file, check the database
    # If no entry in either, raise an exception
    fileLocation = '%s/%s.dat' % (filepaths.block_data_location, bHash)
    # Fix: error message read "Flock data" instead of "Block data"
    not_found_msg = "Block data not found for: "
    if os.path.exists(fileLocation):
        with open(fileLocation, 'rb') as block:
            retData = block.read()
    else:
        retData = _dbFetch(bHash)
    if retData is None:
        raise onionrexceptions.NoDataAvailable(not_found_msg + str(bHash))
    return retData
def add_session(
        self,
        session_or_block: Union[str, bytes, session.UploadSession]
        ) -> session.UploadSession:
    """Create (or add existing) block upload session from a
    str/bytes block hex hash, existing UploadSession"""
    # BUG FIX: annotation used `Union(...)` call syntax, which raises
    # TypeError when the module is imported; Union must be subscripted
    # with [].
    if isinstance(session_or_block, session.UploadSession):
        if session_or_block not in self.sessions:
            self.sessions.append(session_or_block)
        return session_or_block
    # Reuse an existing session for this hash when one exists
    try:
        return self.get_session(session_or_block)
    except KeyError:
        pass
    # convert bytes hash to str
    if isinstance(session_or_block, bytes):
        session_or_block = bytesconverter.bytes_to_str(session_or_block)
    # intentionally not elif
    if isinstance(session_or_block, str):
        new_session = session.UploadSession(session_or_block)
        self.sessions.append(new_session)
        return new_session
def __init__(self, publicKey, saveUser=False, recordExpireSeconds=5):
    """Per-peer contact record manager backed by a JSON file.

    Accepts the peer key in mnemonic or base32 form; records are
    re-read from disk after recordExpireSeconds.
    """
    # Convert a mnemonic-form key to base32 (ValueError from either the
    # delimiter check or conversion is deliberately ignored)
    try:
        if mnemonickeys.DELIMITER in publicKey:
            publicKey = mnemonickeys.get_base32(publicKey)
    except ValueError:
        pass
    publicKey = bytesconverter.bytes_to_str(
        unpaddedbase32.repad(bytesconverter.str_to_bytes(publicKey)))
    super(ContactManager, self).__init__(publicKey, saveUser=saveUser)
    home = identifyhome.identify_home()
    # Where this contact's record lives on disk
    self.dataDir = home + '/contacts/'
    self.dataFile = '%s/contacts/%s.json' % (home, publicKey)
    self.lastRead = 0
    self.recordExpire = recordExpireSeconds
    self.deleted = False
    self.data = self._loadData()
    if not os.path.exists(self.dataDir):
        os.mkdir(self.dataDir)