def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called when command "Files" was received from one of my suppliers.

    This is an answer from the given supplier (after my request) with the
    list of my files stored on his machine.  The payload is decrypted with
    my own key, fed into the backup matrix, and clean-up / rebuilding
    requests are triggered as needed.

    Returns True when the list was successfully processed, False otherwise.
    """
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    # BUGFIX: supplier_position() signals "not found" with -1, but the
    # original check `num < -1` could never trigger for that value
    if num < 0:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    target_key_id = my_keys.latest_key_id(list_files_global_id['key_id'])
    if not my_keys.is_key_private(target_key_id):
        lg.warn('key %r not registered, not possible to decrypt ListFiles() packet from %r' % (
            target_key_id, supplier_idurl, ))
        return False
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=target_key_id,
        )
        input_data = block.Data()
    except:
        lg.err('failed decrypting data from packet %r received from %r' % (newpacket, supplier_idurl))
        return False
    list_files_raw = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    # compare received list against local backup matrix state
    remote_files_changed, backups2remove, paths2remove, missed_backups = backup_matrix.process_raw_list_files(
        supplier_num=num,
        list_files_text_body=list_files_raw,
        customer_idurl=None,
        is_in_sync=None,
        auto_create=False,
    )
    list_files_orator.IncomingListFiles(newpacket)
    if remote_files_changed:
        backup_matrix.SaveLatestRawListFiles(supplier_idurl, list_files_raw)
    if _Debug:
        lg.args(_DebugLevel, supplier=nameurl.GetName(supplier_idurl), customer=nameurl.GetName(customer_idurl),
                backups2remove=len(backups2remove), paths2remove=len(paths2remove),
                files_changed=remote_files_changed, missed_backups=len(missed_backups), )
    if len(backups2remove) > 0:
        # ask suppliers to erase backup copies that are no longer needed
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        # ask suppliers to erase catalog paths that are no longer needed
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        # some pieces are missing remotely - let the rebuilder try to restore them
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, '    also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
def doPrepare(self, *args, **kwargs):
    """
    Action method.

    Scans the '.keys' folder of the catalog and sorts every stored key
    copy into one of three buckets: reliably stored, known locally but not
    stored reliably, or unreliable/unknown.
    """
    self.restored_count = 0
    self.saved_count = 0
    self.deleted_count = 0
    self.stored_keys = {}
    self.not_stored_keys = {}
    self.unreliable_keys = {}
    self.keys_to_upload = set()
    self.keys_to_erase = {}
    self.keys_to_rename = {}
    lookup = backup_fs.ListChildsByPath(
        path='.keys',
        recursive=False,
    )
    if isinstance(lookup, list):
        # a key copy counts as "stored" when at least one of its versions
        # reaches the minimum correctable percent of the current ecc map
        minimum_reliable_percent = eccmap.GetCorrectablePercent(eccmap.Current().suppliers_number)
        for item in lookup:
            item_path = item['path']
            if item_path.endswith('.public'):
                is_private = False
                stored_key_id = item_path.replace('.public', '').replace('.keys/', '')
            else:
                is_private = True
                stored_key_id = item_path.replace('.private', '').replace('.keys/', '')
            stored_key_id = my_keys.latest_key_id(stored_key_id)
            is_reliable = False
            for version_info in item['versions']:
                try:
                    reliable = float(version_info['reliable'].replace('%', ''))
                except:
                    lg.exc()
                    reliable = 0.0
                if reliable >= minimum_reliable_percent:
                    is_reliable = True
                    break
            if is_reliable:
                self.stored_keys[stored_key_id] = is_private
                continue
            # the remote copy is weak: decide by whether the key exists locally
            if is_private:
                known_locally = my_keys.is_key_private(stored_key_id)
            else:
                known_locally = my_keys.is_key_registered(stored_key_id)
            if known_locally:
                self.not_stored_keys[stored_key_id] = is_private
            else:
                self.unreliable_keys[stored_key_id] = is_private
    if _Debug:
        lg.args(_DebugLevel, stored_keys=len(self.stored_keys), not_stored_keys=list(self.not_stored_keys.keys()), unreliable_keys=len(self.unreliable_keys))
def on_audit_key_received(newpacket, info, status, error_message):
    """
    Callback will be executed when remote user would like to check if I poses given key locally.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(
            2,
            'key_ring.on_audit_key_received ERROR reading data from %s' % newpacket.RemoteID)
        return False
    # unpack the audit request and decode both challenge samples
    try:
        payload_bytes = block.Data()
        audit_request = serialization.BytesToDict(payload_bytes, keys_to_text=True, values_to_text=True)
        key_id = audit_request['key_id']
        audit_info = audit_request['audit']
        public_sample = base64.b64decode(audit_info['public_sample'])
        private_sample = base64.b64decode(audit_info['private_sample'])
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return False
    # validate the requested key before touching it
    if not my_keys.is_valid_key_id(key_id):
        p2p_service.SendFail(newpacket, 'invalid key id')
        return False
    if not my_keys.is_key_registered(key_id, include_master=True):
        p2p_service.SendFail(newpacket, 'key not registered')
        return False
    if public_sample:
        # prove possession of the public key: encrypt the sample and send it back
        p2p_service.SendAck(newpacket, base64.b64encode(my_keys.encrypt(key_id, public_sample)))
        if _Debug:
            lg.info('remote user %s requested audit of public key %s' % (newpacket.OwnerID, key_id))
        return True
    if private_sample:
        if not my_keys.is_key_private(key_id):
            p2p_service.SendFail(newpacket, 'private key not registered')
            return False
        # prove possession of the private key: decrypt the sample and send it back
        p2p_service.SendAck(newpacket, base64.b64encode(my_keys.decrypt(key_id, private_sample)))
        if _Debug:
            lg.info('remote user %s requested audit of private key %s' % (newpacket.OwnerID, key_id))
        return True
    p2p_service.SendFail(newpacket, 'wrong audit request')
    return False
def SendListFiles(target_supplier, customer_idurl=None, key_id=None, query_items=None, wide=False, callbacks=None, timeout=None):
    """
    This is used as a request method from your supplier : if you send him
    a ListFiles() packet he will reply you with a list of stored files in
    a Files() packet.

    Returns the outgoing signed packet, or None if the supplier could not
    be resolved.
    """
    # BUGFIX/idiom: the defaults used to be the mutable objects `[]` and `{}`,
    # which are created once and shared between all calls; use None sentinels
    if callbacks is None:
        callbacks = {}
    MyID = my_id.getLocalID()
    if not customer_idurl:
        customer_idurl = MyID
    # target_supplier may be either a position number or an IDURL
    if not str(target_supplier).isdigit():
        RemoteID = target_supplier
    else:
        RemoteID = contactsdb.supplier(target_supplier, customer_idurl=customer_idurl)
    if not RemoteID:
        lg.warn("RemoteID is empty target_supplier=%s" % str(target_supplier))
        return None
    if not key_id:
        # key_id = global_id.MakeGlobalID(idurl=customer_idurl, key_alias='customer')
        # TODO: due to issue with "customer" key backup/restore decided to always use my "master" key
        # to retrieve my list files info from supplier
        # expect remote user always poses my master public key from my identity.
        # probably require more work to build more reliable solution without using my master key at all
        # when my identity rotated supplier first needs to receive my new identity and then sending ListFiles()
        key_id = my_id.getGlobalID(key_alias='master')
    if not my_keys.is_key_registered(key_id) or not my_keys.is_key_private(key_id):
        lg.warn('key %r not exist or public, my "master" key to be used with ListFiles() packet' % key_id)
        key_id = my_id.getGlobalID(key_alias='master')
    PacketID = "%s:%s" % (key_id, packetid.UniqueID(), )
    if not query_items:
        # default query matches everything
        query_items = ['*', ]
    Payload = serialization.DictToBytes({'items': query_items, })
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.SendListFiles %r to %s with query : %r" % (
            PacketID, nameurl.GetName(RemoteID), query_items, ))
    result = signed.Packet(
        Command=commands.ListFiles(),
        OwnerID=MyID,
        CreatorID=MyID,
        PacketID=PacketID,
        Payload=Payload,
        RemoteID=RemoteID,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks, response_timeout=timeout)
    return result
def _on_files_received(self, newpacket, info):
    """
    Handle an incoming Files() packet from another user who granted me
    access to his catalog: decrypt nothing here (payload is a plain
    encrypted block), import the serialized catalog items and Ack back.

    Returns True when the shared list was imported successfully.
    """
    import json
    from logs import lg
    from p2p import p2p_service
    from storage import backup_fs
    from storage import backup_control
    from crypt import encrypted
    from crypt import my_keys
    from userid import my_id
    from userid import global_id
    try:
        user_id = newpacket.PacketID.strip().split(':')[0]
        if user_id == my_id.getGlobalID():
            # skip my own Files() packets which comes from my suppliers
            # only process list Files() from other users who granted me access
            return False
        key_id = user_id
        if not my_keys.is_valid_key_id(key_id):
            # ignore, invalid key id in packet id
            return False
        if not my_keys.is_key_private(key_id):
            raise Exception('private key is not registered')
    except Exception as exc:
        lg.warn(str(exc))
        p2p_service.SendFail(newpacket, str(exc))
        return False
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.warn('failed reading data from %s' % newpacket.RemoteID)
        return False
    if block.CreatorID != global_id.GlobalUserToIDURL(user_id):
        lg.warn('invalid packet, creator ID must be present in packet ID : %s ~ %s' % (
            block.CreatorID, user_id, ))
        return False
    try:
        # BUGFIX: json.loads() lost its `encoding` parameter in Python 3.9;
        # bytes input is decoded automatically since Python 3.6
        json_data = json.loads(block.Data())
        json_data['items']
        customer_idurl = block.CreatorID
        count = backup_fs.Unserialize(
            raw_data=json_data,
            iter=backup_fs.fs(customer_idurl),
            iterID=backup_fs.fsID(customer_idurl),
            from_json=True,
        )
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return False
    p2p_service.SendAck(newpacket)
    if count == 0:
        lg.warn('no files were imported during file sharing')
    else:
        backup_control.Save()
        lg.info('imported %d shared files from %s, key_id=%s' % (
            count, customer_idurl, key_id, ))
    return True
    # from access import shared_access_coordinator
    # this_share = shared_access_coordinator.get_active_share(key_id)
    # if not this_share:
    #     lg.warn('share is not opened: %s' % key_id)
    #     p2p_service.SendFail(newpacket, 'share is not opened')
    #     return False
    # this_share.automat('customer-list-files-received', (newpacket, info, block, ))
    # return True
def on_key_received(newpacket, info, status, error_message):
    """
    Callback executed when a Key() packet arrives from a remote user.

    Unserializes the key info from the encrypted payload and merges it into
    the local key store: public keys are registered unless they conflict
    with an existing key of the same ID; a private key may replace a
    previously known public-only copy of itself.  Sends Ack() on success
    (including "already known" cases) and Fail() on any error.
    Returns True when the packet was acknowledged, False otherwise.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(
            2,
            'key_ring.on_key_received ERROR reading data from %s' % newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = json.loads(key_data)
        key_id = key_json['key_id']
        # read_key_info() may normalize the key id, so it is re-assigned here
        key_id, key_object = my_keys.read_key_info(key_json)
        if key_object.isPublic():
            # received key is a public key
            if my_keys.is_key_registered(key_id):
                # but we already have a key with that ID
                if my_keys.is_key_private(key_id):
                    # we should not overwrite existing private key
                    raise Exception('private key already registered')
                if my_keys.get_public_key_raw(
                        key_id, 'openssh') != key_object.toString('openssh'):
                    # and we should not overwrite existing public key as well
                    raise Exception(
                        'another key already registered with that ID')
                # exact same public key - acknowledge and skip
                p2p_service.SendAck(newpacket)
                lg.warn('received existing public key: %s, skip' % key_id)
                return True
            if not my_keys.register_key(key_id, key_object):
                raise Exception('key register failed')
            else:
                lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            if _Debug:
                lg.info(
                    'received and stored locally a new key %s, include_private=%s' % (
                        key_id, key_json.get('include_private')))
            return True
        # received key is a private key
        if my_keys.is_key_registered(key_id):
            # check if we already have that key
            if my_keys.is_key_private(key_id):
                # we have already private key with same ID!!!
                if my_keys.get_private_key_raw(
                        key_id, 'openssh') != key_object.toString('openssh'):
                    # and this is a new private key : we should not overwrite!
                    raise Exception('private key already registered')
                # this is the same private key
                p2p_service.SendAck(newpacket)
                lg.warn('received existing private key: %s, skip' % key_id)
                return True
            # but we have a public key with same ID
            if my_keys.get_public_key_raw(
                    key_id, 'openssh') != key_object.public().toString('openssh'):
                # and we should not overwrite existing public key as well
                raise Exception('another key already registered with that ID')
            # the incoming private key matches our public copy: upgrade it
            lg.info('erasing public key %s' % key_id)
            my_keys.erase_key(key_id)
            if not my_keys.register_key(key_id, key_object):
                raise Exception('key register failed')
            lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            return True
        # no private key with given ID was registered
        if not my_keys.register_key(key_id, key_object):
            raise Exception('key register failed')
        lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
        p2p_service.SendAck(newpacket)
        return True
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
    return False
def _on_files_received(self, newpacket, info):
    """
    Handle an incoming Files() packet that was encrypted with a key shared
    to me by another ("trusted") customer.

    Two possible senders:
      * the trusted customer himself - the payload is a serialized piece of
        his catalog which gets imported via backup_fs.Unserialize();
      * one of his (external) suppliers - the payload is a raw list-files
        body, which is saved into the backup matrix and used to detect and
        fix that supplier's position in contactsdb.
    Returns True when the packet was processed and acknowledged.
    """
    from logs import lg
    from lib import serialization
    from main import settings
    from main import events
    from p2p import p2p_service
    from storage import backup_fs
    from storage import backup_control
    from crypt import encrypted
    from crypt import my_keys
    from userid import my_id
    from userid import global_id
    from storage import backup_matrix
    from supplier import list_files
    from contacts import contactsdb
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    trusted_customer_idurl = list_files_global_id['idurl']
    incoming_key_id = list_files_global_id['key_id']
    if trusted_customer_idurl == my_id.getGlobalID():
        lg.warn('skip %s packet which seems to came from my own supplier' % newpacket)
        # only process list Files() from other users who granted me access
        return False
    if not my_keys.is_valid_key_id(incoming_key_id):
        lg.warn('ignore, invalid key id in packet %s' % newpacket)
        return False
    if not my_keys.is_key_private(incoming_key_id):
        lg.warn('private key is not registered : %s' % incoming_key_id)
        p2p_service.SendFail(newpacket, 'private key is not registered')
        return False
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=incoming_key_id,
        )
    except:
        lg.exc(newpacket.Payload)
        return False
    if block is None:
        lg.warn('failed reading data from %s' % newpacket.RemoteID)
        return False
    # if block.CreatorID != trusted_customer_idurl:
    #     lg.warn('invalid packet, creator ID must be present in packet ID : %s ~ %s' % (
    #         block.CreatorID, list_files_global_id['idurl'], ))
    #     return False
    try:
        raw_files = block.Data()
    except:
        lg.exc()
        return False
    if block.CreatorID == trusted_customer_idurl:
        # this is a trusted guy sending some shared files to me
        try:
            json_data = serialization.BytesToDict(raw_files, keys_to_text=True)
            # validate expected payload shape before importing
            json_data['items']
        except:
            lg.exc()
            return False
        count = backup_fs.Unserialize(
            raw_data=json_data,
            iter=backup_fs.fs(trusted_customer_idurl),
            iterID=backup_fs.fsID(trusted_customer_idurl),
            from_json=True,
        )
        p2p_service.SendAck(newpacket)
        events.send(
            'shared-list-files-received',
            dict(
                customer_idurl=trusted_customer_idurl,
                new_items=count,
            ))
        if count == 0:
            lg.warn('no files were imported during file sharing')
        else:
            backup_control.Save()
            lg.info('imported %d shared files from %s, key_id=%s' % (
                count, trusted_customer_idurl, incoming_key_id, ))
        return True
    # otherwise this must be an external supplier sending us a files he stores for trusted customer
    external_supplier_idurl = block.CreatorID
    try:
        supplier_raw_list_files = list_files.UnpackListFiles(
            raw_files, settings.ListFilesFormat())
        backup_matrix.SaveLatestRawListFiles(
            supplier_idurl=external_supplier_idurl,
            raw_data=supplier_raw_list_files,
            customer_idurl=trusted_customer_idurl,
        )
    except:
        lg.exc()
        return False
    # need to detect supplier position from the list of packets
    # and place that supplier on the correct position in contactsdb
    real_supplier_pos = backup_matrix.DetectSupplierPosition(
        supplier_raw_list_files)
    known_supplier_pos = contactsdb.supplier_position(
        external_supplier_idurl, trusted_customer_idurl)
    if real_supplier_pos >= 0:
        if known_supplier_pos >= 0 and known_supplier_pos != real_supplier_pos:
            lg.warn(
                'external supplier %s position is not matching to list files, rewriting for customer %s' %
                (external_supplier_idurl, trusted_customer_idurl))
            contactsdb.erase_supplier(
                idurl=external_supplier_idurl,
                customer_idurl=trusted_customer_idurl,
            )
        contactsdb.add_supplier(
            idurl=external_supplier_idurl,
            position=real_supplier_pos,
            customer_idurl=trusted_customer_idurl,
        )
        contactsdb.save_suppliers(customer_idurl=trusted_customer_idurl)
    else:
        lg.warn(
            'not possible to detect external supplier position for customer %s' % trusted_customer_idurl)
    # finally send ack packet back
    p2p_service.SendAck(newpacket)
    lg.info(
        'received list of packets from external supplier %s for customer %s' %
        (external_supplier_idurl, trusted_customer_idurl))
    return True
def on_files_received(newpacket, info):
    """
    Handle an incoming Files() packet addressed to one of my registered
    private keys (a key shared to me by a "trusted" customer).

    Two possible senders:
      * the trusted customer himself - imports the serialized catalog items
        and fires the 'shared-list-files-received' event;
      * one of his external suppliers - feeds the raw list-files body into
        the backup matrix (trying to detect the supplier's position first).
    Returns True when the packet was processed and acknowledged.
    """
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    trusted_customer_idurl = list_files_global_id['idurl']
    incoming_key_id = list_files_global_id['key_id']
    if trusted_customer_idurl == my_id.getLocalID():
        if _Debug:
            lg.dbg(_DebugLevel, 'ignore %s packet which seems to came from my own supplier' % newpacket)
        # only process list Files() from other customers who granted me access to their files
        return False
    if not my_keys.is_valid_key_id(incoming_key_id):
        lg.warn('ignore, invalid key id in packet %s' % newpacket)
        return False
    if not my_keys.is_key_private(incoming_key_id):
        lg.warn('private key is not registered : %s' % incoming_key_id)
        p2p_service.SendFail(newpacket, 'private key is not registered')
        return False
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=incoming_key_id,
        )
    except:
        lg.exc(newpacket.Payload)
        return False
    if block is None:
        lg.warn('failed reading data from %s' % newpacket.RemoteID)
        return False
    # if block.CreatorID != trusted_customer_idurl:
    #     lg.warn('invalid packet, creator ID must be present in packet ID : %s ~ %s' % (
    #         block.CreatorID, list_files_global_id['idurl'], ))
    #     return False
    try:
        raw_files = block.Data()
    except:
        lg.exc()
        return False
    if block.CreatorID == trusted_customer_idurl:
        # this is a trusted guy sending some shared files to me
        try:
            json_data = serialization.BytesToDict(raw_files, keys_to_text=True, encoding='utf-8')
            # validate expected payload shape before importing
            json_data['items']
        except:
            lg.exc()
            return False
        count = backup_fs.Unserialize(
            raw_data=json_data,
            iter=backup_fs.fs(trusted_customer_idurl),
            iterID=backup_fs.fsID(trusted_customer_idurl),
            from_json=True,
        )
        p2p_service.SendAck(newpacket)
        if count == 0:
            lg.warn('no files were imported during file sharing')
        else:
            backup_control.Save()
            lg.info('imported %d shared files from %s, key_id=%s' % (
                count, trusted_customer_idurl, incoming_key_id, ))
        events.send('shared-list-files-received', dict(
            customer_idurl=trusted_customer_idurl,
            new_items=count,
        ))
        return True
    # otherwise this must be an external supplier sending us a files he stores for trusted customer
    external_supplier_idurl = block.CreatorID
    try:
        supplier_raw_list_files = list_files.UnpackListFiles(raw_files, settings.ListFilesFormat())
    except:
        lg.exc()
        return False
    # need to detect supplier position from the list of packets
    # and place that supplier on the correct position in contactsdb
    supplier_pos = backup_matrix.DetectSupplierPosition(supplier_raw_list_files)
    known_supplier_pos = contactsdb.supplier_position(external_supplier_idurl, trusted_customer_idurl)
    if _Debug:
        lg.args(_DebugLevel, supplier_pos=supplier_pos, known_supplier_pos=known_supplier_pos,
                external_supplier=external_supplier_idurl, trusted_customer=trusted_customer_idurl, key_id=incoming_key_id)
    if supplier_pos >= 0:
        if known_supplier_pos >= 0 and known_supplier_pos != supplier_pos:
            lg.err('known external supplier %r position %d is not matching to received list files position %d for customer %s' % (
                external_supplier_idurl, known_supplier_pos, supplier_pos, trusted_customer_idurl))
        # TODO: we should remove that bellow because we do not need it
        # service_customer_family() should take care of suppliers list for trusted customer
        # so we need to just read that list from DHT
        # contactsdb.erase_supplier(
        #     idurl=external_supplier_idurl,
        #     customer_idurl=trusted_customer_idurl,
        # )
        # contactsdb.add_supplier(
        #     idurl=external_supplier_idurl,
        #     position=supplier_pos,
        #     customer_idurl=trusted_customer_idurl,
        # )
        # contactsdb.save_suppliers(customer_idurl=trusted_customer_idurl)
    else:
        lg.warn('not possible to detect external supplier position for customer %s from received list files, known position is %s' % (
            trusted_customer_idurl, known_supplier_pos))
        # fall back to the position already known from contactsdb
        supplier_pos = known_supplier_pos
    remote_files_changed, _, _, _ = backup_matrix.process_raw_list_files(
        supplier_num=supplier_pos,
        list_files_text_body=supplier_raw_list_files,
        customer_idurl=trusted_customer_idurl,
        is_in_sync=True,
        auto_create=True,
    )
    if remote_files_changed:
        backup_matrix.SaveLatestRawListFiles(
            supplier_idurl=external_supplier_idurl,
            raw_data=supplier_raw_list_files,
            customer_idurl=trusted_customer_idurl,
        )
    # finally sending Ack() packet back
    p2p_service.SendAck(newpacket)
    if remote_files_changed:
        lg.info('received updated list of files from external supplier %s for customer %s' % (external_supplier_idurl, trusted_customer_idurl))
    return True
def do_backup_key(key_id, keys_folder=None, wait_result=False):
    """
    Send given key to my suppliers to store it remotely.

    This will make a regular backup copy of that key file - encrypted with
    my master key.

    :param key_id: id of the key to be backed up; the "master" key itself
        is always refused
    :param keys_folder: local folder with key files, defaults to
        settings.KeyStoreDir()
    :param wait_result: when True a twisted Deferred is returned that fires
        once the upload finishes; when False returns a plain bool
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.do_backup_key key_id=%r' % key_id)
    if key_id == my_id.getGlobalID(key_alias='master') or key_id == 'master':
        # the master key must never be uploaded anywhere
        lg.err('master key must never leave local host')
        if wait_result:
            return fail(Exception('master key must never leave local host'))
        return False
    if not my_keys.is_key_registered(key_id):
        lg.err('unknown key: "%s"' % key_id)
        if wait_result:
            return fail(Exception('unknown key: "%s"' % key_id))
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    # private keys and public keys are stored under different file suffixes
    if my_keys.is_key_private(key_id):
        local_key_filepath = os.path.join(keys_folder, '%s.private' % key_id)
        remote_path_for_key = '.keys/%s.private' % key_id
    else:
        local_key_filepath = os.path.join(keys_folder, '%s.public' % key_id)
        remote_path_for_key = '.keys/%s.public' % key_id
    global_key_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path=remote_path_for_key)
    res = api.file_exists(global_key_path)
    if res['status'] == 'OK' and res['result'] and res['result'].get('exist'):
        lg.warn('key %s already exists in catalog' % global_key_path)
        global_key_path_id = res['result'].get('path_id')
        if global_key_path_id and backup_control.IsPathInProcess(global_key_path_id):
            # another upload for this key is already running - try to attach to it
            lg.warn('skip, another backup for key already started: %s' % global_key_path_id)
            if not wait_result:
                return True
            backup_id_list = backup_control.FindRunningBackup(global_key_path_id)
            if backup_id_list:
                backup_id = backup_id_list[0]
                backup_job = backup_control.GetRunningBackupObject(backup_id)
                if backup_job:
                    # chain the result of the already-running job into our Deferred
                    backup_result = Deferred()
                    backup_job.resultDefer.addCallback(
                        lambda resp: backup_result.callback(True) if resp == 'done' else backup_result.errback(
                            Exception('failed to upload key "%s", task was not started: %r' % (global_key_path, resp))))
                    if _Debug:
                        backup_job.resultDefer.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='key_ring.do_backup_key')
                    backup_job.resultDefer.addErrback(backup_result.errback)
                    if _Debug:
                        lg.args(_DebugLevel, backup_id=backup_id, global_key_path_id=global_key_path_id)
                    return backup_result
                else:
                    lg.warn('did not found running backup job: %r' % backup_id)
            else:
                lg.warn('did not found running backup id for path: %r' % global_key_path_id)
    else:
        # path is not yet known in the catalog - create it first
        res = api.file_create(global_key_path)
        if res['status'] != 'OK':
            lg.err('failed to create path "%s" in the catalog: %r' % (global_key_path, res))
            if wait_result:
                return fail(Exception('failed to create path "%s" in the catalog: %r' % (global_key_path, res)))
            return False
    res = api.file_upload_start(
        local_path=local_key_filepath,
        remote_path=global_key_path,
        wait_result=wait_result,
        open_share=False,
    )
    if not wait_result:
        if res['status'] != 'OK':
            lg.err('failed to upload key "%s": %r' % (global_key_path, res))
            return False
        if _Debug:
            lg.out(_DebugLevel, 'key_ring.do_backup_key key_id=%s : %r' % (key_id, res))
        return True
    backup_result = Deferred()

    # TODO: put that code bellow into api.file_upload_start() method with additional parameter

    def _job_done(result):
        # fires when the backup job itself finished
        if _Debug:
            lg.args(_DebugLevel, key_id=key_id, result=result)
        if result == 'done':
            backup_result.callback(True)
        else:
            backup_result.errback(Exception('failed to upload key "%s", backup is %r' % (key_id, result)))
        return None

    def _task_started(resp):
        # fires when the upload task was accepted; attaches _job_done to the job
        if _Debug:
            lg.args(_DebugLevel, key_id=key_id, response=resp)
        if resp['status'] != 'OK':
            backup_result.errback(Exception('failed to upload key "%s", task was not started: %r' % (global_key_path, resp)))
            return None
        backupObj = backup_control.jobs().get(resp['version'])
        if not backupObj:
            backup_result.errback(Exception('failed to upload key "%s", task %r failed to start' % (global_key_path, resp['version'])))
            return None
        backupObj.resultDefer.addCallback(_job_done)
        backupObj.resultDefer.addErrback(backup_result.errback)
        return None

    # api.file_upload_start() may return a plain dict - wrap it into a Deferred
    if not isinstance(res, Deferred):
        res_defer = Deferred()
        res_defer.callback(res)
        res = res_defer
    res.addCallback(_task_started)
    res.addErrback(backup_result.errback)
    return backup_result
def on_key_received(newpacket, info, status, error_message):
    """
    Callback will be executed when I receive a new key from one remote user.

    Unserializes and (optionally) verifies the signed key info, then merges
    it into the local key store: public keys are registered unless they
    conflict with an existing key of the same ID; a private key may replace
    a previously known public-only copy of itself.  Sends Ack() on success
    (including "already known" cases) and Fail() on any error.
    Returns True when the packet was acknowledged, False otherwise.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.err('failed reading key info from %s' % newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = serialization.BytesToDict(key_data, keys_to_text=True, values_to_text=True)
        key_id = key_json['key_id']
        key_label = key_json.get('label', '')
        # read_key_info() may normalize the key id, so it is re-assigned here
        key_id, key_object = my_keys.read_key_info(key_json)
        if key_object.isSigned():
            if not my_keys.verify_key_info_signature(key_json):
                raise Exception('key signature verification failed')
        if key_object.isPublic():
            # received key is a public key
            if my_keys.is_key_registered(key_id):
                # but we already have a key with that ID
                if my_keys.is_key_private(key_id):
                    # we should not overwrite existing private key
                    raise Exception('private key already registered')
                if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
                    # and we should not overwrite existing public key as well
                    raise Exception('another public key already registered with that ID and it is not matching')
                # exact same public key - acknowledge and skip
                p2p_service.SendAck(newpacket)
                lg.warn('received existing public key: %s, skip' % key_id)
                return True
            if not my_keys.register_key(key_id, key_object, label=key_label):
                raise Exception('key register failed')
            else:
                lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            if _Debug:
                lg.info('received and stored locally a new key %s, include_private=%s' % (key_id, key_json.get('include_private')))
            return True
        # received key is a private key
        if my_keys.is_key_registered(key_id):
            # check if we already have that key
            if my_keys.is_key_private(key_id):
                # we have already private key with same ID!!!
                if my_keys.get_private_key_raw(key_id) != key_object.toPrivateString():
                    # and this is a new private key : we should not overwrite!
                    raise Exception('private key already registered and it is not matching')
                # this is the same private key
                p2p_service.SendAck(newpacket)
                lg.warn('received existing private key: %s, skip' % key_id)
                return True
            # but we have a public key with same ID already
            if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
                # and we should not overwrite existing public key as well
                raise Exception('another public key already registered with that ID and it is not matching with private key')
            # the incoming private key matches our public copy: upgrade it
            lg.info('erasing public key %s' % key_id)
            my_keys.erase_key(key_id)
            if not my_keys.register_key(key_id, key_object, label=key_label):
                raise Exception('key register failed')
            lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            return True
        # no private key with given ID was registered
        if not my_keys.register_key(key_id, key_object, label=key_label):
            raise Exception('key register failed')
        lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
        p2p_service.SendAck(newpacket)
        return True
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, strng.to_text(exc))
    return False
def on_key_received(newpacket, info, status, error_message):
    """
    Callback will be executed when I receive a new key from one remote user.

    Newer variant of the key-merge logic: an already known public key that
    does not match the incoming copy is *replaced* (erase + re-register)
    instead of rejected, and a private key overwrites a public-only copy
    without comparing against it.  Sends Ack() on success (including
    "already known" cases) and Fail() on any error.  Returns True when the
    packet was acknowledged, False otherwise.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.err('failed reading key info from %s' % newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = serialization.BytesToDict(key_data, keys_to_text=True, values_to_text=True)
        # key_id = strng.to_text(key_json['key_id'])
        key_label = strng.to_text(key_json.get('label', ''))
        # read_key_info() resolves the normalized key id and builds the key object
        key_id, key_object = my_keys.read_key_info(key_json)
        if key_object.isSigned():
            if not my_keys.verify_key_info_signature(key_json):
                raise Exception('received key signature verification failed: %r' % key_json)
            # TODO: must also compare "signature_pubkey" with pub key of the creator of the key!
        if key_object.isPublic():
            # received key is a public key
            if my_keys.is_key_registered(key_id):
                # but we already have a key with that ID
                if my_keys.is_key_private(key_id):
                    # we should not overwrite existing private key
                    # TODO: check other scenarios
                    raise Exception('private key already registered with %r' % key_id)
                if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
                    # incoming copy differs - replace the stored public key
                    my_keys.erase_key(key_id)
                    if not my_keys.register_key(key_id, key_object, label=key_label):
                        raise Exception('key register failed')
                    else:
                        lg.info('replaced existing key %s, is_public=%s' % (key_id, key_object.isPublic()))
                    # normally should not overwrite existing public key
                    # TODO: look more if need to add some extra checks
                    # for example need to be able to overwrite or erase remotely some keys to cleanup
                    # raise Exception('another public key already registered with %r and new key is not matching' % key_id)
                p2p_service.SendAck(newpacket)
                lg.warn('received existing public key: %s, skip' % key_id)
                return True
            if not my_keys.register_key(key_id, key_object, label=key_label):
                raise Exception('key register failed')
            else:
                lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            if _Debug:
                lg.info('received and stored locally a new key %s, include_private=%s' % (key_id, key_json.get('include_private')))
            return True
        # received key is a private key
        if my_keys.is_key_registered(key_id):
            # check if we already have that key
            if my_keys.is_key_private(key_id):
                # we have already private key with same ID!!!
                if my_keys.get_private_key_raw(key_id) != key_object.toPrivateString():
                    # and this is a new private key : we should not overwrite!
                    raise Exception('private key already registered and it is not matching with received copy')
                # this is the same private key
                p2p_service.SendAck(newpacket)
                lg.warn('received again an exact copy of already existing private key: %s, skip' % key_id)
                return True
            # but we have a public key with same ID already
            # if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
            #     # and we should not overwrite existing public key as well
            #     raise Exception('another public key already registered with that ID and it is not matching with private key')
            # the incoming private key upgrades our public-only copy
            lg.info('erasing public key %s' % key_id)
            my_keys.erase_key(key_id)
            if not my_keys.register_key(key_id, key_object, label=key_label):
                raise Exception('key register failed')
            lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            return True
        # no private key with given ID was registered
        if not my_keys.register_key(key_id, key_object, label=key_label):
            raise Exception('key register failed')
        lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
        p2p_service.SendAck(newpacket)
        return True
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, strng.to_text(exc))
    return False