def register_customer_key(customer_public_key_id, customer_public_key):
    """
    Check/refresh/store customer public key locally.

    Normalizes the key id (in case the owner's identity was rotated), compares
    the incoming key against any locally registered copy, erases a mismatching
    local copy, and registers the incoming key.

    Returns True when the key ends up registered, False on missing input or
    registration failure.
    """
    if not customer_public_key_id or not customer_public_key:
        lg.warn('customer public key was not provided in the request')
        return False
    # normalize key id in case the customer's identity was rotated
    customer_public_key_id = my_keys.latest_key_id(customer_public_key_id)
    if my_keys.is_key_registered(customer_public_key_id):
        known_customer_public_key = my_keys.get_public_key_raw(customer_public_key_id)
        if known_customer_public_key == customer_public_key:
            lg.info('customer public key %r already known and public key is matching' % customer_public_key_id)
        else:
            # stored copy differs from the incoming key: drop it and re-register below
            lg.warn('rewriting customer public key %r' % customer_public_key_id)
            my_keys.erase_key(customer_public_key_id)
    # NOTE(review): registration also runs when a matching key is already
    # registered — assumes my_keys.register_key() tolerates that case; confirm
    key_id, key_object = my_keys.read_key_info(customer_public_key)
    if not my_keys.register_key(key_id, key_object):
        lg.err('failed to register customer public key: %r' % customer_public_key_id)
        return False
    lg.info('new customer public key registered: %r' % customer_public_key_id)
    return True
def set_archive_folder_path(group_key_id, archive_folder_path):
    """
    Store `archive_folder_path` in the in-memory record of a known group.

    Returns True on success, False when the group is not registered locally.
    """
    normalized_id = my_keys.latest_key_id(group_key_id)
    if is_group_exist(normalized_id):
        group_record = active_groups()[normalized_id]
        group_record['archive_folder_path'] = archive_folder_path
        return True
    lg.warn('group %r is not known' % normalized_id)
    return False
def audit_public_key(key_id, untrusted_idurl, timeout=10):
    """
    Be sure remote user stores given public key.

    I also need to stores that public key in order to do such audit.
    I will send him a random string, he needs to encrypt it and send me back.
    I can compare his encrypted output with mine.
    Returns Deferred object.
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.audit_public_key testing %s from %s' % (key_id, untrusted_idurl))
    # normalize key id in case the owner's identity was rotated
    key_id = my_keys.latest_key_id(key_id)
    result = Deferred()
    recipient_id_obj = identitycache.FromCache(untrusted_idurl)
    if not recipient_id_obj:
        lg.warn('not found "%s" in identity cache' % untrusted_idurl)
        result.errback(Exception('not found "%s" in identity cache' % untrusted_idurl))
        return result
    key_alias, creator_idurl = my_keys.split_key_id(key_id)
    if not key_alias or not creator_idurl:
        lg.warn('wrong key_id')
        result.errback(Exception('wrong key_id'))
        return result
    if untrusted_idurl == creator_idurl and key_alias == 'master':
        # auditing the remote user's own "master" key: it does not need to be
        # registered locally, his identity already holds the public part
        lg.info('doing audit of master key (public part) of remote user')
    else:
        if not my_keys.is_key_registered(key_id):
            lg.warn('unknown key: "%s"' % key_id)
            result.errback(Exception('unknown key: "%s"' % key_id))
            return result
    # random challenge: remote side must encrypt it with the audited key
    public_test_sample = key.NewSessionKey(session_key_type=key.SessionKeyType())
    json_payload = {
        'key_id': key_id,
        'audit': {
            'public_sample': base64.b64encode(public_test_sample),
            'private_sample': '',
        }
    }
    raw_payload = serialization.DictToBytes(json_payload, values_to_text=True)
    block = encrypted.Block(
        BackupID=key_id,
        Data=raw_payload,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        # encrypt data using public key of recipient
        EncryptKey=lambda inp: recipient_id_obj.encrypt(inp),
    )
    encrypted_payload = block.Serialize()
    p2p_service.SendAuditKey(
        remote_idurl=recipient_id_obj.getIDURL(),
        encrypted_payload=encrypted_payload,
        packet_id=key_id,
        timeout=timeout,
        callbacks={
            commands.Ack(): lambda response, info:
                _on_audit_public_key_response(response, info, key_id, untrusted_idurl, public_test_sample, result),
            commands.Fail(): lambda response, info: result.errback(Exception(response)),
            None: lambda pkt_out: result.errback(Exception('timeout')),  # timeout
        },
    )
    return result
def set_last_sequence_id(group_key_id, last_sequence_id):
    """
    Update the `last_sequence_id` field of a known group.

    Returns True on success, False when the group is not registered locally.
    """
    normalized_id = my_keys.latest_key_id(group_key_id)
    if is_group_exist(normalized_id):
        active_groups()[normalized_id]['last_sequence_id'] = last_sequence_id
        return True
    lg.warn('group %r is not known' % normalized_id)
    return False
    def doSendHisFiles(self, *args, **kwargs):
        """
        Action method.

        Sends the customer the list of his files stored here, encrypted with
        his "customer" key when that key is already registered locally;
        otherwise falls back to his "master" key from his identity.
        """
        customer_key_id = my_keys.make_key_id(alias='customer', creator_idurl=self.customer_idurl)
        # normalize key id in case the customer's identity was rotated
        customer_key_id = my_keys.latest_key_id(customer_key_id)
        if my_keys.is_key_registered(customer_key_id):
            list_files.send(
                customer_idurl=self.customer_idurl,
                packet_id='%s:%s' % (customer_key_id, packetid.UniqueID(), ),
                format_type=settings.ListFilesFormat(),
                key_id=customer_key_id,
                remote_idurl=self.customer_idurl,  # send to the customer
            )
        else:
            # if "customer" key is not delivered to me yet, use his "master" key
            list_files.send(
                customer_idurl=self.customer_idurl,
                packet_id='%s:%s' % (customer_key_id, packetid.UniqueID(), ),
                format_type=settings.ListFilesFormat(),
                key_id=my_keys.make_key_id(alias='master', creator_idurl=self.customer_idurl),
                remote_idurl=self.customer_idurl,  # send to the customer
            )
            lg.err('key %s is not registered, not able to send his files' % customer_key_id)
def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called when command "Files" were received from one of my suppliers.

    This is an answer from given supplier (after my request) to get a
    list of our files stored on his machine. Decrypts the payload with my
    own key, feeds the raw list into the backup matrix and triggers
    clean-up / rebuild requests for the differences found.

    Returns True when the list was processed, False on any failure.
    """
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    # FIX: supplier_position() returns -1 for an unknown supplier, but the
    # previous check was `num < -1` which could never be True — the warning
    # below was unreachable and -1 leaked into process_raw_list_files()
    if num < 0:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    target_key_id = my_keys.latest_key_id(list_files_global_id['key_id'])
    if not my_keys.is_key_private(target_key_id):
        lg.warn('key %r not registered, not possible to decrypt ListFiles() packet from %r' % (target_key_id, supplier_idurl, ))
        return False
    try:
        block = encrypted.Unserialize(newpacket.Payload, decrypt_key=target_key_id, )
        input_data = block.Data()
    except:
        lg.err('failed decrypting data from packet %r received from %r' % (newpacket, supplier_idurl))
        return False
    list_files_raw = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    remote_files_changed, backups2remove, paths2remove, missed_backups = backup_matrix.process_raw_list_files(
        supplier_num=num,
        list_files_text_body=list_files_raw,
        # NOTE(review): customer_idurl deliberately not forwarded here —
        # presumably process_raw_list_files() defaults to my own customer
        # context when None; confirm against backup_matrix
        customer_idurl=None,
        is_in_sync=None,
        auto_create=False,
    )
    list_files_orator.IncomingListFiles(newpacket)
    if remote_files_changed:
        backup_matrix.SaveLatestRawListFiles(supplier_idurl, list_files_raw)
    if _Debug:
        lg.args(_DebugLevel, supplier=nameurl.GetName(supplier_idurl), customer=nameurl.GetName(customer_idurl),
                backups2remove=len(backups2remove), paths2remove=len(paths2remove),
                files_changed=remote_files_changed, missed_backups=len(missed_backups), )
    if len(backups2remove) > 0:
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, ' also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
def set_group_info(group_key_id, group_info=None):
    """
    Create or overwrite the in-memory record for the given group.

    When `group_info` is empty or None a fresh default record is stored.
    Always returns True.
    """
    normalized_id = my_keys.latest_key_id(group_key_id)
    default_record = {
        'last_sequence_id': -1,
        'active': False,
        'archive_folder_path': None,
    }
    active_groups()[normalized_id] = group_info or default_record
    return True
def set_group_active(group_key_id, value):
    """
    Set the "active" flag of a known group, logging any transition.

    Returns True on success, False when the group is not registered locally.
    """
    normalized_id = my_keys.latest_key_id(group_key_id)
    if not is_group_exist(normalized_id):
        lg.warn('group %r is not known' % normalized_id)
        return False
    group_record = active_groups()[normalized_id]
    previous = group_record['active']
    group_record['active'] = value
    if previous != value:
        lg.info('group %r "active" status changed: %r -> %r' % (normalized_id, previous, value, ))
    return True
    def doPrepare(self, *args, **kwargs):
        """
        Action method.

        Scans the ".keys" folder in the backup catalog and classifies every
        stored key copy into one of three buckets:
          - self.stored_keys: at least one version is reliable enough to restore
          - self.not_stored_keys: unreliable remote copy, but key exists locally
          - self.unreliable_keys: unreliable remote copy and no local key
        Also resets all counters used by the later restore/save/clean steps.
        """
        self.restored_count = 0
        self.saved_count = 0
        self.deleted_count = 0
        self.stored_keys = {}
        self.not_stored_keys = {}
        self.unreliable_keys = {}
        self.keys_to_upload = set()
        self.keys_to_erase = {}
        self.keys_to_rename = {}
        lookup = backup_fs.ListChildsByPath(
            path='.keys',
            recursive=False,
        )
        # ListChildsByPath() returns a non-list value on failure — skip scan then
        if isinstance(lookup, list):
            minimum_reliable_percent = eccmap.GetCorrectablePercent(eccmap.Current().suppliers_number)
            for i in lookup:
                # file name suffix encodes whether this is a public or private key copy
                if i['path'].endswith('.public'):
                    stored_key_id = i['path'].replace('.public', '').replace('.keys/', '')
                    is_private = False
                else:
                    stored_key_id = i['path'].replace('.private', '').replace('.keys/', '')
                    is_private = True
                stored_key_id = my_keys.latest_key_id(stored_key_id)
                is_reliable = False
                # a key copy counts as reliable if ANY stored version reaches
                # the minimum correctable percentage for the current ecc map
                for v in i['versions']:
                    try:
                        reliable = float(v['reliable'].replace('%', ''))
                    except:
                        # best-effort: a malformed "reliable" value counts as 0%
                        lg.exc()
                        reliable = 0.0
                    if reliable >= minimum_reliable_percent:
                        is_reliable = True
                        break
                if is_reliable:
                    self.stored_keys[stored_key_id] = is_private
                else:
                    if is_private and my_keys.is_key_private(stored_key_id):
                        self.not_stored_keys[stored_key_id] = is_private
                    elif not is_private and my_keys.is_key_registered(stored_key_id):
                        self.not_stored_keys[stored_key_id] = is_private
                    else:
                        self.unreliable_keys[stored_key_id] = is_private
        if _Debug:
            lg.args(_DebugLevel, stored_keys=len(self.stored_keys), not_stored_keys=list(self.not_stored_keys.keys()), unreliable_keys=len(self.unreliable_keys))
    def doRestoreKeys(self, *args, **kwargs):
        """
        Action method.

        Restores, one by one and strictly sequentially, every reliable key
        copy that is not registered locally yet. Keys stored under an old
        (rotated) id are remembered in self.keys_to_rename for a later step.
        Fires "restore-ok" when the queue is exhausted, or "error" when no
        reliable copies exist while some private key copy is unreliable.
        """
        is_any_private_key_unreliable = bool(True in self.unreliable_keys.values())
        if is_any_private_key_unreliable and not self.stored_keys:
            if _Debug:
                lg.args(_DebugLevel, unreliable_keys=self.unreliable_keys)
            lg.err('not possible to restore any keys, all backup copies unreliable stored_keys=%d not_stored_keys=%d unreliable_keys=%d' % (
                len(self.stored_keys), len(self.not_stored_keys), len(self.unreliable_keys), ))
            self.automat('error', Exception('not possible to restore any keys, all backup copies unreliable'))
            return
        keys_to_be_restored = []
        for key_id, is_private in self.stored_keys.items():
            latest_key_id = my_keys.latest_key_id(key_id)
            if latest_key_id != key_id:
                # key stored under a rotated id: schedule a rename for later
                self.keys_to_rename[key_id] = (latest_key_id, is_private, )
            if my_keys.is_key_registered(key_id):
                if _Debug:
                    lg.out(_DebugLevel, ' skip restoring already known key_id=%r' % key_id)
                continue
            if my_keys.is_key_registered(latest_key_id):
                if _Debug:
                    lg.out(_DebugLevel, ' skip restoring already known latest key_id=%r' % latest_key_id)
                continue
            keys_to_be_restored.append((key_id, is_private, ))
        if _Debug:
            lg.args(_DebugLevel, keys_to_be_restored=len(keys_to_be_restored))

        def _on_restored_one(res, pos, key_id):
            # one key restored: advance to the next position in the queue
            self.restored_count += 1
            _do_restore_one(pos+1)
            return None

        def _on_failed_one(err, pos, key_id):
            # a failed key is skipped, restoring continues with the next one
            lg.err('failed to restore key %r : %r' % (key_id, err, ))
            _do_restore_one(pos+1)
            return None

        def _do_restore_one(pos):
            # recursive driver: fires "restore-ok" once past the end of the queue
            if pos >= len(keys_to_be_restored):
                self.automat('restore-ok', True)
                return
            key_id, is_private = keys_to_be_restored[pos]
            res = key_ring.do_restore_key(key_id, is_private, wait_result=True)
            res.addCallback(_on_restored_one, pos, key_id)
            res.addErrback(_on_failed_one, pos, key_id)

        _do_restore_one(0)
def save_group_info(group_key_id):
    """
    Persist the in-memory record of a known group into the groups folder on disk.

    Returns the result of the file write, or False when the group is unknown.
    """
    normalized_id = my_keys.latest_key_id(group_key_id)
    if not is_group_exist(normalized_id):
        lg.warn('group %r is not known' % normalized_id)
        return False
    groups_dir = os.path.join(settings.ServiceDir('service_private_groups'), 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    destination_path = os.path.join(groups_dir, normalized_id)
    ret = local_fs.WriteTextFile(destination_path, jsn.dumps(active_groups()[normalized_id]))
    if _Debug:
        lg.args(_DebugLevel, group_key_id=normalized_id, group_info_path=destination_path, ret=ret)
    return ret
def doCleanKeys(self, *args, **kwargs): """ Action method. """ keys_deleted = [] for key_id, is_private in self.stored_keys.items(): latest_key_id = my_keys.latest_key_id(key_id) if key_id not in my_keys.known_keys() and latest_key_id not in my_keys.known_keys(): self.keys_to_erase[key_id] = is_private for key_id, is_private in self.keys_to_erase.items(): res = key_ring.do_delete_key(key_id, is_private) keys_deleted.append(res) self.deleted_count += 1 if _Debug: lg.args(_DebugLevel, restored=self.restored_count, saved=self.saved_count, deleted=self.deleted_count) self.automat('clean-ok')
def on_audit_key_received(newpacket, info, status, error_message):
    """
    Callback will be executed when remote user would like to check if I poses given key locally.

    Decodes the audit request, then answers with Ack() carrying the challenge
    encrypted (public audit) or decrypted (private audit) with the audited
    key, or with Fail() when the request is invalid or the key is unknown.
    Returns True when an Ack() was sent, False otherwise.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(2, 'key_ring.on_audit_key_received ERROR reading data from %s' % newpacket.RemoteID)
        return False
    try:
        raw_payload = block.Data()
        json_payload = serialization.BytesToDict(raw_payload, keys_to_text=True, values_to_text=True)
        key_id = my_keys.latest_key_id(json_payload['key_id'])
        # bare subscript only validates that the "audit" section is present
        json_payload['audit']
        public_sample = base64.b64decode(json_payload['audit']['public_sample'])
        private_sample = base64.b64decode(json_payload['audit']['private_sample'])
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return False
    if not my_keys.is_valid_key_id(key_id):
        p2p_service.SendFail(newpacket, 'invalid key id')
        return False
    if not my_keys.is_key_registered(key_id, include_master=True):
        p2p_service.SendFail(newpacket, 'key not registered')
        return False
    if public_sample:
        # public-key audit: encrypt the challenge and send it back
        response_payload = base64.b64encode(my_keys.encrypt(key_id, public_sample))
        p2p_service.SendAck(newpacket, response_payload)
        if _Debug:
            lg.info('remote user %s requested audit of public key %s' % (newpacket.OwnerID, key_id))
        return True
    if private_sample:
        if not my_keys.is_key_private(key_id):
            p2p_service.SendFail(newpacket, 'private key not registered')
            return False
        # private-key audit: decrypt the challenge and send it back
        response_payload = base64.b64encode(my_keys.decrypt(key_id, private_sample))
        p2p_service.SendAck(newpacket, response_payload)
        if _Debug:
            lg.info('remote user %s requested audit of private key %s' % (newpacket.OwnerID, key_id))
        return True
    # neither sample present — nothing to audit
    p2p_service.SendFail(newpacket, 'wrong audit request')
    return False
def on_list_files(newpacket):
    """
    Handle an incoming ListFiles() request packet.

    Parses the query from the packet payload (JSON dict with an "items" list,
    or the legacy plain-text format), resolves the target customer and key
    from the packet id, and replies with the matching list of stored files.

    Returns True when a reply was sent, False on an unrecognized query.
    """
    json_query = {}
    try:
        j = serialization.BytesToDict(newpacket.Payload, keys_to_text=True, values_to_text=True)
        # bare subscript validates that a non-empty "items" list is present
        j['items'][0]
        json_query = j
    except:
        # legacy payload: plain-text list-files format means "everything"
        if strng.to_text(newpacket.Payload) == settings.ListFilesFormat():
            json_query = {
                'items': [
                    '*',
                ],
            }
    # FIX: json_query starts as {} and is never None, so the previous guard
    # `if json_query is None:` could never fire and a malformed payload would
    # later raise KeyError at json_query['items']
    if not json_query:
        lg.exc('unrecognized ListFiles() query received')
        return False
    # TODO: perform validations before sending back list of files
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if list_files_global_id['key_id']:
        # customer id and data id can be recognized from packet id
        # return back list of files according to the request
        customer_idurl = list_files_global_id['idurl']
        key_id = list_files_global_id['key_id']
    else:
        # packet id format is unknown
        # by default returning back all files from that recipient if he is a customer
        customer_idurl = newpacket.OwnerID
        key_id = my_keys.make_key_id(alias='customer', creator_idurl=customer_idurl)
    key_id = my_keys.latest_key_id(key_id)
    list_files.send(
        customer_idurl=customer_idurl,
        packet_id=newpacket.PacketID,
        format_type=settings.ListFilesFormat(),
        key_id=key_id,
        remote_idurl=newpacket.OwnerID,  # send back to the requesting node
        query_items=json_query['items'],
    )
    return True
def share_key(key_id, trusted_idurl, include_private=False, include_signature=False, timeout=15):
    """
    Method to be used to send given key to one trusted user.

    Make sure remote user is identified and connected.
    Returns deferred, callback will be fired with response Ack() packet argument.
    """
    if _Debug:
        lg.args(_DebugLevel, key_id=key_id, trusted_idurl=trusted_idurl)
    normalized_key_id = my_keys.latest_key_id(key_id)
    result = Deferred()

    def _ping_succeed(ok):
        # remote node is alive — continue with the actual key transfer
        return _do_request_service_keys_registry(
            normalized_key_id, trusted_idurl, include_private, include_signature, timeout, result,
        )

    ping_deferred = online_status.ping(
        idurl=trusted_idurl,
        ack_timeout=timeout,
        channel='share_key',
        keep_alive=False,
    )
    ping_deferred.addCallback(_ping_succeed)
    ping_deferred.addErrback(result.errback)
    return result
def generate_group_key(creator_id=None, label=None, key_size=4096, group_alias=None):
    """
    Generate, sign and register a new group key.

    When `group_alias` is given and a key with that alias is already
    registered, its latest key id is returned instead of generating a new
    key. Otherwise a random unused alias of the form "group_<md5>" is picked.
    Returns the new (or existing) group key id.
    """
    if group_alias:
        group_key_id = my_keys.make_key_id(alias=group_alias, creator_glob_id=creator_id)
        if my_keys.is_key_registered(group_key_id):
            return my_keys.latest_key_id(group_key_id)
    else:
        # keep drawing random aliases until we hit an unregistered key id
        group_key_id = None
        while group_key_id is None or my_keys.is_key_registered(group_key_id):
            group_alias = 'group_%s' % strng.to_text(key.HashMD5(os.urandom(24), hexdigest=True))
            group_key_id = my_keys.make_key_id(alias=group_alias, creator_glob_id=creator_id)
    if not label:
        label = 'group%s' % utime.make_timestamp()
    my_keys.generate_key(key_id=group_key_id, label=label, key_size=key_size)
    my_keys.sign_key(key_id=group_key_id, save=True)
    if _Debug:
        lg.args(_DebugLevel, group_key_id=group_key_id, group_alias=group_alias, creator_id=creator_id, label=label)
    return group_key_id
def on_identity_url_changed(evt):
    """
    React to a user's identity URL rotation.

    Renames on-disk group files, active-group records, broker folders and
    broker files that reference the old IDURL so they use the new one, and
    reconnects group members whose connected broker was rotated.
    """
    from access import group_member
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    brokers_dir = os.path.join(service_dir, 'brokers')
    old_idurl = id_url.field(evt.data['old_idurl'])
    new_idurl = id_url.field(evt.data['new_idurl'])
    active_group_keys = list(active_groups())
    to_be_reconnected = []
    for group_key_id in active_group_keys:
        if not group_key_id:
            continue
        group_creator_idurl = global_id.glob2idurl(group_key_id)
        if id_url.is_the_same(group_creator_idurl, old_idurl):
            # the group creator's identity was rotated: rename the on-disk
            # group file and the in-memory record to the latest key id
            old_group_path = os.path.join(groups_dir, group_key_id)
            latest_group_key_id = my_keys.latest_key_id(group_key_id)
            latest_group_path = os.path.join(groups_dir, latest_group_key_id)
            lg.info('going to rename rotated group file: %r -> %r' % (old_group_path, latest_group_path, ))
            if os.path.isfile(old_group_path):
                try:
                    os.rename(old_group_path, latest_group_path)
                except:
                    lg.exc()
                    continue
            else:
                lg.warn('key file %r was not found, key was not renamed' % old_group_path)
            active_groups()[latest_group_key_id] = active_groups().pop(group_key_id)
            # NOTE: "memeber" typo is the actual project API name — do not "fix" here
            group_member.rotate_active_group_memeber(group_key_id, latest_group_key_id)
        # independent of creator rotation: schedule a reconnect when one of
        # this group's connected brokers was rotated
        gm = group_member.get_active_group_member(group_key_id)
        if gm and gm.connected_brokers and id_url.is_in(old_idurl, gm.connected_brokers.values()):
            lg.info('connected broker %r IDURL is rotated, going to reconnect %r' % (old_idurl, gm, ))
            if group_key_id not in to_be_reconnected:
                to_be_reconnected.append(group_key_id)
    known_customers = list(known_brokers().keys())
    for customer_id in known_customers:
        latest_customer_id = global_id.idurl2glob(new_idurl)
        customer_idurl = global_id.glob2idurl(customer_id)
        if id_url.is_the_same(customer_idurl, old_idurl):
            # the customer's identity was rotated: move the broker folder
            latest_customer_dir = os.path.join(brokers_dir, latest_customer_id)
            lg.info('going to rename rotated customer id: %r -> %r' % (customer_id, latest_customer_id, ))
            old_customer_dir = os.path.join(brokers_dir, customer_id)
            if os.path.isdir(old_customer_dir):
                try:
                    bpio.move_dir_recursive(old_customer_dir, latest_customer_dir)
                    bpio.rmdir_recursive(old_customer_dir)
                except:
                    lg.exc()
                    continue
            known_brokers()[latest_customer_id] = known_brokers().pop(customer_id)
            # also rename any broker file inside that folder pointing to the old IDURL
            for broker_pos, broker_id in enumerate(known_brokers(latest_customer_id)):
                if not broker_id:
                    continue
                broker_idurl = global_id.glob2idurl(broker_id)
                if broker_idurl == old_idurl:
                    latest_broker_id = global_id.idurl2glob(new_idurl)
                    latest_broker_path = os.path.join(latest_customer_dir, latest_broker_id)
                    lg.info('going to rename rotated broker id: %r -> %r' % (broker_id, latest_broker_id, ))
                    old_broker_path = os.path.join(latest_customer_dir, broker_id)
                    if os.path.isfile(old_broker_path):
                        try:
                            os.rename(old_broker_path, latest_broker_path)
                        except:
                            lg.exc()
                            continue
                    if latest_broker_id in known_brokers(latest_customer_id):
                        lg.warn('broker %r already exist' % latest_broker_id)
                        continue
                    known_brokers()[latest_customer_id][broker_pos] = latest_broker_id
    if _Debug:
        lg.args(_DebugLevel, to_be_reconnected=to_be_reconnected)
    for group_key_id in to_be_reconnected:
        gm = group_member.get_active_group_member(group_key_id)
        if gm:
            gm.automat('reconnect')
def SendListFiles(target_supplier, customer_idurl=None, key_id=None, query_items=None, wide=False, callbacks=None, timeout=None):
    """
    This is used as a request method from your supplier : if you send him a ListFiles() packet
    he will reply you with a list of stored files in a Files() packet.

    `target_supplier` is either a supplier IDURL or a position (digit) in the
    suppliers list of `customer_idurl`. Returns the outgoing signed Packet,
    or None when the supplier could not be resolved.
    """
    # FIX: mutable default arguments ([] and {}) replaced with None sentinels
    # so the same list/dict object is not shared between calls
    if query_items is None:
        query_items = []
    if callbacks is None:
        callbacks = {}
    MyID = my_id.getIDURL()
    if not customer_idurl:
        customer_idurl = MyID
    if not str(target_supplier).isdigit():
        RemoteID = target_supplier
    else:
        # a digit means a position in the customer's suppliers list
        RemoteID = contactsdb.supplier(target_supplier, customer_idurl=customer_idurl)
    if not RemoteID:
        lg.warn("RemoteID is empty target_supplier=%s" % str(target_supplier))
        return None
    if not key_id:
        # key_id = global_id.MakeGlobalID(idurl=customer_idurl, key_alias='customer')
        # TODO: due to issue with "customer" key backup/restore decided to always use my "master" key
        # to retrieve my list files info from supplier
        # expect remote user always poses my master public key from my identity.
        # probably require more work to build more reliable solution without using my master key at all
        # when my identity rotated supplier first needs to receive my new identity and then sending ListFiles()
        key_id = my_id.getGlobalID(key_alias='master')
    else:
        key_id = my_keys.latest_key_id(key_id)
        if not my_keys.is_key_registered(key_id) or not my_keys.is_key_private(key_id):
            lg.warn('key %r not exist or public, my "master" key to be used with ListFiles() packet' % key_id)
            key_id = my_id.getGlobalID(key_alias='master')
    PacketID = "%s:%s" % (key_id, packetid.UniqueID(), )
    if not query_items:
        query_items = [
            '*',
        ]
    Payload = serialization.DictToBytes({'items': query_items, })
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.SendListFiles %r to %r of customer %r with query : %r" % (
            PacketID, nameurl.GetName(RemoteID), nameurl.GetName(customer_idurl), query_items, ))
    result = signed.Packet(
        Command=commands.ListFiles(),
        OwnerID=MyID,
        CreatorID=MyID,
        PacketID=PacketID,
        Payload=Payload,
        RemoteID=RemoteID,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks, response_timeout=timeout)
    return result
def is_group_active(group_key_id):
    """
    Tell whether the given group is known locally and currently marked active.
    """
    normalized_id = my_keys.latest_key_id(group_key_id)
    if is_group_exist(normalized_id):
        return active_groups()[normalized_id]['active']
    return False
    def request(self, json_payload, newpacket, info):
        """
        Handle an incoming storage request from a customer.

        Two scenarios are served:
          1. `key_id` present — an external ("second") customer asks for
             shared access to data stored here by one of my customers; only
             the key is registered, no storage is allocated.
          2. no `key_id` — a new or existing customer asks to allocate
             `needed_bytes` of storage; quotas are checked and updated.
        Replies with Ack('accepted') / Ack('deny') / Fail(...).
        """
        from twisted.internet import reactor  # @UnresolvedImport
        from logs import lg
        from main import events
        from crypt import my_keys
        from p2p import p2p_service
        from contacts import contactsdb
        from storage import accounting
        from supplier import customer_space
        from userid import id_url
        from userid import global_id
        customer_idurl = newpacket.OwnerID
        customer_id = global_id.UrlToGlobalID(customer_idurl)
        bytes_for_customer = 0
        try:
            bytes_for_customer = int(json_payload['needed_bytes'])
        except:
            lg.exc()
            return p2p_service.SendFail(newpacket, 'invalid payload')
        try:
            customer_public_key = json_payload['customer_public_key']
            customer_public_key_id = customer_public_key['key_id']
        except:
            # public key is optional in the request
            customer_public_key = None
            customer_public_key_id = None
        data_owner_idurl = None
        target_customer_idurl = None
        family_position = json_payload.get('position')
        ecc_map = json_payload.get('ecc_map')
        family_snapshot = json_payload.get('family_snapshot')
        if family_snapshot:
            family_snapshot = id_url.to_bin_list(family_snapshot)
        key_id = json_payload.get('key_id')
        # NOTE(review): assumes my_keys.latest_key_id() tolerates a None/empty
        # input when no key_id was supplied — confirm
        key_id = my_keys.latest_key_id(key_id)
        target_customer_id = json_payload.get('customer_id')
        if key_id:
            # this is a request from external user to access shared data stored by one of my customers
            # this is "second" customer requesting data from "first" customer
            if not key_id or not my_keys.is_valid_key_id(key_id):
                lg.warn('missed or invalid key id')
                return p2p_service.SendFail(newpacket, 'invalid key id')
            target_customer_idurl = global_id.GlobalUserToIDURL(target_customer_id)
            if not contactsdb.is_customer(target_customer_idurl):
                lg.warn("target user %s is not a customer" % target_customer_id)
                return p2p_service.SendFail(newpacket, 'not a customer')
            if target_customer_idurl == customer_idurl:
                lg.warn('customer %s requesting shared access to own files' % customer_idurl)
                return p2p_service.SendFail(newpacket, 'invalid case')
            if not my_keys.is_key_registered(key_id):
                lg.warn('key not registered: %s' % key_id)
                p2p_service.SendFail(newpacket, 'key not registered')
                return False
            data_owner_idurl = my_keys.split_key_id(key_id)[1]
            if data_owner_idurl != target_customer_idurl and data_owner_idurl != customer_idurl:
                # pretty complex scenario:
                # external customer requesting access to data which belongs not to that customer
                # this is "third" customer accessing data belongs to "second" customer
                # TODO: for now just stop it
                lg.warn('under construction, key_id=%s customer_idurl=%s target_customer_idurl=%s' % (
                    key_id, customer_idurl, target_customer_idurl, ))
                p2p_service.SendFail(newpacket, 'under construction')
                return False
            customer_space.register_customer_key(customer_public_key_id, customer_public_key)
            # do not create connection with that customer, only accept the request
            lg.info('external customer %s requested access to shared data at %s' % (customer_id, key_id, ))
            return p2p_service.SendAck(newpacket, 'accepted')
        # key_id is not present in the request:
        # this is a request to connect new customer (or reconnect existing one) to that supplier
        if not bytes_for_customer or bytes_for_customer < 0:
            lg.warn("wrong payload : %s" % newpacket.Payload)
            return p2p_service.SendFail(newpacket, 'wrong storage value')
        current_customers = contactsdb.customers()
        if accounting.check_create_customers_quotas():
            lg.info('created new customers quotas file')
        space_dict, free_space = accounting.read_customers_quotas()
        try:
            free_bytes = int(free_space)
        except:
            lg.exc()
            return p2p_service.SendFail(newpacket, 'broken space file')
        # sanity: a non-customer must not already occupy a quota entry
        if (customer_idurl not in current_customers and customer_idurl.to_bin() in list(space_dict.keys())):
            lg.warn("broken space file")
            return p2p_service.SendFail(newpacket, 'broken space file')
        if (customer_idurl in current_customers and customer_idurl.to_bin() not in list(space_dict.keys())):
            # seems like customer's idurl was rotated, but space file still have the old idurl
            # need to find that old idurl value and replace with the new one
            for other_customer_idurl in space_dict.keys():
                if other_customer_idurl and other_customer_idurl != 'free' and id_url.field(other_customer_idurl) == customer_idurl:
                    lg.info('found rotated customer identity in space file, switching: %r -> %r' % (
                        other_customer_idurl, customer_idurl.to_bin()))
                    space_dict[customer_idurl.to_bin()] = space_dict.pop(other_customer_idurl)
                    break
            if customer_idurl.to_bin() not in list(space_dict.keys()):
                lg.warn("broken customers file")
                return p2p_service.SendFail(newpacket, 'broken customers file')
        if customer_idurl in current_customers:
            # existing customer: release his current quota before re-allocating
            free_bytes += int(space_dict.get(customer_idurl.to_bin(), 0))
            current_customers.remove(customer_idurl)
            space_dict.pop(customer_idurl.to_bin())
            new_customer = False
        else:
            new_customer = True
        lg.args(8, new_customer=new_customer, current_allocated_bytes=space_dict.get(customer_idurl.to_bin()))
        from supplier import local_tester
        if free_bytes <= bytes_for_customer:
            # not enough free space: deny and roll the customer off the books
            contactsdb.remove_customer_meta_info(customer_idurl)
            accounting.write_customers_quotas(space_dict, free_bytes)
            contactsdb.update_customers(current_customers)
            contactsdb.save_customers()
            if customer_public_key_id:
                my_keys.erase_key(customer_public_key_id)
            reactor.callLater(0, local_tester.TestUpdateCustomers)  # @UndefinedVariable
            if new_customer:
                lg.info("NEW CUSTOMER: DENIED not enough space available")
                events.send('new-customer-denied', data=dict(idurl=customer_idurl))
            else:
                lg.info("OLD CUSTOMER: DENIED not enough space available")
                events.send('existing-customer-denied', data=dict(idurl=customer_idurl))
            return p2p_service.SendAck(newpacket, 'deny')
        # enough space: allocate the requested bytes and persist everything
        free_bytes = free_bytes - bytes_for_customer
        current_customers.append(customer_idurl)
        space_dict[customer_idurl.to_bin()] = bytes_for_customer
        contactsdb.add_customer_meta_info(customer_idurl, {
            'ecc_map': ecc_map,
            'position': family_position,
            'family_snapshot': family_snapshot,
        })
        accounting.write_customers_quotas(space_dict, free_bytes)
        contactsdb.update_customers(current_customers)
        contactsdb.save_customers()
        customer_space.register_customer_key(customer_public_key_id, customer_public_key)
        reactor.callLater(0, local_tester.TestUpdateCustomers)  # @UndefinedVariable
        if new_customer:
            lg.info("NEW CUSTOMER: ACCEPTED %s family_position=%s ecc_map=%s allocated_bytes=%s" % (
                customer_idurl, family_position, ecc_map, bytes_for_customer))
            events.send('new-customer-accepted', data=dict(
                idurl=customer_idurl,
                allocated_bytes=bytes_for_customer,
                ecc_map=ecc_map,
                position=family_position,
                family_snapshot=family_snapshot,
                key_id=customer_public_key_id,
            ))
        else:
            lg.info("OLD CUSTOMER: ACCEPTED %s family_position=%s ecc_map=%s allocated_bytes=%s" % (
                customer_idurl, family_position, ecc_map, bytes_for_customer))
            events.send('existing-customer-accepted', data=dict(
                idurl=customer_idurl,
                allocated_bytes=bytes_for_customer,
                ecc_map=ecc_map,
                position=family_position,
                key_id=customer_public_key_id,
                family_snapshot=family_snapshot,
            ))
        return p2p_service.SendAck(newpacket, 'accepted')
def get_archive_folder_path(group_key_id):
    """
    Read the stored archive folder path of a known group.

    Returns None when the group is unknown or the path was never set.
    """
    normalized_id = my_keys.latest_key_id(group_key_id)
    if is_group_exist(normalized_id):
        return active_groups()[normalized_id].get('archive_folder_path', None)
    return None
def transfer_key(key_id, trusted_idurl, include_private=False, include_signature=False, timeout=10, result=None):
    """
    Actually sending given key to remote user.

    Serializes the key info (optionally with the private part and a
    signature), encrypts it with the recipient's public key and sends it in
    a Key() packet. Returns a Deferred fired by the transfer response.
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.transfer_key %s -> %s' % (key_id, trusted_idurl))
    # normalize key id in case the owner's identity was rotated
    key_id = my_keys.latest_key_id(key_id)
    if not result:
        result = Deferred()
    recipient_id_obj = identitycache.FromCache(trusted_idurl)
    if not recipient_id_obj:
        lg.warn('not found "%s" in identity cache' % trusted_idurl)
        result.errback(Exception('not found "%s" in identity cache' % trusted_idurl))
        return result
    key_alias, creator_idurl = my_keys.split_key_id(key_id)
    if not key_alias or not creator_idurl:
        lg.warn('wrong key_id')
        result.errback(Exception('wrong key_id'))
        return result
    if not my_keys.is_key_registered(key_id):
        lg.warn('unknown key: "%s"' % key_id)
        result.errback(Exception('unknown key: "%s"' % key_id))
        return result
    key_object = my_keys.key_obj(key_id)
    try:
        key_json = my_keys.make_key_info(
            key_object,
            key_id=key_id,
            include_private=include_private,
            generate_signature=include_signature,
        )
    except Exception as exc:
        lg.exc()
        result.errback(exc)
        return result
    # self-check: a freshly generated signature must verify before sending
    if include_signature and not my_keys.verify_key_info_signature(key_json):
        lg.err('signature verification failed after making key info: %r' % key_json)
        result.errback(Exception('signature verification failed after making key info: "%s"' % key_id))
        return result
    if _Debug:
        lg.args(_DebugLevel, key_json=key_json)
    key_data = serialization.DictToBytes(key_json, values_to_text=True)
    block = encrypted.Block(
        BackupID=key_id,
        Data=key_data,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        # encrypt data using public key of recipient
        EncryptKey=lambda inp: recipient_id_obj.encrypt(inp),
    )
    encrypted_key_data = block.Serialize()
    p2p_service.SendKey(
        remote_idurl=recipient_id_obj.getIDURL(),
        encrypted_key_data=encrypted_key_data,
        packet_id=key_id,
        callbacks={
            # Ack, Fail and timeout all route to the same response handler
            commands.Ack(): lambda response, info: _on_transfer_key_response(response, info, key_id, result),
            commands.Fail(): lambda response, info: _on_transfer_key_response(response, info, key_id, result),
            None: lambda pkt_out: _on_transfer_key_response(None, None, key_id, result),
        },
        timeout=timeout,
    )
    return result
def get_last_sequence_id(group_key_id):
    """
    Read the last known sequence id of a group.

    Returns -1 when the group is not registered locally.
    """
    normalized_id = my_keys.latest_key_id(group_key_id)
    if is_group_exist(normalized_id):
        return active_groups()[normalized_id]['last_sequence_id']
    return -1
def load_groups():
    """
    Load all known group and broker records from disk into memory.

    Reads the 'groups' and 'brokers' sub-folders of the 'service_private_groups'
    service directory, migrating on disk any entries whose key/identity was
    rotated (renaming files/folders to the latest known IDs), and populates
    `active_groups()` and `known_brokers()` with the loaded records.
    """
    loaded_brokers = 0
    loaded_groups = 0
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    brokers_dir = os.path.join(service_dir, 'brokers')
    if not os.path.isdir(brokers_dir):
        bpio._dirs_make(brokers_dir)
    # pass 1: group records, one JSON file per group key
    for group_key_id in os.listdir(groups_dir):
        latest_group_key_id = my_keys.latest_key_id(group_key_id)
        latest_group_path = os.path.join(groups_dir, latest_group_key_id)
        if latest_group_key_id != group_key_id:
            # key was rotated: move the stored file to the latest key id
            lg.info('going to rename rotated group key: %r -> %r' % (group_key_id, latest_group_key_id, ))
            old_group_path = os.path.join(groups_dir, group_key_id)
            try:
                os.rename(old_group_path, latest_group_path)
            except:
                lg.exc()
                continue
        latest_group_info = jsn.loads_text(local_fs.ReadTextFile(latest_group_path))
        if not latest_group_info:
            lg.err('was not able to load group info from %r' % latest_group_path)
            continue
        active_groups()[latest_group_key_id] = latest_group_info
        loaded_groups += 1
    # pass 2: broker records, one folder per customer with one file per broker
    for customer_id in os.listdir(brokers_dir):
        latest_customer_id = global_id.latest_glob_id(customer_id)
        latest_customer_dir = os.path.join(brokers_dir, latest_customer_id)
        if latest_customer_id != customer_id:
            # customer identity was rotated: migrate the whole folder
            lg.info('going to rename rotated customer id: %r -> %r' % (customer_id, latest_customer_id, ))
            old_customer_dir = os.path.join(brokers_dir, customer_id)
            try:
                bpio.move_dir_recursive(old_customer_dir, latest_customer_dir)
                bpio.rmdir_recursive(old_customer_dir)
            except:
                lg.exc()
                continue
        for broker_id in os.listdir(latest_customer_dir):
            if latest_customer_id not in known_brokers():
                # calling known_brokers() with an argument creates an empty slot
                known_brokers(latest_customer_id)
            latest_broker_id = global_id.latest_glob_id(broker_id)
            latest_broker_path = os.path.join(latest_customer_dir, latest_broker_id)
            if latest_broker_id != broker_id:
                # broker identity was rotated: move the stored file
                lg.info('going to rename rotated broker id: %r -> %r' % (broker_id, latest_broker_id, ))
                old_broker_path = os.path.join(latest_customer_dir, broker_id)
                try:
                    os.rename(old_broker_path, latest_broker_path)
                except:
                    lg.exc()
                    continue
            latest_broker_info = jsn.loads_text(local_fs.ReadTextFile(latest_broker_path))
            if not latest_broker_info:
                lg.err('was not able to load broker info from %r' % latest_broker_path)
                continue
            # each broker occupies a numbered "position" slot per customer;
            # if the slot is already taken, this file is a duplicate - erase it
            existing_broker_id = known_brokers(latest_customer_id)[int(latest_broker_info['position'])]
            if existing_broker_id:
                if os.path.isfile(latest_broker_path):
                    lg.err('found duplicated broker for customer %r on position %d, erasing file %r' % (
                        latest_customer_id, int(latest_broker_info['position']), latest_broker_path, ))
                    try:
                        os.remove(latest_broker_path)
                    except:
                        lg.exc()
                continue
            known_brokers()[latest_customer_id][int(latest_broker_info['position'])] = latest_broker_id
            loaded_brokers += 1
    if _Debug:
        lg.args(_DebugLevel, loaded_groups=loaded_groups, loaded_brokers=loaded_brokers)
def do_backup_key(key_id, keys_folder=None):
    """
    Send given key to my suppliers to store it remotely.

    This will make a regular backup copy of that key file - encrypted with my master key.

    Returns a Deferred firing True on success, or errback with the reason.
    The master key is never uploaded.
    """
    key_id = my_keys.latest_key_id(key_id)
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.do_backup_key key_id=%r' % key_id)
    if key_id == my_id.getGlobalID(key_alias='master') or key_id == 'master':
        lg.err('master key must never leave local host')
        return fail(Exception('master key must never leave local host'))
    if not my_keys.is_key_registered(key_id):
        lg.err('unknown key: "%s"' % key_id)
        return fail(Exception('unknown key: "%s"' % key_id))
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    # private and public keys are stored under different file extensions
    if my_keys.is_key_private(key_id):
        local_key_filepath = os.path.join(keys_folder, '%s.private' % key_id)
        remote_path_for_key = '.keys/%s.private' % key_id
    else:
        local_key_filepath = os.path.join(keys_folder, '%s.public' % key_id)
        remote_path_for_key = '.keys/%s.public' % key_id
    # keys are backed up under my own "master" global path in the catalog
    global_key_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path=remote_path_for_key)
    res = api.file_exists(global_key_path)
    if res['status'] == 'OK' and res['result'] and res['result'].get('exist'):
        lg.warn('key %s already exists in catalog' % global_key_path)
        global_key_path_id = res['result'].get('path_id')
        if global_key_path_id and backup_control.IsPathInProcess(global_key_path_id):
            # a backup for this key is already running: attach to its result
            # instead of starting a second upload
            lg.warn('skip, another backup for key already started: %s' % global_key_path_id)
            backup_id_list = backup_control.FindRunningBackup(global_key_path_id)
            if backup_id_list:
                backup_id = backup_id_list[0]
                backup_job = backup_control.GetRunningBackupObject(backup_id)
                if backup_job:
                    backup_result = Deferred()
                    backup_job.resultDefer.addCallback(
                        lambda resp: backup_result.callback(True) if resp == 'done' else backup_result.errback(
                            Exception('failed to upload key "%s", task was not started: %r' % (global_key_path, resp))))
                    if _Debug:
                        backup_job.resultDefer.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='key_ring.do_backup_key')
                    backup_job.resultDefer.addErrback(backup_result.errback)
                    if _Debug:
                        lg.args(_DebugLevel, backup_id=backup_id, global_key_path_id=global_key_path_id)
                    return backup_result
                else:
                    lg.warn('did not found running backup job: %r' % backup_id)
            else:
                lg.warn('did not found running backup id for path: %r' % global_key_path_id)
    else:
        # catalog entry does not exist yet: create it before uploading
        res = api.file_create(global_key_path)
        if res['status'] != 'OK':
            lg.err('failed to create path "%s" in the catalog: %r' % (global_key_path, res))
            return fail(Exception('failed to create path "%s" in the catalog: %r' % (global_key_path, res)))
    res = api.file_upload_start(
        local_path=local_key_filepath,
        remote_path=global_key_path,
        wait_result=True,
        wait_finish=False,
        open_share=False,
    )
    backup_result = Deferred()

    # TODO: put that code bellow into api.file_upload_start() method with additional parameter

    def _job_done(result):
        # fired when the backup job finishes; anything but 'done' is a failure
        if _Debug:
            lg.args(_DebugLevel, key_id=key_id, result=result)
        if result == 'done':
            backup_result.callback(True)
        else:
            backup_result.errback(Exception('failed to upload key "%s", backup is %r' % (key_id, result)))
        return None

    def _task_started(resp):
        # fired when the upload task was accepted; chain onto the running job
        if _Debug:
            lg.args(_DebugLevel, key_id=key_id, upload_response_status=resp['status'])
        if resp['status'] != 'OK':
            backup_result.errback(Exception('failed to upload key "%s", task was not started: %r' % (global_key_path, resp)))
            return None
        backupObj = backup_control.jobs().get(resp['version'])
        if not backupObj:
            backup_result.errback(Exception('failed to upload key "%s", task %r failed to start' % (global_key_path, resp['version'])))
            return None
        backupObj.resultDefer.addCallback(_job_done)
        backupObj.resultDefer.addErrback(backup_result.errback)
        return None

    # api.file_upload_start() may return a plain dict instead of a Deferred;
    # normalize so _task_started() always runs through the callback chain
    if not isinstance(res, Deferred):
        res_defer = Deferred()
        res_defer.callback(res)
        res = res_defer
    res.addCallback(_task_started)
    res.addErrback(backup_result.errback)
    return backup_result
def is_group_exist(group_key_id):
    """
    Tell whether the given group key (normalized to its latest rotated form)
    is present in the list of active groups.
    """
    return my_keys.latest_key_id(group_key_id) in active_groups()
def send(customer_idurl, packet_id, format_type, key_id, remote_idurl, query_items=None):
    """
    Build an encrypted list of stored files for the given customer and send it
    to `remote_idurl` inside a Files() packet.

    :param customer_idurl: identity URL of the customer who owns the data
    :param packet_id: packet ID of the original request, reused in the response
    :param format_type: output format passed to PackListFiles()
    :param key_id: key used to encrypt the resulting list of files
    :param remote_idurl: identity URL of the node that should receive the response
    :param query_items: list of query paths to process; defaults to ['*'] (everything)

    Returns the outgoing packet object, or a Fail() response when the key is
    not registered or the query processing raised an error.
    """
    # FIX: default changed from a shared mutable `[]` to None (classic
    # mutable-default-argument pitfall); the guard below keeps behavior
    # identical for every caller.
    if not query_items:
        query_items = ['*', ]
    key_id = my_keys.latest_key_id(key_id)
    parts = global_id.ParseGlobalID(key_id)
    if parts['key_alias'] == 'master' and parts['idurl'] != my_id.getIDURL():
        # incoming ListFiles() request with the customer's own "master" key:
        # register the customer public key locally on first contact
        if not my_keys.is_key_registered(key_id) and identitycache.HasKey(parts['idurl']):
            lg.info('customer public key %r to be registered locally for the first time' % key_id)
            known_ident = identitycache.FromCache(parts['idurl'])
            if not my_keys.register_key(key_id, known_ident.getPublicKey()):
                lg.err('failed to register known public key of the customer: %r' % key_id)
    if not my_keys.is_key_registered(key_id):
        lg.warn('not able to return Files() for customer %s, key %s not registered' % (customer_idurl, key_id, ))
        return p2p_service.SendFailNoRequest(customer_idurl, packet_id, response='key not registered')
    if _Debug:
        lg.out(_DebugLevel, "list_files.send to %s, customer_idurl=%s, key_id=%s, query_items=%r" % (
            remote_idurl, customer_idurl, key_id, query_items, ))
    ownerdir = settings.getCustomerFilesDir(customer_idurl)
    plaintext = ''
    if os.path.isdir(ownerdir):
        try:
            for query_path in query_items:
                plaintext += process_query_item(query_path, parts['key_alias'], ownerdir)
        except:
            lg.exc()
            return p2p_service.SendFailNoRequest(customer_idurl, packet_id, response='list files query processing error')
    else:
        lg.warn('did not found customer folder: %s' % ownerdir)
    if _Debug:
        lg.out(_DebugLevel, '\n%s' % plaintext)
    raw_list_files = PackListFiles(plaintext, format_type)
    # encrypt the packed list with the requested key before it goes on the wire
    block = encrypted.Block(
        CreatorID=my_id.getIDURL(),
        BackupID=key_id,
        Data=raw_list_files,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        EncryptKey=key_id,
    )
    encrypted_list_files = block.Serialize()
    newpacket = p2p_service.SendFiles(
        idurl=remote_idurl,
        raw_list_files_info=encrypted_list_files,
        packet_id=packet_id,
        callbacks={
            commands.Ack(): on_acked,
            commands.Fail(): on_failed,
            None: on_timeout,
        },
    )
    return newpacket
def do_restore_key(key_id, is_private, keys_folder=None, wait_result=False):
    """
    Restore given key from my suppliers if I do not have it locally.

    When `wait_result` is True returns a Deferred (firing with the download
    response dict, or errback with the reason); otherwise returns True when
    the download was started, or False on an immediate failure.
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.do_restore_key key_id=%r is_private=%r' % (key_id, is_private, ))
    key_id = my_keys.latest_key_id(key_id)
    if my_keys.is_key_registered(key_id):
        # never overwrite a key that already exists locally
        lg.err('local key already exist: "%s"' % key_id)
        if wait_result:
            return fail(Exception('local key already exist: "%s"' % key_id))
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    # remote file name mirrors the convention used by do_backup_key()
    if is_private:
        remote_path_for_key = '.keys/%s.private' % key_id
    else:
        remote_path_for_key = '.keys/%s.public' % key_id
    global_key_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path=remote_path_for_key)
    ret = api.file_download_start(
        remote_path=global_key_path,
        destination_path=keys_folder,
        wait_result=True,
        open_share=False,
    )
    if not isinstance(ret, Deferred):
        # api returned an immediate error response instead of a Deferred
        lg.err('failed to download key "%s": %s' % (key_id, ret))
        if wait_result:
            return fail(Exception('failed to download key "%s": %s' % (key_id, ret)))
        return False
    result = Deferred()

    def _on_result(res):
        # download finished (or failed): validate the response, then load the
        # key file from disk into the local key storage
        if not isinstance(res, dict):
            lg.err('failed to download key "%s": %s' % (key_id, res))
            if wait_result:
                result.errback(Exception('failed to download key "%s": %s' % (key_id, res)))
            return None
        if res['status'] != 'OK':
            lg.err('failed to download key "%s": %r' % (key_id, res))
            if wait_result:
                result.errback(Exception('failed to download key "%s": %r' % (key_id, res)))
            return None
        if not my_keys.load_key(key_id, keys_folder):
            lg.err('failed to read key "%s" from local folder "%s"' % (key_id, keys_folder))
            if wait_result:
                result.errback(Exception('failed to read key "%s" from local folder "%s"' % (key_id, keys_folder)))
            return None
        if _Debug:
            lg.out(_DebugLevel, 'key_ring.do_restore_key._on_result key_id=%s is_private=%r : %r' % (key_id, is_private, res))
        if wait_result:
            result.callback(res)
        return None

    ret.addBoth(_on_result)
    if not wait_result:
        return True
    return result
def verify_packet_ownership(newpacket, raise_exception=False):
    """
    At that point packet creator is already verified via signature,
    but creator could be not authorized to store data on that node.
    So based on owner ID decision must be made what to do with the packet.

    Returns IDURL of the user who should receive and Ack() or None if not authorized.

    The numbered "scenario" branches below each cover one combination of
    packet command, owner/creator relationship and customer/key registration
    state; their order is significant.
    """
    # SECURITY
    owner_idurl = newpacket.OwnerID
    creator_idurl = newpacket.CreatorID
    owner_id = owner_idurl.to_id()
    creator_id = creator_idurl.to_id()
    # PacketID embeds the key alias and the global ID of the data owner
    packet_key_alias, packet_owner_id, _ = packetid.SplitKeyOwnerData(newpacket.PacketID)
    packet_key_id = my_keys.latest_key_id(my_keys.make_key_id(packet_key_alias, creator_idurl, creator_glob_id=packet_owner_id))
    if _Debug:
        lg.args(_DebugLevel, owner_id=owner_id, creator_id=creator_id, packet_id=newpacket.PacketID,
                key_id_registered=my_keys.is_key_registered(packet_key_id))
    if newpacket.Command == commands.Data():
        # --- storing data ---
        if owner_idurl.to_bin() == creator_idurl.to_bin():
            if contactsdb.is_customer(creator_idurl):
                if _Debug:
                    lg.dbg(_DebugLevel, 'OK, scenario 1: customer is sending own data to own supplier')
                return owner_idurl
            lg.err('FAIL, scenario 6: user is not my customer but trying to store data')
            if raise_exception:
                raise Exception('non-authorized user is trying to store data on the supplier')
            return None
        if contactsdb.is_customer(creator_idurl):
            if _Debug:
                lg.dbg(_DebugLevel, 'OK, scenario 2: customer wants to store data for someone else on own supplier')
            # TODO: check that, why do we need that?
            return creator_idurl
        if packet_owner_id == owner_id:
            if contactsdb.is_customer(owner_idurl):
                if my_keys.is_key_registered(packet_key_id):
                    if _Debug:
                        lg.dbg(_DebugLevel, 'OK, scenario 3: another authorized user is sending data to customer to be stored on the supplier')
                    return creator_idurl
        lg.err('non-authorized user is trying to store data on the supplier')
        return None
    if newpacket.Command in [commands.DeleteFile(), commands.DeleteBackup(), ]:
        # --- erasing data ---
        if owner_idurl == creator_idurl:
            if contactsdb.is_customer(creator_idurl):
                if _Debug:
                    lg.dbg(_DebugLevel, 'OK, scenario 4: customer wants to remove already stored data on own supplier')
                return owner_idurl
            lg.err('FAIL, scenario 7: non-authorized user is trying to erase data owned by customer from the supplier')
            if raise_exception:
                raise Exception('non-authorized user is trying to erase data owned by customer from the supplier')
            return None
        if contactsdb.is_customer(creator_idurl):
            # TODO: check that, why do we need that?
            if _Debug:
                lg.dbg(_DebugLevel, 'OK, scenario 8: customer wants to erase existing data that belongs to someone else but stored on the supplier')
            return creator_idurl
        if packet_owner_id == owner_id:
            if contactsdb.is_customer(owner_idurl):
                if my_keys.is_key_registered(packet_key_id):
                    if _Debug:
                        lg.dbg(_DebugLevel, 'OK, scenario 5: another authorized user wants to remove already stored data from the supplier')
                    return creator_idurl
        lg.err('non-authorized user is trying to erase data on the supplier')
        return None
    # any other command: silently ignore only when proxy routing is enabled
    if driver.is_enabled('service_proxy_server'):
        if _Debug:
            lg.dbg(_DebugLevel, 'IGNORE, scenario 9: received Data() not authorized, but proxy router service was enabled')
        return None
    # TODO:
    # scenario 9: make possible to set "active" flag True/False for any key
    # this way customer can make virtual location available for other user but in read-only mode
    raise Exception('scenario not implemented yet, received %r' % newpacket)