def make_key_id(alias, creator_idurl=None, creator_glob_id=None):
    """
    Build a globally unique key_id string from a key alias and the key creator.

    Every key has a creator and his IDURL is included in the final key_id.
    Example of a global unique address of a remote copy of `cat.png` file:

        [email protected]:animals/cat.png#F20160313043757PM

    The key_id here is `[email protected]`:
    the key alias is `group_abc` and the creator IDURL is
    `http://first-machine.com/alice.xml`. Knowing the full key_id we can find
    and connect to the correct node(s) supporting that resource.
    """
    key_alias = alias or 'master'
    if creator_glob_id is not None:
        # creator already given in global-id form
        return global_id.MakeGlobalID(
            customer=creator_glob_id,
            key_alias=key_alias,
        )
    # fall back to my own identity when no creator IDURL was provided
    owner_idurl = creator_idurl if creator_idurl is not None else my_id.getLocalID()
    return global_id.MakeGlobalID(
        idurl=owner_idurl,
        key_alias=key_alias,
    )
def set_key_id(self, key_id):
    """
    Remember a new key_id for this item and rebuild the derived global paths.
    """
    self.keyID = key_id
    self.keyAlias = packetid.KeyAlias(key_id)
    # full global path to the remote resource, including the remote path part
    self.fullGlobPath = global_id.MakeGlobalID(
        customer=self.customerGlobID,
        key_alias=self.keyAlias,
        path=self.remotePath,
    )
    # customer address portion only, without the path
    self.fullCustomerID = global_id.MakeGlobalID(
        customer=self.customerGlobID,
        key_alias=self.keyAlias,
    )
def doCancelServiceQueue(self, *args, **kwargs):
    """
    Action method.

    Sends a CancelService request to the supplier to tear down my consumer
    registration on his "supplier-file-modified" event queue: unsubscribe
    from the queue, remove my callback and stop me as a consumer.
    Responses are handled by `_supplier_acked` / `_supplier_failed`.
    """
    service_info = {
        'items': [{
            'scope': 'consumer',
            'action': 'unsubscribe',
            'consumer_id': strng.to_text(my_id.getGlobalID()),
            'queue_id': global_id.MakeGlobalQueueID(
                queue_alias='supplier-file-modified',
                owner_id=my_id.getGlobalID(),
                supplier_id=global_id.MakeGlobalID(idurl=self.supplier_idurl),
            ),
        }, {
            'scope': 'consumer',
            'action': 'remove_callback',
            'consumer_id': strng.to_text(my_id.getGlobalID()),
            # callback is addressed by my local identity
            'method': strng.to_text(my_id.getLocalID()),
        }, {
            'scope': 'consumer',
            'action': 'stop',
            'consumer_id': strng.to_text(my_id.getGlobalID()),
        },
        ],
    }
    p2p_service.SendCancelService(
        remote_idurl=self.supplier_idurl,
        service_name='service_p2p_notifications',
        json_payload=service_info,
        callbacks={
            commands.Ack(): self._supplier_acked,
            commands.Fail(): self._supplier_failed,
        },
    )
def _do_retrieve(self, x=None):
    """
    Request my backup index file from every connected and online supplier.

    Builds the packet ID pointing at the "master" copy of the backup index
    file and sends a Retrieve request to each suitable supplier. Tracks the
    requested suppliers; responses are handled by `_on_supplier_response`
    and `_on_supplier_fail`.
    """
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    localID = my_id.getIDURL()
    for supplier_idurl in contactsdb.suppliers():
        if not supplier_idurl:
            continue
        # skip suppliers without an established connection
        sc = supplier_connector.by_idurl(supplier_idurl)
        if sc is None or sc.state != 'CONNECTED':
            continue
        if online_status.isOffline(supplier_idurl):
            continue
        pkt_out = p2p_service.SendRetreive(
            ownerID=localID,
            creatorID=localID,
            packetID=packetID,
            remoteID=supplier_idurl,
            response_timeout=60 * 2,
            callbacks={
                commands.Data(): self._on_supplier_response,
                commands.Fail(): self._on_supplier_fail,
            })
        if pkt_out:
            self.requesting_suppliers.add(supplier_idurl)
            self.requested_suppliers_number += 1
            self.requests_packets_sent.append((packetID, supplier_idurl))
        if _Debug:
            lg.out(_DebugLevel, '    %s sending to %s' % (pkt_out, nameurl.GetName(supplier_idurl)))
def encrypt(self, message_body, encrypt_session_func=None):
    """
    Encrypt `message_body` for the current recipient.

    A fresh session key is generated and used to encrypt the body; the
    session key itself is then encrypted with a key selected for the
    recipient, trying in order:

    1. an explicitly provided `encrypt_session_func`;
    2. a locally registered key matching the recipient id;
    3. when the recipient alias is "master": my own master key if the
       message is addressed to me, otherwise the public key from the cached
       remote identity;
    4. my own key registered under the same key alias.

    Stores and returns the tuple (encrypted_session, encrypted_body).
    Raises when no suitable key can be found or the remote identity is not
    cached yet.
    """
    if _Debug:
        lg.args(_DebugLevel, encrypt_session_func=encrypt_session_func, recipient=self.recipient)
    new_sessionkey = key.NewSessionKey(session_key_type=key.SessionKeyType())
    if not encrypt_session_func:
        if my_keys.is_key_registered(self.recipient):
            if _Debug:
                lg.dbg(_DebugLevel, 'with registered key %r' % self.recipient)
            encrypt_session_func = lambda inp: my_keys.encrypt(self.recipient, inp)
    if not encrypt_session_func:
        glob_id = global_id.NormalizeGlobalID(self.recipient)
        if glob_id['key_alias'] == 'master':
            if glob_id['idurl'] == my_id.getIDURL():
                # addressed to myself - encrypt with my own master key
                lg.warn('making encrypted message addressed to me ?')
                encrypt_session_func = lambda inp: my_keys.encrypt('master', inp)
            else:
                remote_identity = identitycache.FromCache(glob_id['idurl'])
                if not remote_identity:
                    raise Exception('remote identity is not cached yet, not able to encrypt the message')
                if _Debug:
                    lg.dbg(_DebugLevel, 'with remote identity public key %r' % glob_id['idurl'])
                encrypt_session_func = remote_identity.encrypt
        else:
            # try my own key registered under the same alias
            own_key = global_id.MakeGlobalID(idurl=my_id.getIDURL(), key_alias=glob_id['key_alias'])
            if my_keys.is_key_registered(own_key):
                if _Debug:
                    lg.dbg(_DebugLevel, 'with registered key (found by alias) %r' % own_key)
                encrypt_session_func = lambda inp: my_keys.encrypt(own_key, inp)
    if not encrypt_session_func:
        raise Exception('can not find key for given recipient')
    self.encrypted_session = encrypt_session_func(new_sessionkey)
    self.encrypted_body = key.EncryptWithSessionKey(new_sessionkey, message_body, session_key_type=key.SessionKeyType())
    return self.encrypted_session, self.encrypted_body
def serialize_json(self):
    """
    Build a JSON-friendly dictionary representing this identity object.

    Used to represent identity in API methods.
    """
    original_idurl = self.getIDURL().original()
    return {
        'name': self.getIDName(),
        'idurl': strng.to_text(original_idurl),
        'global_id': global_id.MakeGlobalID(idurl=original_idurl),
        'sources': [strng.to_text(src) for src in self.getSources(as_originals=True)],
        'contacts': [strng.to_text(contact) for contact in self.getContacts()],
        'certificates': [strng.to_text(cert) for cert in self.certificates],
        'scrubbers': [strng.to_text(scrubber) for scrubber in self.scrubbers],
        'postage': strng.to_text(self.postage),
        'date': strng.to_text(self.date),
        'version': strng.to_text(self.version),
        'revision': strng.to_text(self.revision),
        'publickey': strng.to_text(self.publickey),
        'signature': strng.to_text(self.signature),
    }
def create_archive_folder(group_key_id, force_path_id=None):
    """
    Make sure the hidden `.archive/<key_alias>` folder for the given group
    exists in the catalog and return its path ID.

    :param group_key_id: full id of the group key, used to derive the key
        alias and the group creator IDURL
    :param force_path_id: when given, require the catalog entry to have
        exactly this path ID (and create it with that ID when missing)
    :return: the catalog path ID of the archive folder, or None on failure
        or on path ID mismatch
    """
    def _verified(path_id):
        # reject an existing folder whose catalog path ID conflicts with the requested one
        if force_path_id is not None and force_path_id != path_id:
            lg.err('archive folder exists, but have different path ID in the catalog: %r' % path_id)
            return None
        return path_id

    group_key_alias, group_creator_idurl = my_keys.split_key_id(group_key_id)
    catalog_path = os.path.join('.archive', group_key_alias)
    archive_folder_catalog_path = global_id.MakeGlobalID(
        key_alias=group_key_alias,
        customer=group_creator_idurl.to_id(),
        path=catalog_path,
    )
    res = api.file_exists(archive_folder_catalog_path)
    if res['status'] != 'OK':
        lg.err('failed to check archive folder in the catalog: %r' % res)
        return None
    if res['result']['exist']:
        return _verified(res['result']['path_id'])
    res = api.file_create(archive_folder_catalog_path, as_folder=True, exist_ok=True, force_path_id=force_path_id)
    if res['status'] != 'OK':
        lg.err('failed to create archive folder in the catalog: %r' % res)
        return None
    if res['result']['created']:
        lg.info('created new archive folder in the catalog: %r' % res)
    else:
        lg.info('archive folder already exist in the catalog: %r' % res)
    return _verified(res['result']['path_id'])
def SendListFiles(target_supplier, customer_idurl=None, key_id=None, wide=False, callbacks=None):
    """
    Request a list of stored files from a supplier.

    This is used as a request method from your supplier: if you send him a
    ListFiles() packet he will reply you with a list of stored files in a
    Files() packet.

    :param target_supplier: supplier position (a digit) or his IDURL
    :param customer_idurl: whose files to list, defaults to my own IDURL
    :param key_id: key used to build the packet ID, defaults to the
        "customer" key of the given customer
    :param wide: pass-through flag for gateway.outbox()
    :param callbacks: optional dict of response callbacks for the outbox
    :return: the outgoing signed packet, or None when the supplier was not found
    """
    # NOTE: default changed from a shared mutable `{}` to None to avoid the
    # mutable-default-argument pitfall; callers are unaffected.
    if callbacks is None:
        callbacks = {}
    MyID = my_id.getLocalID()
    if not customer_idurl:
        customer_idurl = MyID
    if not str(target_supplier).isdigit():
        RemoteID = target_supplier
    else:
        # a digit means a position in the customer's suppliers list
        RemoteID = contactsdb.supplier(target_supplier, customer_idurl=customer_idurl)
    if not RemoteID:
        lg.warn("RemoteID is empty target_supplier=%s" % str(target_supplier))
        return None
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.SendListFiles to %s" % nameurl.GetName(RemoteID))
    if not key_id:
        key_id = global_id.MakeGlobalID(idurl=customer_idurl, key_alias='customer')
    PacketID = "%s:%s" % (key_id, packetid.UniqueID(), )
    Payload = settings.ListFilesFormat()
    result = signed.Packet(
        Command=commands.ListFiles(),
        OwnerID=MyID,
        CreatorID=MyID,
        PacketID=PacketID,
        Payload=Payload,
        RemoteID=RemoteID,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks)
    return result
def do_restore_key(key_id, is_private, keys_folder=None, wait_result=False):
    """
    Restore given key from my suppliers if I do not have it locally.

    Downloads `.keys/<key_id>.private` (or `.public`) from the remote
    backup copy and loads it into the local key store.

    :param key_id: full id of the key to be restored
    :param is_private: selects the `.private` or `.public` remote file name
    :param keys_folder: local destination folder, defaults to the key store dir
    :param wait_result: when True a Deferred is returned which fires after
        the download and load completed (or errback on failure); otherwise
        a plain boolean indicates only that the download was started
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.do_restore_key     key_id=%r  is_private=%r' % (key_id, is_private, ))
    if my_keys.is_key_registered(key_id):
        # nothing to restore - key is already in the local key store
        lg.err('local key already exist: "%s"' % key_id)
        if wait_result:
            return fail(Exception('local key already exist: "%s"' % key_id))
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    if is_private:
        remote_path_for_key = '.keys/%s.private' % key_id
    else:
        remote_path_for_key = '.keys/%s.public' % key_id
    global_key_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path=remote_path_for_key)
    ret = api.file_download_start(
        remote_path=global_key_path,
        destination_path=keys_folder,
        wait_result=True,
        open_share=False,
    )
    if not isinstance(ret, Deferred):
        # api call failed straight away, no download was started
        lg.err('failed to download key "%s": %s' % (key_id, ret))
        if wait_result:
            return fail(Exception('failed to download key "%s": %s' % (key_id, ret)))
        return False
    result = Deferred()

    def _on_result(res):
        # fired when the download attempt finished (success or failure)
        if not isinstance(res, dict):
            lg.err('failed to download key "%s": %s' % (key_id, res))
            if wait_result:
                result.errback(Exception('failed to download key "%s": %s' % (key_id, res)))
            return None
        if res['status'] != 'OK':
            lg.err('failed to download key "%s": %r' % (key_id, res))
            if wait_result:
                result.errback(Exception('failed to download key "%s": %r' % (key_id, res)))
            return None
        if not my_keys.load_key(key_id, keys_folder):
            lg.err('failed to read key "%s" from local folder "%s"' % (key_id, keys_folder))
            if wait_result:
                result.errback(Exception('failed to read key "%s" from local folder "%s"' % (key_id, keys_folder)))
            return None
        if _Debug:
            lg.out(_DebugLevel, 'key_ring.do_restore_key._on_result key_id=%s  is_private=%r : %r' % (key_id, is_private, res))
        if wait_result:
            result.callback(res)
        return None

    ret.addBoth(_on_result)
    if not wait_result:
        return True
    return result
def is_key_private(key_id, include_master=True):
    """
    Return True if the given key is known and holds a private part.

    When `include_master` is set, my own "master" key is always treated
    as private.
    """
    if not is_key_registered(key_id):
        return False
    if include_master:
        my_master_key_id = global_id.MakeGlobalID(idurl=my_id.getLocalID(), key_alias='master')
        if key_id == my_master_key_id:
            return True
    return not key_obj(key_id).isPublic()
def encrypt(self, message_body, encrypt_session_func=None):
    """
    Encrypt `message_body` for the current recipient of this private message.

    A fresh session key encrypts the body; the session key itself is then
    encrypted with a key selected for the recipient, trying in order:

    1. an explicitly provided `encrypt_session_func`;
    2. a locally registered key matching the recipient id;
    3. when the recipient alias is "master": my own master key if addressed
       to me, otherwise the public key of the cached remote identity;
    4. my own key registered under the same key alias.

    Stores and returns the tuple (encrypted_session, encrypted_body).
    Raises when no suitable key can be found or the remote identity is not
    cached yet.
    """
    new_sessionkey = key.NewSessionKey()
    if not encrypt_session_func:
        if my_keys.is_key_registered(self.recipient):
            if _Debug:
                lg.out(_DebugLevel, 'message.PrivateMessage.encrypt with "%s" key' % self.recipient)
            encrypt_session_func = lambda inp: my_keys.encrypt(self.recipient, inp)
    if not encrypt_session_func:
        glob_id = global_id.ParseGlobalID(self.recipient)
        if glob_id['key_alias'] == 'master':
            if glob_id['idurl'] == my_id.getLocalID():
                # addressed to myself - encrypt with my own master key
                lg.warn('making private message addressed to me ???')
                if _Debug:
                    lg.out(_DebugLevel, 'message.PrivateMessage.encrypt with "master" key')
                encrypt_session_func = lambda inp: my_keys.encrypt('master', inp)
            else:
                remote_identity = identitycache.FromCache(glob_id['idurl'])
                if not remote_identity:
                    raise Exception('remote identity is not cached yet, not able to encrypt the message')
                if _Debug:
                    lg.out(_DebugLevel, 'message.PrivateMessage.encrypt with remote identity public key')
                encrypt_session_func = remote_identity.encrypt
        else:
            # try my own key registered under the same alias
            own_key = global_id.MakeGlobalID(idurl=my_id.getLocalID(), key_alias=glob_id['key_alias'])
            if my_keys.is_key_registered(own_key):
                if _Debug:
                    lg.out(_DebugLevel, 'message.PrivateMessage.encrypt with "%s" key' % own_key)
                encrypt_session_func = lambda inp: my_keys.encrypt(own_key, inp)
    if not encrypt_session_func:
        raise Exception('can not find key for given recipient')
    self.encrypted_session = encrypt_session_func(new_sessionkey)
    self.encrypted_body = key.EncryptWithSessionKey(new_sessionkey, message_body)
    return self.encrypted_session, self.encrypted_body
def backup_outgoing_message(private_message_object, message_id):
    """
    Store an outgoing private message locally and upload a copy to my suppliers.

    The serialized message is written into the local chat channels folder
    under `<recipient>/out/<message_id>`, then registered in the catalog
    under `.messages/out/<recipient>/<message_id>` and uploaded.
    Returns True on success, False otherwise.
    """
    if not driver.is_on('service_backups'):
        lg.warn('service_backups is not started')
        return False
    serialized_message = private_message_object.serialize()
    local_msg_folder = os.path.join(settings.ChatChannelsDir(), private_message_object.recipient, 'out')
    if not bpio._dir_exist(local_msg_folder):
        bpio._dirs_make(local_msg_folder)
    local_msg_filename = os.path.join(local_msg_folder, message_id)
    if not bpio.WriteBinaryFile(local_msg_filename, serialized_message):
        lg.warn('failed writing outgoing message locally')
        return False
    remote_path_for_message = os.path.join('.messages', 'out', private_message_object.recipient, message_id)
    global_message_path = global_id.MakeGlobalID(customer=messages_key_id(), path=remote_path_for_message)
    res = api.file_create(global_message_path)
    if res['status'] != 'OK':
        lg.warn('failed to create path "%s" in the catalog: %s' % (global_message_path, res['errors']))
        return False
    # upload runs in the background, we do not wait for the result here
    res = api.file_upload_start(local_msg_filename, global_message_path, wait_result=False)
    if res['status'] != 'OK':
        lg.warn('failed to upload message "%s": %s' % (global_message_path, res['errors']))
        return False
    return True
def _do_check_sync_keys(self, result):
    """
    Make sure the remote ".keys" folder exists in the catalog, creating it
    when necessary, then start keys synchronization.

    :param result: Deferred which is errback-ed when the folder can not be
        created, otherwise passed on to the keys_synchronizer state machine
    """
    from logs import lg
    from interface import api
    from storage import keys_synchronizer
    from userid import global_id
    from userid import my_id
    self.sync_keys_requested = False
    global_keys_folder_path = global_id.MakeGlobalID(
        key_alias='master', customer=my_id.getGlobalID(), path='.keys')
    res = api.file_exists(global_keys_folder_path)
    if res['status'] != 'OK' or not res['result'] or not res['result'].get('exist'):
        res = api.file_create(global_keys_folder_path, as_folder=True)
        if res['status'] != 'OK':
            lg.err('failed to create ".keys" folder "%s" in the catalog: %r' % (global_keys_folder_path, res))
            result.errback(Exception('failed to create keys folder "%s" in the catalog: %r' % (global_keys_folder_path, res)))
            return
        lg.info('created new remote folder ".keys" in the catalog: %r' % global_keys_folder_path)
    keys_synchronizer.A('sync', result)
def populate_messages(recipient_id=None, sender_id=None, message_types=None, offset=0, limit=100):
    """
    Query stored messages and push each one as a "message" snapshot to the
    listeners.

    :param recipient_id: filter by recipient; accepts a nickname (resolved
        via the correspondents list) or a full global ID
    :param sender_id: filter by sender global ID
    :param message_types: optional list of message type names to filter by
    :param offset: query offset
    :param limit: maximum number of messages to populate
    """
    # NOTE: default changed from a shared mutable `[]` to None to avoid the
    # mutable-default-argument pitfall; an empty list is used when not set.
    if message_types is None:
        message_types = []
    if recipient_id:
        if not recipient_id.count('@'):
            # no "@" means a nickname - resolve it to an IDURL first
            from contacts import contactsdb
            recipient_idurl = contactsdb.find_correspondent_by_nickname(recipient_id)
            if not recipient_idurl:
                lg.err('recipient %r was not found' % recipient_id)
                return
            recipient_id = global_id.UrlToGlobalID(recipient_idurl)
        recipient_glob_id = global_id.ParseGlobalID(recipient_id)
        if not recipient_glob_id['idurl']:
            lg.err('wrong recipient_id')
            return
        # normalize to a canonical global id form
        recipient_id = global_id.MakeGlobalID(**recipient_glob_id)
        if not my_keys.is_valid_key_id(recipient_id):
            lg.err('invalid recipient_id: %s' % recipient_id)
            return
    if sender_id:
        sender_local_key_id = my_keys.get_local_key_id(sender_id)
        if sender_local_key_id is None:
            return
    if recipient_id:
        recipient_local_key_id = my_keys.get_local_key_id(recipient_id)
        if recipient_local_key_id is None:
            lg.warn('recipient %r local key id was not registered' % recipient_id)
            return
    for row in query_messages(
        sender_id=sender_id,
        recipient_id=recipient_id,
        bidirectional=False,
        message_types=message_types,
        offset=offset,
        limit=limit,
        raw_results=True,
    ):
        conversation_id = get_conversation_id(row[0], row[2], int(row[5]))
        if conversation_id is None:
            continue
        snap_id = '{}/{}'.format(conversation_id, row[7])
        listeners.push_snapshot(
            'message', snap_id=snap_id, created=row[6],
            data=build_json_message(
                sender=row[1],
                recipient=row[3],
                direction='in' if row[4] == 0 else 'out',
                conversation_id=conversation_id,
                message_type=MESSAGE_TYPE_CODES.get(int(row[5]), 'private_message'),
                message_time=row[6],
                message_id=row[7],
                data=json.loads(row[8]),
            ),
        )
def is_key_registered(key_id, include_master=True):
    """
    Returns True if this key is known.
    """
    if include_master:
        my_master_key_id = global_id.MakeGlobalID(idurl=my_id.getLocalID(), key_alias='master')
        if key_id == my_master_key_id:
            return True
    return key_id in known_keys()
def doCancelRequests(self, arg):
    """
    Action method.

    Cancel every pending outgoing Retrieve packet related to my backup
    index file.
    """
    packetID = global_id.MakeGlobalID(
        idurl=my_id.getLocalID(),
        path=settings.BackupIndexFileName(),
    )
    for pkt_out in packet_out.search_by_backup_id(packetID):
        if pkt_out.outpacket.Command != commands.Retrieve():
            continue
        lg.warn('sending "cancel" to %s' % pkt_out)
        pkt_out.automat('cancel')
def doSuppliersSendIndexFile(self, arg):
    """
    Action method.

    Encrypt my backup index file into a single-block encrypted.Block and
    send it with a Data packet to every online supplier. Tracks the
    suppliers the packet was sent to; Ack/Fail replies are handled by
    `_on_supplier_acked`.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersSendIndexFile')
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    self.sending_suppliers.clear()
    self.sent_suppliers_number = 0
    src = bpio.ReadBinaryFile(settings.BackupIndexFilePath())
    localID = my_id.getLocalID()
    # single-block encrypted payload: block number 0, marked as last block
    b = encrypted.Block(
        localID,
        packetID,
        0,
        key.NewSessionKey(),
        key.SessionKeyType(),
        True,
        src,
    )
    Payload = b.Serialize()
    for supplierId in contactsdb.suppliers():
        if not supplierId:
            continue
        if not contact_status.isOnline(supplierId):
            continue
        newpacket, pkt_out = p2p_service.SendData(
            raw_data=Payload,
            ownerID=localID,
            creatorID=localID,
            remoteID=supplierId,
            packetID=packetID,
            callbacks={
                commands.Ack(): self._on_supplier_acked,
                commands.Fail(): self._on_supplier_acked,
            },
        )
        if pkt_out:
            self.sending_suppliers.add(supplierId)
            self.sent_suppliers_number += 1
        if _Debug:
            lg.out(_DebugLevel, '    %s sending to %s' % (newpacket, nameurl.GetName(supplierId)))
def doSuppliersRequestIndexFile(self, arg):
    """
    Action method.

    Ask every online supplier for my backup index file with a Retrieve
    request. Resets the tracked local and remote index revisions first;
    Data/Fail replies are handled by `_on_supplier_response`.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersRequestIndexFile')
    if driver.is_on('service_backups'):
        from storage import backup_control
        self.current_local_revision = backup_control.revision()
    else:
        # backups service is off - local revision is unknown
        self.current_local_revision = -1
    self.latest_supplier_revision = -1
    self.requesting_suppliers.clear()
    self.requested_suppliers_number = 0
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    localID = my_id.getLocalID()
    for supplierId in contactsdb.suppliers():
        if not supplierId:
            continue
        if not contact_status.isOnline(supplierId):
            continue
        pkt_out = p2p_service.SendRetreive(
            localID, localID, packetID, supplierId,
            callbacks={
                commands.Data(): self._on_supplier_response,
                commands.Fail(): self._on_supplier_response,
            })
        if pkt_out:
            self.requesting_suppliers.add(supplierId)
            self.requested_suppliers_number += 1
        if _Debug:
            lg.out(_DebugLevel, '    %s sending to %s' % (pkt_out, nameurl.GetName(supplierId)))
def doSuppliersSendIndexFile(self, *args, **kwargs):
    """
    Action method.

    Encrypt my backup index file into a single-block encrypted.Block and
    send it with a Data packet to every connected and online supplier.
    Tracks the suppliers and outgoing packet IDs; Ack/Fail replies are
    handled by `_on_supplier_acked`.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersSendIndexFile')
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    self.sending_suppliers.clear()
    self.outgoing_packets_ids = []
    self.sent_suppliers_number = 0
    localID = my_id.getIDURL()
    # single-block encrypted payload: block number 0, marked as last block
    b = encrypted.Block(
        CreatorID=localID,
        BackupID=packetID,
        BlockNumber=0,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        LastBlock=True,
        Data=bpio.ReadBinaryFile(settings.BackupIndexFilePath()),
    )
    Payload = b.Serialize()
    for supplier_idurl in contactsdb.suppliers():
        if not supplier_idurl:
            continue
        # skip suppliers without an established connection
        sc = supplier_connector.by_idurl(supplier_idurl)
        if sc is None or sc.state != 'CONNECTED':
            continue
        if online_status.isOffline(supplier_idurl):
            continue
        newpacket, pkt_out = p2p_service.SendData(
            raw_data=Payload,
            ownerID=localID,
            creatorID=localID,
            remoteID=supplier_idurl,
            packetID=packetID,
            callbacks={
                commands.Ack(): self._on_supplier_acked,
                commands.Fail(): self._on_supplier_acked,
            },
        )
        if pkt_out:
            self.sending_suppliers.add(supplier_idurl)
            self.sent_suppliers_number += 1
            self.outgoing_packets_ids.append(packetID)
        if _Debug:
            lg.out(_DebugLevel, '    %s sending to %s' % (newpacket, nameurl.GetName(supplier_idurl)))
def open_queue(queue_id):
    """
    Create a new empty queue with the given ID.

    The queue owner's "customer" key must already be registered locally.
    Raises on an invalid queue id, a duplicated queue or a missing key.
    """
    global _ActiveQueues
    if not valid_queue_id(queue_id):
        raise Exception('invalid queue id')
    if queue_id in queue():
        raise Exception('queue already exist')
    parsed = global_id.ParseGlobalQueueID(queue_id)
    owner_customer_key_id = global_id.MakeGlobalID(
        customer=parsed['owner_id'],
        key_alias='customer',
    )
    if not my_keys.is_key_registered(owner_customer_key_id):
        raise Exception('customer key for given queue not found')
    _ActiveQueues[queue_id] = OrderedDict()
    lg.info('new queue opened %s based on key %s' % (queue_id, owner_customer_key_id))
    return True
def _on_key_erased(self, evt):
    """
    Event handler: when a key was erased locally, also remove its backup
    copy from the catalog and re-synchronize the remaining keys.
    """
    from interface import api
    from userid import global_id
    from userid import my_id
    if evt.data['is_private']:
        remote_path_for_key = '.keys/%s.private' % evt.data['key_id']
    else:
        remote_path_for_key = '.keys/%s.public' % evt.data['key_id']
    api.file_delete(global_id.MakeGlobalID(
        key_alias='master',
        customer=my_id.getGlobalID(),
        path=remote_path_for_key,
    ))
    self._do_synchronize_keys()
def doRequestQueueService(self, *args, **kwargs):
    """
    Action method.

    Sends a RequestService packet to the supplier asking to register me as
    a consumer of his "supplier-file-modified" event queue: start consumer,
    add my callback and subscribe to the queue. When queue subscription is
    disabled, fires the "queue-skip" event instead. Remembers the outgoing
    packet ID; replies are handled by `_supplier_queue_acked` /
    `_supplier_queue_failed`.
    """
    if not self.queue_subscribe:
        reactor.callLater(0, self.automat, 'queue-skip')  # @UndefinedVariable
        return
    service_info = {
        'items': [
            {
                'scope': 'consumer',
                'action': 'start',
                'consumer_id': strng.to_text(my_id.getGlobalID()),
            },
            {
                'scope': 'consumer',
                'action': 'add_callback',
                'consumer_id': strng.to_text(my_id.getGlobalID()),
                # callback is addressed by my local identity
                'method': strng.to_text(my_id.getLocalID()),
            },
            {
                'scope': 'consumer',
                'action': 'subscribe',
                'consumer_id': strng.to_text(my_id.getGlobalID()),
                'queue_id': global_id.MakeGlobalQueueID(
                    queue_alias='supplier-file-modified',
                    owner_id=my_id.getGlobalID(),
                    supplier_id=global_id.MakeGlobalID(idurl=self.supplier_idurl),
                ),
            },
        ],
    }
    request = p2p_service.SendRequestService(
        remote_idurl=self.supplier_idurl,
        service_name='service_p2p_notifications',
        json_payload=service_info,
        callbacks={
            commands.Ack(): self._supplier_queue_acked,
            commands.Fail(): self._supplier_queue_failed,
        },
    )
    self.request_queue_packet_id = request.PacketID
def check_rename_my_keys():
    """
    Make sure all my keys have correct names according to known latest
    identities I have cached.

    For every key checks corresponding IDURL info and decides to rename it
    if key owner's identity was rotated.
    """
    renames = {}
    for key_id in list(my_keys.known_keys().keys()):
        parsed = global_id.ParseGlobalID(key_id)
        owner_idurl = parsed['idurl']
        if owner_idurl.is_latest():
            continue
        # owner identity was rotated - rebuild key id from the latest IDURL
        renames[key_id] = global_id.MakeGlobalID(
            idurl=owner_idurl.to_bin(),
            key_alias=parsed['key_alias'],
        )
    for old_key_id, new_key_id in renames.items():
        my_keys.rename_key(old_key_id, new_key_id)
def _on_my_keys_synchronize_failed(self, evt):
    """
    Event handler: when my keys failed to synchronize and the config option
    allows it, erase the unreliable ".keys" backup folder from the catalog
    and trigger a network reconnect.
    """
    from logs import lg
    from main import config
    from interface import api
    from userid import global_id
    from userid import my_id
    if not config.conf().getBool('services/keys-storage/reset-unreliable-backup-copies'):
        return
    global_keys_folder_path = global_id.MakeGlobalID(
        key_alias='master',
        customer=my_id.getGlobalID(),
        path='.keys',
    )
    lg.info('about to erase ".keys" folder in the catalog: %r' % global_keys_folder_path)
    if api.file_delete(global_keys_folder_path)['status'] == 'OK':
        api.network_reconnect()
def latest_key_id(key_id):
    """
    Create IDURL object from input key_id and return new key_id (with same
    key_alias) from that IDURL object.

    This way you can be sure that given key_id is pointing to the correct
    owner IDURL.
    """
    if not key_id:
        return key_id
    if key_id == 'master':
        return my_id.getGlobalID(key_alias='master')
    parsed = global_id.ParseGlobalID(key_id, as_field=True)
    if not parsed['idurl']:
        lg.err('invalid key_id: %r' % key_id)
        return key_id
    return global_id.MakeGlobalID(
        idurl=parsed['idurl'].to_bin(),
        key_alias=parsed['key_alias'],
    )
def do_delete_key(key_id, is_private):
    """
    Remove given key from my suppliers nodes.
    """
    kind = 'private' if is_private else 'public'
    remote_path_for_key = '.keys/%s.%s' % (key_id, kind)
    global_key_path = global_id.MakeGlobalID(
        key_alias='master',
        customer=my_id.getGlobalID(),
        path=remote_path_for_key,
    )
    res = api.file_delete(global_key_path)
    if res['status'] != 'OK':
        lg.err('failed to delete key "%s": %r' % (global_key_path, res))
        return False
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.do_delete_key key_id=%s is_private=%r : %r' % (key_id, is_private, res))
    return True
def doRequestQueueService(self, arg):
    """
    Action method.

    Sends a RequestService packet to the supplier asking to register me as
    a consumer of his "supplier-file-modified" event queue: start consumer,
    add my callback and subscribe to the queue. When queue subscription is
    disabled, fires the "fail" event instead. Replies are handled by
    `_supplier_acked` / `_supplier_failed`.
    """
    if not self.queue_subscribe:
        self.automat('fail')
        return
    service_info = {
        'items': [
            {
                'scope': 'consumer',
                'action': 'start',
                'consumer_id': my_id.getGlobalID(),
            },
            {
                'scope': 'consumer',
                'action': 'add_callback',
                'consumer_id': my_id.getGlobalID(),
                # callback is addressed by my local identity
                'method': my_id.getLocalID(),
            },
            {
                'scope': 'consumer',
                'action': 'subscribe',
                'consumer_id': my_id.getGlobalID(),
                'queue_id': global_id.MakeGlobalQueueID(
                    queue_alias='supplier-file-modified',
                    owner_id=my_id.getGlobalID(),
                    supplier_id=global_id.MakeGlobalID(idurl=self.supplier_idurl),
                ),
            },
        ],
    }
    p2p_service.SendRequestService(
        remote_idurl=self.supplier_idurl,
        service_name='service_p2p_notifications',
        json_payload=service_info,
        callbacks={
            commands.Ack(): self._supplier_acked,
            commands.Fail(): self._supplier_failed,
        },
    )
def on_customer_accepted(evt):
    """
    Event handler: when a new customer was accepted, make sure the
    "supplier-file-modified" queue exists for him and that I am connected
    to it as a producer publishing that event.

    :param evt: event object carrying `data['idurl']` of the accepted customer
    :return: True when processing completed, False when the customer idurl
        is missing in the event payload
    """
    customer_idurl = id_url.field(evt.data.get('idurl'))
    if not customer_idurl:
        lg.warn('unknown customer idurl in event data payload')
        return False
    customer_glob_id = global_id.idurl2glob(customer_idurl)
    # hoisted: my own global ID is used many times below
    my_glob_id = my_id.getGlobalID()
    queue_id = global_id.MakeGlobalQueueID(
        queue_alias='supplier-file-modified',
        owner_id=customer_glob_id,
        supplier_id=my_glob_id,
    )
    if not p2p_queue.is_queue_exist(queue_id):
        # the queue is keyed on the customer's "customer" key
        customer_key_id = global_id.MakeGlobalID(customer=customer_glob_id, key_alias='customer')
        if my_keys.is_key_registered(customer_key_id):
            try:
                p2p_queue.open_queue(queue_id)
            except Exception as exc:
                lg.warn('failed to open queue %s : %s' % (queue_id, str(exc)))
        else:
            lg.warn('customer key %r for supplier queue not registered' % customer_key_id)
    if p2p_queue.is_queue_exist(queue_id):
        if not p2p_queue.is_producer_exist(my_glob_id):
            try:
                p2p_queue.add_producer(my_glob_id)
            except Exception as exc:
                lg.warn('failed to add producer: %s' % str(exc))
        if p2p_queue.is_producer_exist(my_glob_id):
            if not p2p_queue.is_producer_connected(my_glob_id, queue_id):
                try:
                    p2p_queue.connect_producer(my_glob_id, queue_id)
                except Exception as exc:
                    lg.warn('failed to connect producer: %s' % str(exc))
            if p2p_queue.is_producer_connected(my_glob_id, queue_id):
                if not p2p_queue.is_event_publishing(my_glob_id, 'supplier-file-modified'):
                    try:
                        p2p_queue.start_event_publisher(my_glob_id, 'supplier-file-modified')
                    except Exception as exc:
                        lg.warn('failed to start event publisher: %s' % str(exc))
    return True
def LatestBackupID(backupID):
    """
    Rebuild the given backup ID so that its owner part points to the latest
    known IDURL of the key owner.

    Useful after identity rotation: parses the backup ID and re-assembles
    it from the (possibly updated) key_id, path and version fields. Returns
    the input unchanged when it is empty or can not be parsed.
    """
    if not backupID:
        return backupID
    from userid import global_id
    glob_id = global_id.ParseGlobalID(backupID, as_field=True)
    if not glob_id['idurl']:
        from logs import lg
        lg.err('invalid backupID: %r' % backupID)
        return backupID
    return global_id.MakeGlobalID(
        key_id=glob_id['key_id'],
        path=glob_id['path'],
        version=glob_id['version'],
    )
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Process incoming Data() and Files() packets related to my backups.

    Data() packets carrying my own backup index file are forwarded to
    backup_control.IncomingSupplierBackupIndex(); Files() packets coming
    from my suppliers are forwarded to
    backup_control.IncomingSupplierListFiles(). Returns True when the
    packet was handled here, False otherwise.
    """
    from logs import lg
    from main import settings
    from contacts import contactsdb
    from userid import my_id
    from userid import global_id
    from storage import backup_control
    from p2p import commands
    if newpacket.Command == commands.Data():
        if newpacket.OwnerID != my_id.getLocalID():
            # only catch data belongs to me
            return False
        lg.out(
            self.debug_level,
            "service_backups._on_inbox_packet_received: %r for us from %s" % (
                newpacket, newpacket.RemoteID, ))
        if newpacket.PacketID == global_id.MakeGlobalID(
            idurl=my_id.getLocalID(),
            path=settings.BackupIndexFileName(),
        ):
            # TODO: move to service_backup_db
            backup_control.IncomingSupplierBackupIndex(newpacket)
            return True
    if newpacket.Command == commands.Files():
        if not newpacket.PacketID.startswith(my_id.getGlobalID() + ':'):
            # skip Files() which are from another customer
            return False
        if not contactsdb.is_supplier(newpacket.OwnerID):
            # skip Files() if this is not my supplier
            return False
        lg.out(
            self.debug_level,
            "service_backups._on_inbox_packet_received: %r for us from %s" % (
                newpacket, newpacket.RemoteID, ))
        return backup_control.IncomingSupplierListFiles(newpacket)
    return False