def on_delete_file(newpacket):
    """
    Handle an incoming DeleteFile() request: erase one or several files/folders
    stored locally for a customer.

    The packet Payload may carry multiple packet IDs separated by newlines; when
    the Payload is empty, the request's own PacketID is the single target.
    Returns True on success (Ack sent back), False on a rejected request (Fail sent back).
    """
    # TODO: call verify_packet_ownership()
    if not newpacket.Payload:
        ids = [newpacket.PacketID, ]
    else:
        ids = strng.to_text(newpacket.Payload).split('\n')
    filescount = 0
    dirscount = 0
    lg.warn('going to erase files: %s' % ids)
    customer_id = global_id.UrlToGlobalID(newpacket.OwnerID)
    for pcktID in ids:
        glob_path = global_id.ParseGlobalID(pcktID)
        if not glob_path['customer']:
            # packet ID came without the customer part — prepend the owner's global ID
            glob_path = global_id.ParseGlobalID(customer_id + ':' + pcktID)
        if not glob_path['path']:
            lg.err("got incorrect PacketID")
            p2p_service.SendFail(newpacket, 'incorrect path')
            return False
        if customer_id != glob_path['customer']:
            # refuse to touch data that belongs to a different customer
            lg.warn('trying to delete file stored for another cusomer')
            continue
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = make_valid_filename(newpacket.OwnerID, glob_path)
        if not filename:
            lg.warn("got empty filename, bad customer or wrong packetID?")
            p2p_service.SendFail(newpacket, 'not a customer, or file not found')
            return False
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                # best-effort delete: log the failure and keep going
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
        # if self.publish_event_supplier_file_modified:
        #     events.send('supplier-file-modified', data=dict(
        #         action='delete',
        #         glob_path=glob_path['path'],
        #         owner_id=newpacket.OwnerID,
        #     ))
    if _Debug:
        lg.dbg(_DebugLevel, "from [%s] with %d IDs, %d files and %d folders were removed" % (newpacket.OwnerID, len(ids), filescount, dirscount))
    p2p_service.SendAck(newpacket)
    return True
def _on_delete_file(self, newpacket):
    """
    Service handler for an incoming DeleteFile() request from a customer:
    remove the listed files/folders from this supplier's local storage.

    Payload may hold several packet IDs separated by newlines; an empty
    Payload means the request's own PacketID is the single target.
    Returns True when an Ack was sent, False when the request was rejected with Fail.
    """
    import os
    from logs import lg
    from system import bpio
    from lib import strng
    from userid import global_id
    from p2p import p2p_service
    from main import events
    if not newpacket.Payload:
        ids = [newpacket.PacketID, ]
    else:
        ids = strng.to_text(newpacket.Payload).split('\n')
    filescount = 0
    dirscount = 0
    lg.warn('going to erase files: %s' % ids)
    customer_id = global_id.UrlToGlobalID(newpacket.OwnerID)
    for pcktID in ids:
        glob_path = global_id.ParseGlobalID(pcktID)
        if not glob_path['customer']:
            # packet ID came without the customer part — prepend the owner's global ID
            glob_path = global_id.ParseGlobalID(customer_id + ':' + pcktID)
        if not glob_path['path']:
            lg.err("got incorrect PacketID")
            p2p_service.SendFail(newpacket, 'incorrect path')
            return False
        if customer_id != glob_path['customer']:
            # refuse to touch data that belongs to a different customer
            lg.warn('trying to delete file stored for another cusomer')
            continue
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = self._do_make_valid_filename(newpacket.OwnerID, glob_path)
        if not filename:
            lg.warn("got empty filename, bad customer or wrong packetID?")
            p2p_service.SendFail(newpacket, 'not a customer, or file not found')
            return False
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                # best-effort delete: log the failure and keep going
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
        # NOTE(review): event is published per processed item inside the loop —
        # placement inferred from use of the loop-local glob_path; confirm against repo history
        if self.publish_event_supplier_file_modified:
            events.send('supplier-file-modified', data=dict(
                action='delete',
                glob_path=glob_path['path'],
                owner_id=newpacket.OwnerID,
            ))
    lg.out(self.debug_level, "service_supplier._on_delete_file from [%s] with %d IDs, %d files and %d folders were removed" % (
        newpacket.OwnerID, len(ids), filescount, dirscount))
    p2p_service.SendAck(newpacket)
    return True
def DeleteFile(request):
    """
    Delete one ore multiple files (that belongs to another user) or folders on my machine.

    Targets come from request.Payload (newline-separated packet IDs) or, when the
    Payload is empty, from request.PacketID itself. Sends Ack on completion,
    Fail when the request is invalid or the supplier service is off.
    """
    if _Debug:
        lg.out(_DebugLevel, 'p2p_service.DeleteFile [%s] by %s | %s' % (request.PacketID, request.OwnerID, request.CreatorID))
    if not driver.is_on('service_supplier'):
        return SendFail(request, 'supplier service is off')
    if request.Payload == '':
        ids = [request.PacketID]
    else:
        ids = request.Payload.split('\n')
    filescount = 0
    dirscount = 0
    for pcktID in ids:
        glob_path = global_id.ParseGlobalID(pcktID)
        if not glob_path['path']:
            # backward compatible check
            glob_path = global_id.ParseGlobalID(my_id.getGlobalID() + ':' + request.PacketID)
        if not glob_path['path']:
            lg.warn("got incorrect PacketID")
            SendFail(request, 'incorrect PacketID')
            return
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = makeFilename(request.OwnerID, glob_path['path'])
        if filename == "":
            # fall back to the legacy filename layout
            filename = constructFilename(request.OwnerID, glob_path['path'])
            if not os.path.exists(filename):
                lg.warn("had unknown customer: %s or pathID is not correct or not exist: %s" % (nameurl.GetName(request.OwnerID), glob_path['path']))
                return SendFail(request, 'not a customer, or file not found')
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                # best-effort delete: log and continue with the remaining IDs
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.DeleteFile from [%s] with %d IDs, %d files and %d folders were removed" % (nameurl.GetName(request.OwnerID), len(ids), filescount, dirscount))
    SendAck(request)
def valid_queue_id(queue_id):
    """
    Return True when ``queue_id`` is a well-formed global queue identifier:
    it must be convertible to a string, carry a valid queue alias, and both
    the owner and supplier parts must resolve to an idurl.
    """
    try:
        str(queue_id)
    except:
        return False
    info = global_id.ParseGlobalQueueID(queue_id)
    if not misc.ValidName(info['queue_alias']):
        return False
    # both participants of the queue must parse into a usable idurl
    for role in ('owner_id', 'supplier_id'):
        if not global_id.ParseGlobalID(info[role])['idurl']:
            return False
    return True
def __init__(self, parent, callOnReceived, creatorID, packetID, ownerID, remoteID,
             debug_level=_DebugLevel, log_events=_Debug, log_transitions=_Debug,
             publish_events=False, **kwargs):
    """
    Builds `file_down()` state machine.

    Parses ``packetID`` into customer/path/idurl parts and a backup ID, stores
    request bookkeeping fields, then initializes the underlying automat with a
    name derived from the remote peer and the packet's path/version/file name.
    """
    self.parent = parent
    # callbacks are accumulated in a list so more can be attached later
    self.callOnReceived = []
    self.callOnReceived.append(callOnReceived)
    self.creatorID = creatorID
    self.packetID = global_id.CanonicalID(packetID)
    parts = global_id.ParseGlobalID(packetID)
    self.customerID = parts['customer']
    self.remotePath = parts['path']
    self.customerIDURL = parts['idurl']
    customerGlobalID, remotePath, versionName, fileName = packetid.SplitVersionFilename(packetID)
    self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
    self.fileName = fileName
    self.ownerID = ownerID
    self.remoteID = remoteID
    self.requestTime = None
    self.fileReceivedTime = None
    # timeout scales with block size over sending speed, never below 30 seconds
    self.requestTimeout = max(30, 2 * int(settings.getBackupBlockSize() / settings.SendingSpeedLimit()))
    self.result = ''
    self.created = utime.get_sec1970()
    super(FileDown, self).__init__(
        name="file_down_%s_%s/%s/%s" % (nameurl.GetName(self.remoteID), remotePath, versionName, fileName),
        state="AT_STARTUP",
        debug_level=debug_level,
        log_events=log_events,
        log_transitions=log_transitions,
        publish_events=publish_events,
        **kwargs
    )
def doSavePacket(self, *args, **kwargs):
    """
    Action method.

    Expects ``args[0]`` to be a ``(NewPacket, PacketID)`` tuple. Marks the
    corresponding supplier slot as on-hand (Data or Parity), then writes the
    packet payload into the local backups directory under the customer's path.
    """
    if not args or not args[0]:
        raise Exception('no input found')
    NewPacket, PacketID = args[0]
    glob_path = global_id.ParseGlobalID(PacketID, detect_version=True)
    packetID = global_id.CanonicalID(PacketID)
    customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(packetID)
    if dataORparity == 'Data':
        self.OnHandData[SupplierNumber] = True
    elif dataORparity == 'Parity':
        self.OnHandParity[SupplierNumber] = True
    if not NewPacket:
        # nothing to write — the piece is already present on disk
        lg.warn('packet %r already exists locally' % packetID)
        return
    filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
    dirpath = os.path.dirname(filename)
    if not os.path.exists(dirpath):
        try:
            bpio._dirs_make(dirpath)
        except:
            lg.exc()
    # either way the payload of packet is saved
    if not bpio.WriteBinaryFile(filename, NewPacket.Payload):
        lg.err("unable to write to %s" % filename)
        return
    if self.packetInCallback is not None:
        self.packetInCallback(self.backup_id, NewPacket)
    if _Debug:
        lg.out(_DebugLevel, "restore_worker.doSavePacket %s saved to %s" % (packetID, filename))
def set_path_id(self, pathID):
    """
    Remember the given ``pathID`` (source path to backup) and populate the
    customer/idurl/remote-path fields parsed from it. Returns the parsed parts.
    """
    info = global_id.ParseGlobalID(pathID)
    # keep the original source path to backup
    self.pathID = pathID
    self.customerGlobID = info['customer']
    self.customerIDURL = info['idurl']
    # remote path is expected in "0/1/2" form here
    self.remotePath = info['path']
    return info
def doSavePacket(self, NewPacket):
    """
    Mark the received piece as on-hand for its supplier slot and write the
    packet payload into the local backups directory under the customer's path.

    Fix: the Parity branch previously tested ``NewPacket.DataOrParity()`` while
    the Data branch used ``dataORparity`` parsed from the packet ID — two
    different sources that can disagree. Both branches now use the packet-ID
    value, consistent with the newer ``restore_worker.doSavePacket`` variant.
    """
    glob_path = global_id.ParseGlobalID(NewPacket.PacketID, detect_version=True)
    packetID = global_id.CanonicalID(NewPacket.PacketID)
    customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(packetID)
    if dataORparity == 'Data':
        self.OnHandData[SupplierNumber] = True
    elif dataORparity == 'Parity':
        self.OnHandParity[SupplierNumber] = True
    filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
    dirpath = os.path.dirname(filename)
    if not os.path.exists(dirpath):
        try:
            bpio._dirs_make(dirpath)
        except:
            lg.exc()
    # either way the payload of packet is saved
    if not bpio.WriteFile(filename, NewPacket.Payload):
        lg.warn("unable to write to %s" % filename)
        return
    if self.packetInCallback is not None:
        self.packetInCallback(self.BackupID, NewPacket)
    lg.out(6, "restore.doSavePacket %s saved to %s" % (packetID, filename))
def __init__(self, key_id, debug_level=_DebugLevel, log_events=_Debug, log_transitions=_Debug, publish_events=False, **kwargs):
    """
    Create shared_access_coordinator() state machine.

    Use this method if you need to call Automat.__init__() in a special way.

    ``key_id`` is a global key identifier; its parsed parts provide the
    customer idurl and the automat name ("<key_alias>$<user>").
    """
    self.key_id = key_id
    self.glob_id = global_id.ParseGlobalID(self.key_id)
    self.customer_idurl = self.glob_id['idurl']
    self.known_suppliers_list = []
    self.known_ecc_map = None
    super(SharedAccessCoordinator, self).__init__(
        name="%s$%s" % (self.glob_id['key_alias'], self.glob_id['user']),
        state='AT_STARTUP',
        debug_level=debug_level,
        log_events=log_events,
        log_transitions=log_transitions,
        publish_events=publish_events,
        **kwargs
    )
def encrypt(self, message_body, encrypt_session_func=None):
    """
    Encrypt ``message_body`` with a fresh session key and encrypt that session
    key for the recipient.

    Key selection order: an explicitly supplied ``encrypt_session_func``, a key
    registered under the recipient's id, the recipient's "master" handling
    (my own master key, or the remote identity's public key), or one of my own
    keys found by the recipient's key alias. Raises when no key can be found.
    Returns ``(encrypted_session, encrypted_body)``.
    """
    new_sessionkey = key.NewSessionKey()
    if not encrypt_session_func:
        if my_keys.is_key_registered(self.recipient):
            if _Debug:
                lg.out(_DebugLevel, 'message.PrivateMessage.encrypt with "%s" key' % self.recipient)
            encrypt_session_func = lambda inp: my_keys.encrypt(self.recipient, inp)
    if not encrypt_session_func:
        glob_id = global_id.ParseGlobalID(self.recipient)
        if glob_id['key_alias'] == 'master':
            if glob_id['idurl'] == my_id.getLocalID():
                # message addressed to my own master key
                lg.warn('making private message addressed to me ???')
                if _Debug:
                    lg.out(_DebugLevel, 'message.PrivateMessage.encrypt with "master" key')
                encrypt_session_func = lambda inp: my_keys.encrypt('master', inp)
            else:
                # master key of another user: use their cached identity public key
                remote_identity = identitycache.FromCache(glob_id['idurl'])
                if not remote_identity:
                    raise Exception('remote identity is not cached yet, not able to encrypt the message')
                if _Debug:
                    lg.out(_DebugLevel, 'message.PrivateMessage.encrypt with remote identity public key')
                encrypt_session_func = remote_identity.encrypt
        else:
            # try one of my own keys registered under the same alias
            own_key = global_id.MakeGlobalID(idurl=my_id.getLocalID(), key_alias=glob_id['key_alias'])
            if my_keys.is_key_registered(own_key):
                if _Debug:
                    lg.out(_DebugLevel, 'message.PrivateMessage.encrypt with "%s" key' % own_key)
                encrypt_session_func = lambda inp: my_keys.encrypt(own_key, inp)
    if not encrypt_session_func:
        raise Exception('can not find key for given recipient')
    self.encrypted_session = encrypt_session_func(new_sessionkey)
    self.encrypted_body = key.EncryptWithSessionKey(new_sessionkey, message_body)
    return self.encrypted_session, self.encrypted_body
def _on_list_files(self, newpacket):
    """
    Handle an incoming ListFiles() request and send back the list of stored
    files for the recognized customer. Returns True when the request was
    accepted, False when the Payload format does not match.
    """
    from main import settings
    if newpacket.Payload != settings.ListFilesFormat():
        return False
    # TODO: perform validations before sending back list of files
    from supplier import list_files
    from crypt import my_keys
    from userid import global_id
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if list_files_global_id['key_id']:
        # customer id and data id can be recognized from packet id
        # return back list of files according to the request
        customer_idurl = list_files_global_id['idurl']
        key_id = list_files_global_id['key_id']
    else:
        # packet id format is unknown
        # by default returning back all files from that recipient if he is a customer
        customer_idurl = newpacket.OwnerID
        key_id = my_keys.make_key_id(alias='customer', creator_idurl=customer_idurl)
    list_files.send(
        customer_idurl=customer_idurl,
        packet_id=newpacket.PacketID,
        format_type=settings.ListFilesFormat(),
        key_id=key_id,
        remote_idurl=newpacket.OwnerID,  # send back to the requestor
    )
    return True
def decrypt(self, decrypt_session_func=None):
    """
    Decrypt the stored session key and then the encrypted message body.

    Uses the supplied ``decrypt_session_func`` if given, otherwise a key
    registered for the recipient, otherwise my own "master" key when the
    recipient is my idurl with the "master" alias. Raises when no key applies.
    """
    if _Debug:
        lg.args(_DebugLevel, decrypt_session_func=decrypt_session_func, recipient=self.recipient)
    if not decrypt_session_func:
        if my_keys.is_key_registered(self.recipient):
            if _Debug:
                lg.dbg(_DebugLevel, 'decrypt with registered key %r' % self.recipient)
            decrypt_session_func = lambda inp: my_keys.decrypt(self.recipient, inp)
    if not decrypt_session_func:
        glob_id = global_id.ParseGlobalID(self.recipient)
        if glob_id['idurl'] == my_id.getIDURL():
            if glob_id['key_alias'] == 'master':
                if _Debug:
                    lg.dbg(_DebugLevel, 'decrypt with my master key %r' % self.recipient)
                decrypt_session_func = lambda inp: my_keys.decrypt('master', inp)
    if not decrypt_session_func:
        raise Exception('can not find key for given recipient: %s' % self.recipient)
    decrypted_sessionkey = decrypt_session_func(self.encrypted_session)
    return key.DecryptWithSessionKey(decrypted_sessionkey, self.encrypted_body, session_key_type=key.SessionKeyType())
def on_files_received(newpacket, info):
    """
    Handle an incoming Files() packet from one of my suppliers.

    Ignores packets addressed to another customer or coming from a node that
    is not my supplier. On acceptance, feeds the list into
    ``IncomingSupplierListFiles`` and replies with Ack or Fail.
    """
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    if list_files_global_id['idurl'] != my_id.getLocalID():
        # ignore Files() if this is another customer
        if _Debug:
            lg.dbg(_DebugLevel, 'ignore incoming %r which is owned by another customer' % newpacket)
        return False
    if not contactsdb.is_supplier(newpacket.OwnerID):
        # ignore Files() if this is not my supplier
        if _Debug:
            lg.dbg(_DebugLevel, 'incoming %r received, but %r is not my supplier' % (newpacket, newpacket.OwnerID, ))
        return False
    if _Debug:
        lg.args(_DebugLevel, "service_backups._on_inbox_packet_received: %r for us from %s at %s" % (newpacket, newpacket.CreatorID, info))
    if IncomingSupplierListFiles(newpacket, list_files_global_id):
        p2p_service.SendAck(newpacket)
    else:
        p2p_service.SendFail(newpacket)
    return True
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Inbox callback: consume Files() packets addressed to me that come from one
    of my suppliers; reply with Ack/Fail depending on whether the list was
    accepted. Returns True when the packet was consumed, False otherwise.
    """
    from logs import lg
    from contacts import contactsdb
    from userid import my_id
    from userid import global_id
    from storage import backup_control
    from p2p import commands
    from p2p import p2p_service
    if newpacket.Command == commands.Files():
        list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
        if not list_files_global_id['idurl']:
            lg.warn('invalid PacketID: %s' % newpacket.PacketID)
            return False
        if list_files_global_id['idurl'] != my_id.getLocalIDURL():
            # lg.warn('skip %s which is from another customer' % newpacket)
            return False
        if not contactsdb.is_supplier(newpacket.OwnerID):
            lg.warn('%s came, but %s is not my supplier' % (newpacket, newpacket.OwnerID, ))
            # skip Files() if this is not my supplier
            return False
        lg.out(self.debug_level, "service_backups._on_inbox_packet_received: %r for us from %s at %s" % (
            newpacket, newpacket.CreatorID, info))
        if backup_control.IncomingSupplierListFiles(newpacket, list_files_global_id):
            # send ack packet back
            p2p_service.SendAck(newpacket)
        else:
            p2p_service.SendFail(newpacket)
        return True
    return False
def decrypt(self, decrypt_session_func=None):
    """
    Decrypt the stored session key and then the encrypted message body.

    Uses the supplied ``decrypt_session_func`` if given, otherwise a key
    registered for the recipient, otherwise my own "master" key when the
    recipient is my local ID with the "master" alias. Raises when no key applies.
    """
    if not decrypt_session_func:
        if my_keys.is_key_registered(self.recipient):
            if _Debug:
                lg.out(_DebugLevel, 'message.PrivateMessage.decrypt with "%s" key' % self.recipient)
            decrypt_session_func = lambda inp: my_keys.decrypt(self.recipient, inp)
    if not decrypt_session_func:
        glob_id = global_id.ParseGlobalID(self.recipient)
        if glob_id['idurl'] == my_id.getLocalID():
            if glob_id['key_alias'] == 'master':
                if _Debug:
                    lg.out(_DebugLevel, 'message.PrivateMessage.decrypt with "master" key')
                decrypt_session_func = lambda inp: my_keys.decrypt('master', inp)
    if not decrypt_session_func:
        raise Exception('can not find key for given recipient: %s' % self.recipient)
    decrypted_sessionkey = decrypt_session_func(self.encrypted_session)
    return key.DecryptWithSessionKey(decrypted_sessionkey, self.encrypted_body)
def __init__(self, fileName, packetID, remoteID, ownerID, callOnAck=None, callOnFail=None):
    """
    Prepare an outgoing file-send job: record file size, parse ``packetID``
    into customer/path/idurl parts and a backup ID, store callbacks and timing
    fields, then report the 'init' stage via ``PacketReport``.
    """
    self.fileName = fileName
    try:
        self.fileSize = os.path.getsize(os.path.abspath(fileName))
    except:
        # file may be missing or unreadable — treat as zero-length and log
        lg.exc()
        self.fileSize = 0
    self.packetID = global_id.CanonicalID(packetID)
    parts = global_id.ParseGlobalID(packetID)
    self.customerID = parts['customer']
    self.remotePath = parts['path']
    self.customerIDURL = parts['idurl']
    customerGlobalID, remotePath, versionName, _ = packetid.SplitVersionFilename(packetID)
    self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
    self.remoteID = remoteID
    self.ownerID = ownerID
    self.callOnAck = callOnAck
    self.callOnFail = callOnFail
    self.sendTime = None
    self.ackTime = None
    self.sendTimeout = 10 * 2 * (max(int(self.fileSize / settings.SendingSpeedLimit()), 5) + 5)  # maximum 5 seconds to get an Ack
    self.result = ''
    self.created = utime.get_sec1970()
    PacketReport('send', self.remoteID, self.packetID, 'init')
def populate_messages(recipient_id=None, sender_id=None, message_types=None, offset=0, limit=100):
    """
    Push stored chat messages matching the given filters to event listeners as
    'message' snapshots.

    ``recipient_id`` may be a nickname (resolved via contactsdb) or a global
    ID; it is normalized and validated before querying. ``message_types``
    limits results to the given type codes; ``offset``/``limit`` page through
    the query results.

    Fix: ``message_types`` used a mutable default argument (``[]``); it now
    defaults to ``None`` and is normalized to a fresh list, which is
    backward-compatible for all callers.
    """
    message_types = message_types or []
    if recipient_id:
        if not recipient_id.count('@'):
            # plain nickname given — resolve it to an idurl first
            from contacts import contactsdb
            recipient_idurl = contactsdb.find_correspondent_by_nickname(recipient_id)
            if not recipient_idurl:
                lg.err('recipient %r was not found' % recipient_id)
                return
            recipient_id = global_id.UrlToGlobalID(recipient_idurl)
        recipient_glob_id = global_id.ParseGlobalID(recipient_id)
        if not recipient_glob_id['idurl']:
            lg.err('wrong recipient_id')
            return
        # normalize to the canonical global ID form
        recipient_id = global_id.MakeGlobalID(**recipient_glob_id)
        if not my_keys.is_valid_key_id(recipient_id):
            lg.err('invalid recipient_id: %s' % recipient_id)
            return
    if sender_id:
        sender_local_key_id = my_keys.get_local_key_id(sender_id)
        if sender_local_key_id is None:
            return
    if recipient_id:
        recipient_local_key_id = my_keys.get_local_key_id(recipient_id)
        if recipient_local_key_id is None:
            lg.warn('recipient %r local key id was not registered' % recipient_id)
            return
    for row in query_messages(
        sender_id=sender_id,
        recipient_id=recipient_id,
        bidirectional=False,
        message_types=message_types,
        offset=offset,
        limit=limit,
        raw_results=True,
    ):
        # row layout (by index): 0/2 participant key ids, 1 sender, 3 recipient,
        # 4 direction flag, 5 type code, 6 time, 7 message id, 8 json payload
        conversation_id = get_conversation_id(row[0], row[2], int(row[5]))
        if conversation_id is None:
            continue
        snap_id = '{}/{}'.format(conversation_id, row[7])
        listeners.push_snapshot('message', snap_id=snap_id, created=row[6], data=build_json_message(
            sender=row[1],
            recipient=row[3],
            direction='in' if row[4] == 0 else 'out',
            conversation_id=conversation_id,
            message_type=MESSAGE_TYPE_CODES.get(int(row[5]), 'private_message'),
            message_time=row[6],
            message_id=row[7],
            data=json.loads(row[8]),
        ))
def set_path_id(self, pathID):
    """
    Remember the given ``pathID`` (source path to backup) and populate the
    customer/idurl/remote-path fields parsed from it. When a key alias is
    present, also derives and applies the corresponding key id.
    Returns the parsed parts.
    """
    info = global_id.ParseGlobalID(pathID)
    # keep the original source path to backup
    self.pathID = pathID
    self.customerGlobID = info['customer']
    self.customerIDURL = info['idurl']
    # remote path is expected in "0/1/2" form here
    self.remotePath = info['path']
    alias = info['key_alias']
    if alias:
        self.set_key_id(my_keys.make_key_id(alias=alias, creator_glob_id=self.customerGlobID))
    return info
def is_valid_key_id(global_key_id):
    """
    Return True when ``global_key_id`` parses into a usable key identifier:
    it must contain a key alias, an owner idurl, and the alias must pass
    ``misc.ValidKeyAlias``. A warning is logged for each failed check.
    """
    info = global_id.ParseGlobalID(global_key_id)
    alias = info['key_alias']
    if not alias:
        lg.warn('no key_alias found in the input')
        return False
    if not info['idurl']:
        lg.warn('no idurl found in the input')
        return False
    if not misc.ValidKeyAlias(alias):
        lg.warn('invalid key alias in the input')
        return False
    return True
def __init__(self, parent, fileName, packetID, remoteID, ownerID, callOnAck=None, callOnFail=None,
             debug_level=_DebugLevel, log_events=_Debug, log_transitions=_Debug,
             publish_events=False, **kwargs):
    """
    Builds `file_up()` state machine.

    Records the file size, parses ``packetID`` into customer/path/idurl parts
    and a backup ID, stores callbacks and timing fields, then initializes the
    underlying automat with a name derived from the remote peer and the
    packet's path/version/file name.
    """
    self.parent = parent
    self.fileName = fileName
    try:
        self.fileSize = os.path.getsize(os.path.abspath(fileName))
    except:
        # file may be missing or unreadable — treat as zero-length and log
        lg.exc()
        self.fileSize = 0
    self.packetID = global_id.CanonicalID(packetID)
    parts = global_id.ParseGlobalID(packetID)
    self.customerID = parts['customer']
    self.remotePath = parts['path']
    self.customerIDURL = parts['idurl']
    customerGlobalID, remotePath, versionName, fileName = packetid.SplitVersionFilename(packetID)
    self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
    self.remoteID = remoteID
    self.ownerID = ownerID
    self.callOnAck = callOnAck
    self.callOnFail = callOnFail
    self.sendTime = None
    self.ackTime = None
    self.sendTimeout = 10 * 2 * (max(int(self.fileSize / settings.SendingSpeedLimit()), 5) + 5)  # maximum 5 seconds to get an Ack
    self.result = ''
    self.created = utime.get_sec1970()
    super(FileUp, self).__init__(
        name="file_up_%s_%s/%s/%s" % (nameurl.GetName(self.remoteID), remotePath, versionName, fileName),
        state="AT_STARTUP",
        debug_level=debug_level,
        log_events=log_events,
        log_transitions=log_transitions,
        publish_events=publish_events,
        **kwargs)
def encrypt(self, message_body, encrypt_session_func=None):
    """
    Encrypt ``message_body`` with a fresh session key and encrypt that session
    key for the recipient.

    Key selection order: an explicitly supplied ``encrypt_session_func``, a key
    registered under the recipient's id, the recipient's "master" handling
    (my own master key, or the remote identity's public key), or one of my own
    keys found by the recipient's key alias. Raises when no key can be found.
    Returns ``(encrypted_session, encrypted_body)``.
    """
    if _Debug:
        lg.args(_DebugLevel, encrypt_session_func=encrypt_session_func, recipient=self.recipient)
    new_sessionkey = key.NewSessionKey(session_key_type=key.SessionKeyType())
    if not encrypt_session_func:
        if my_keys.is_key_registered(self.recipient):
            if _Debug:
                lg.dbg(_DebugLevel, 'with registered key %r' % self.recipient)
            encrypt_session_func = lambda inp: my_keys.encrypt(self.recipient, inp)
    if not encrypt_session_func:
        glob_id = global_id.ParseGlobalID(self.recipient)
        if glob_id['key_alias'] == 'master':
            if glob_id['idurl'] == my_id.getIDURL():
                # message addressed to my own master key
                lg.warn('making encrypted message addressed to me ?')
                encrypt_session_func = lambda inp: my_keys.encrypt('master', inp)
            else:
                # master key of another user: use their cached identity public key
                remote_identity = identitycache.FromCache(glob_id['idurl'])
                if not remote_identity:
                    raise Exception('remote identity is not cached yet, not able to encrypt the message')
                if _Debug:
                    lg.dbg(_DebugLevel, 'with remote identity public key %r' % glob_id['idurl'])
                encrypt_session_func = remote_identity.encrypt
        else:
            # try one of my own keys registered under the same alias
            own_key = global_id.MakeGlobalID(idurl=my_id.getIDURL(), key_alias=glob_id['key_alias'])
            if my_keys.is_key_registered(own_key):
                if _Debug:
                    lg.dbg(_DebugLevel, 'with registered key (found by alias) %r' % own_key)
                encrypt_session_func = lambda inp: my_keys.encrypt(own_key, inp)
    if not encrypt_session_func:
        raise Exception('can not find key for given recipient')
    self.encrypted_session = encrypt_session_func(new_sessionkey)
    self.encrypted_body = key.EncryptWithSessionKey(new_sessionkey, message_body, session_key_type=key.SessionKeyType())
    return self.encrypted_session, self.encrypted_body
def OnDataReceived(self, newpacket, result):
    """
    Handle a response for a previously requested piece of data from a supplier.

    Looks up the matching entry in the download queue (also trying a packet ID
    rewritten with the supplier's latest idurl, to survive identity rotation),
    then forwards a success/failure event to the corresponding ``file_down``
    state machine.
    """
    # we requested some data from a supplier, and just received it
    if self.shutdown:
        lg.warn('skip, supplier queue is shutting down')
        self.StopAllRequests()
        return
    if _Debug:
        lg.args(_DebugLevel, newpacket=newpacket, result=result, queue=len(self.fileRequestQueue), remoteName=self.remoteName)
    packetID = global_id.CanonicalID(newpacket.PacketID)
    if (packetID not in self.fileRequestQueue) or (packetID not in self.fileRequestDict):
        # the queue may hold the same request keyed by the peer's latest idurl
        latest_idurl = global_id.ParseGlobalID(packetID, as_field=True)['idurl'].latest
        another_packetID = global_id.SubstitutePacketID(packetID, idurl=latest_idurl)
        if (another_packetID in self.fileRequestQueue) and (another_packetID in self.fileRequestDict):
            packetID = another_packetID
            lg.warn('found incoming %r with outdated packet id, corrected: %r' % (newpacket, another_packetID, ))
    if (packetID not in self.fileRequestQueue) or (packetID not in self.fileRequestDict):
        lg.err('unexpected %r received which is not in the downloading queue' % newpacket)
    else:
        f_down = self.fileRequestDict[packetID]
        if newpacket.Command == commands.Data():
            # the actual data is a signed packet wrapped inside the Payload
            wrapped_packet = signed.Unserialize(newpacket.Payload)
            if not wrapped_packet or not wrapped_packet.Valid():
                lg.err('incoming Data() packet is not valid')
                f_down.event('fail-received', newpacket)
                return
            f_down.event('valid-data-received', wrapped_packet)
        elif newpacket.Command == commands.Fail():
            f_down.event('fail-received', newpacket)
        else:
            lg.err('incorrect response command: %r' % newpacket)
def split_key_id(key_id):
    """
    Return "alias" and "creator" IDURL of that key as a tuple object.

    For example from input string:
        "[email protected]"
    output will be like that:
        "secret_key_xyz", "http://remote-server.net/bob.xml"
    """
    info = global_id.ParseGlobalID(key_id)
    alias = info['key_alias']
    idurl = info['idurl']
    # both parts must be present for a usable key id
    if not alias or not idurl:
        return None, None
    return alias, id_url.field(idurl)
def check_rename_my_keys():
    """
    Make sure all my keys have correct names according to known latest
    identities I have cached.

    For every known key, inspect the owner IDURL inside the key id; if that
    identity was rotated, schedule the key for renaming to a global id built
    from the latest IDURL, then apply all renames.
    """
    renames = {}
    # collect first, rename afterwards, so we never mutate while iterating
    for existing_key_id in list(my_keys.known_keys().keys()):
        parsed = global_id.ParseGlobalID(existing_key_id)
        owner = parsed['idurl']
        if owner.is_latest():
            continue
        renames[existing_key_id] = global_id.MakeGlobalID(
            idurl=owner.to_bin(),
            key_alias=parsed['key_alias'],
        )
    for old_key_id, new_key_id in renames.items():
        my_keys.rename_key(old_key_id, new_key_id)
def latest_key_id(key_id):
    """
    Create IDURL object from input key_id and return new key_id (with same
    key_alias) from that IDURL object.

    This way you can be sure that given key_id is pointing to the correct
    owner IDURL.
    """
    if not key_id:
        return key_id
    # the short form "master" always maps to my own master key id
    if key_id == 'master':
        return my_id.getGlobalID(key_alias='master')
    parsed = global_id.ParseGlobalID(key_id, as_field=True)
    if not parsed['idurl']:
        lg.err('invalid key_id: %r' % key_id)
        return key_id
    return global_id.MakeGlobalID(
        idurl=parsed['idurl'].to_bin(),
        key_alias=parsed['key_alias'],
    )
def _on_supplier_modified(self, evt):
    """
    When a supplier was replaced, re-send the public parts of all my own
    "share_" keys to the new supplier so shared data stays accessible.
    """
    from access import key_ring
    from crypt import my_keys
    from userid import global_id
    from userid import my_id
    new_idurl = evt.data['new_idurl']
    if not new_idurl:
        return
    # pick only share keys that I created myself
    selected = [
        known_key_id for known_key_id in my_keys.known_keys()
        if known_key_id.startswith('share_') and global_id.ParseGlobalID(known_key_id)['idurl'] == my_id.getLocalIDURL()
    ]
    for share_key_id in selected:
        key_ring.transfer_key(share_key_id, trusted_idurl=new_idurl, include_private=False)
def DeleteBackup(request):
    """
    Delete one or multiple backups on my machine.

    Targets come from request.Payload (newline-separated backup IDs) or, when
    the Payload is empty, from request.PacketID itself. Sends Ack on
    completion, Fail when the request is invalid or the supplier service is off.

    Fix: the "had unknown customer" warning was missing the ``%`` operator —
    the format string object was *called* with the argument tuple, raising
    TypeError whenever that path was hit; the arguments were also swapped
    relative to the message text (cf. the sibling ``DeleteFile`` handler).
    """
    if not driver.is_on('service_supplier'):
        return SendFail(request, 'supplier service is off')
    if request.Payload == '':
        ids = [request.PacketID]
    else:
        ids = request.Payload.split('\n')
    count = 0
    for bkpID in ids:
        glob_path = global_id.ParseGlobalID(bkpID)
        if not glob_path['path']:
            lg.warn("got incorrect backupID")
            SendFail(request, 'incorrect backupID')
            return
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = makeFilename(request.OwnerID, glob_path['path'])
        if filename == "":
            # fall back to the legacy filename layout
            filename = constructFilename(request.OwnerID, glob_path['path'])
            if not os.path.exists(filename):
                lg.warn("had unknown customer: %s or backupID: %s" % (nameurl.GetName(request.OwnerID), bkpID))
                return SendFail(request, 'not a customer, or file not found')
        if os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                count += 1
            except:
                # best-effort delete: log and continue with the remaining IDs
                lg.exc()
        elif os.path.isfile(filename):
            try:
                os.remove(filename)
                count += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
    SendAck(request)
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.DeleteBackup from [%s] with %d IDs, %d were removed" % (nameurl.GetName(request.OwnerID), len(ids), count))
def _on_supplier_modified(self, evt):
    """
    When a supplier was replaced, re-send the public parts of all my own
    "group_" keys to the new supplier so group data stays accessible.
    Transfer failures are logged via a Deferred errback.
    """
    from logs import lg
    from access import key_ring
    from crypt import my_keys
    from userid import global_id
    from userid import my_id
    if evt.data['new_idurl']:
        my_keys_to_be_republished = []
        for key_id in my_keys.known_keys():
            if not key_id.startswith('group_'):
                continue
            _glob_id = global_id.ParseGlobalID(key_id)
            if _glob_id['idurl'] == my_id.getIDURL():
                # only send public keys of my own groups
                my_keys_to_be_republished.append(key_id)
        for group_key_id in my_keys_to_be_republished:
            d = key_ring.transfer_key(group_key_id, trusted_idurl=evt.data['new_idurl'], include_private=False, include_signature=False)
            d.addErrback(lambda *a: lg.err('transfer key failed: %s' % str(*a)))
def LatestBackupID(backupID):
    """
    Rebuild ``backupID`` so that the owner IDURL inside it points to the
    latest known identity, keeping the key id, path and version parts intact.
    Returns the input unchanged when it is empty or cannot be parsed.
    """
    if not backupID:
        return backupID
    from userid import global_id
    parsed = global_id.ParseGlobalID(backupID, as_field=True)
    if not parsed['idurl']:
        from logs import lg
        lg.err('invalid backupID: %r' % backupID)
        return backupID
    return global_id.MakeGlobalID(
        key_id=parsed['key_id'],
        path=parsed['path'],
        version=parsed['version'],
    )
def on_my_message(self, message):
    """
    Handle a message typed by the local user in the chat channel.

    Supports two commands: "!add <user>" adds a user to the channel, and
    "!find <text>" / "!search <text>" starts a nickname lookup. Anything else
    is appended to the history and broadcast to every user in the channel.
    """
    if message.startswith('!add '):
        idurl = message[5:]
        if global_id.IsValidGlobalUser(idurl):
            # a global ID was given — reduce it to the plain idurl
            gid = global_id.ParseGlobalID(idurl)
            idurl = gid['idurl']
        if idurl.strip() and idurl not in self.users:
            self.users.append(idurl)
            name = nameurl.GetName(idurl)
            self.history.append({
                'text': 'user "%s" was added to the channel' % name,
                'name': '',
                'time': time.time(),
            })
        return
    if message.startswith('!find ') or message.startswith('!search '):
        _, _, inp = message.partition(' ')
        if not self.search_user_func:
            self.history.append({
                'text': 'search failed, method not defined',
                'name': '',
                'time': time.time(),
            })
            return
        # lookup is asynchronous; the result arrives via on_nickname_search_result
        self.search_user_func(inp).addBoth(self.on_nickname_search_result)
        self.history.append({
            'text': 'looking for "%s" ...' % inp,
            'name': '',
            'time': time.time(),
        })
        return
    self.history.append({
        'text': message,
        'name': 'you',
        'time': time.time(),
    })
    if self.send_message_func is not None:
        for to in self.users:
            # deliver from the reactor thread to keep twisted usage thread-safe
            reactor.callFromThread(self.send_message_func, to, message)