def doSendHisFiles(self, *args, **kwargs):
    """
    Action method.

    Sends back to the customer the list of his files stored on this node.
    Prefers the customer's own "customer" key; if that key was not
    delivered to me yet, falls back to his "master" key.
    """
    customer_key_id = my_keys.make_key_id(alias='customer', creator_idurl=self.customer_idurl)
    if my_keys.is_key_registered(customer_key_id):
        key_id = customer_key_id
    else:
        # if "customer" key is not delivered to me yet, use his "master" key
        # NOTE: original code logged this only after sending; log first so the
        # fallback is visible even if send() raises
        lg.err('key %s is not registered, not able to send his files' % customer_key_id)
        key_id = my_keys.make_key_id(alias='master', creator_idurl=self.customer_idurl)
    # single send call instead of two duplicated branches differing only in key_id
    list_files.send(
        customer_idurl=self.customer_idurl,
        packet_id='%s:%s' % (customer_key_id, packetid.UniqueID(), ),
        format_type=settings.ListFilesFormat(),
        key_id=key_id,
        remote_idurl=self.customer_idurl,  # send to the customer
    )
def __init__(self, pathID, localPath=None, keyID=None):
    """
    Build a new backup task for the given remote path and notify listeners.
    """
    self.number = NewTaskNumber()  # index number for the task
    self.created = time.time()
    # clear every task field first, they get populated below
    for _field in ('backupID', 'pathID', 'fullGlobPath', 'fullCustomerID',
                   'customerGlobID', 'customerIDURL', 'remotePath',
                   'keyID', 'keyAlias'):
        setattr(self, _field, None)
    self.result_defer = Deferred()
    self.result_defer.addCallback(OnTaskExecutedCallback)
    self.result_defer.addErrback(OnTaskFailedCallback)
    path_parts = self.set_path_id(pathID)
    if not keyID:
        keyID = my_keys.make_key_id(alias=path_parts['key_alias'], creator_glob_id=path_parts['customer'])
    self.set_key_id(keyID)
    self.set_local_path(localPath)
    if _Debug:
        lg.out(_DebugLevel, 'new Task created: %r' % self)
    # let other components know a new backup task exists
    events.send('backup-task-created', data=dict(
        number=self.number,
        created=self.created,
        backup_id=self.backupID,
        key_id=self.keyID,
        path_id=self.pathID,
        customer_id=self.customerGlobID,
        path=self.remotePath,
        local_path=self.localPath,
        remote_path=self.fullGlobPath,
    ))
def _on_list_files(self, newpacket):
    """
    Handle an incoming ListFiles() request and send back the list of files.
    """
    from main import settings
    if newpacket.Payload != settings.ListFilesFormat():
        return False
    # TODO: perform validations before sending back list of files
    from supplier import list_files
    from crypt import my_keys
    from userid import global_id
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['key_id']:
        # packet id format is unknown
        # by default returning back all files from that recipient if he is a customer
        customer_idurl = newpacket.OwnerID
        key_id = my_keys.make_key_id(alias='customer', creator_idurl=customer_idurl)
    else:
        # customer id and data id can be recognized from packet id
        # return back list of files according to the request
        customer_idurl = list_files_global_id['idurl']
        key_id = list_files_global_id['key_id']
    list_files.send(
        customer_idurl=customer_idurl,
        packet_id=newpacket.PacketID,
        format_type=settings.ListFilesFormat(),
        key_id=key_id,
        remote_idurl=newpacket.OwnerID,  # send back to the requestor
    )
    return True
def set_path_id(self, pathID):
    """
    Parse the global path ID and store its components on the task.

    Returns the parsed parts dictionary.
    """
    parts = global_id.ParseGlobalID(pathID)
    self.pathID = pathID  # source path to backup
    self.customerGlobID = parts['customer']
    self.customerIDURL = parts['idurl']
    self.remotePath = parts['path']  # here it must be in 0/1/2 form
    key_alias = parts['key_alias']
    if key_alias:
        self.set_key_id(my_keys.make_key_id(alias=key_alias, creator_glob_id=self.customerGlobID))
    return parts
def doAuditUserMasterKey(self, *args, **kwargs):
    """
    Action method.

    Starts an audit of the remote user "master" private key and fires
    either `audit-ok` or `fail` event depending on the result.
    """
    master_key_id = my_keys.make_key_id(alias='master', creator_idurl=self.remote_idurl)
    d = key_ring.audit_private_key(master_key_id, self.remote_idurl)

    def _audit_done(audit_result):
        # same one-tuple result shape as before, so the Deferred chain is unchanged
        if audit_result:
            return (self.automat('audit-ok'), )
        return (self.automat('fail', Exception('remote user master key audit process failed')), )

    d.addCallback(_audit_done)
    d.addErrback(lambda err: self.automat('fail', err))
def generate_group_key(creator_id=None, label=None, key_size=4096, group_alias=None):
    """
    Create, sign and register a new key for group communications.

    When `group_alias` is given and such key already exists, the existing
    (latest) key id is returned instead of generating a new key.
    Otherwise a random unused "group_..." alias is picked.
    """
    group_key_id = None
    if not group_alias:
        # pick a random alias until we find one that is not registered yet
        while True:
            random_sample = os.urandom(24)
            group_alias = 'group_%s' % strng.to_text(key.HashMD5(random_sample, hexdigest=True))
            group_key_id = my_keys.make_key_id(alias=group_alias, creator_glob_id=creator_id)
            if not my_keys.is_key_registered(group_key_id):
                break
    else:
        group_key_id = my_keys.make_key_id(alias=group_alias, creator_glob_id=creator_id)
        if my_keys.is_key_registered(group_key_id):
            # key already exists for that alias - reuse it
            return my_keys.latest_key_id(group_key_id)
    label = label or ('group%s' % utime.make_timestamp())
    my_keys.generate_key(key_id=group_key_id, label=label, key_size=key_size)
    my_keys.sign_key(key_id=group_key_id, save=True)
    if _Debug:
        lg.args(_DebugLevel, group_key_id=group_key_id, group_alias=group_alias, creator_id=creator_id, label=label)
    return group_key_id
def __init__(self, pathID, localPath=None, keyID=None):
    """
    Build a new backup task for the given remote path.
    """
    self.number = NewTaskNumber()  # index number for the task
    self.created = time.time()
    self.backupID = None
    result = Deferred()
    result.addCallback(OnTaskExecutedCallback)
    result.addErrback(OnTaskFailedCallback)
    self.result_defer = result
    path_parts = self.set_path_id(pathID)
    if not keyID:
        keyID = my_keys.make_key_id(alias=path_parts['key_alias'], creator_glob_id=path_parts['customer'])
    self.set_key_id(keyID)
    self.set_local_path(localPath)
    if _Debug:
        lg.out(_DebugLevel, 'new Task created: %r' % self)
def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called by ``p2p.p2p_service`` when command "Files" were received from one
    of our suppliers.

    This is an answer from given supplier (after our request) to get a list of
    our files stored on his machine.

    Returns True when the list was decrypted and processed, False otherwise.
    """
    from p2p import p2p_service
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    # BUGFIX: was `num < -1`, which is never true since supplier_position()
    # signals "not found" with -1 - unknown suppliers slipped through
    if num < 0:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=my_keys.make_key_id(alias='customer', creator_idurl=my_id.getLocalIDURL(), ),
        )
        input_data = block.Data()
    except Exception:
        # narrowed from a bare except: still best-effort, but does not
        # swallow SystemExit/KeyboardInterrupt anymore
        lg.out(2, 'backup_control.IncomingSupplierListFiles ERROR decrypting data from %s' % newpacket)
        return False
    src = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    backups2remove, paths2remove, missed_backups = backup_matrix.ReadRawListFiles(num, src)
    list_files_orator.IncomingListFiles(newpacket)
    backup_matrix.SaveLatestRawListFiles(supplier_idurl, src)
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.IncomingSupplierListFiles from [%s]: paths2remove=%d, backups2remove=%d missed_backups=%d' % (
            nameurl.GetName(supplier_idurl), len(paths2remove), len(backups2remove), len(missed_backups)))
    if len(backups2remove) > 0:
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, '    also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
def doSendHisFiles(self, arg):
    """
    Action method.

    Sends back to the customer the list of his files, but only when his
    "customer" key is already registered on this node.
    """
    customer_key_id = my_keys.make_key_id(alias='customer', creator_idurl=self.customer_idurl)
    # guard clause: without the registered key we can not encrypt the reply
    if not my_keys.is_key_registered(customer_key_id):
        lg.warn('key %s is not registered, not able to send his files' % customer_key_id)
        return
    list_files.send(
        customer_idurl=self.customer_idurl,
        packet_id='%s:%s' % (customer_key_id, packetid.UniqueID(), ),
        format_type=settings.ListFilesFormat(),
        key_id=customer_key_id,
        remote_idurl=self.customer_idurl,  # send to the customer
    )
def doInit(self, *args, **kwargs):
    """
    Action method.

    Remembers the target queue parameters and prepares empty state for
    requesting list files from suppliers.
    """
    for _field in ('queue_id', 'start_sequence_id', 'end_sequence_id', 'archive_folder_path'):
        setattr(self, _field, kwargs[_field])
    queue_alias, owner_id, _ = global_id.SplitGlobalQueueID(self.queue_id)
    self.queue_alias = queue_alias
    self.queue_owner_id = owner_id
    self.queue_owner_idurl = global_id.glob2idurl(self.queue_owner_id)
    self.group_key_id = my_keys.make_key_id(alias=self.queue_alias, creator_glob_id=self.queue_owner_id)
    self.suppliers_list = []
    self.ecc_map = None
    self.correctable_errors = 0
    self.requested_list_files = {}
def doInit(self, *args, **kwargs):
    """
    Action method.

    Remembers the target queue and archive parameters and prepares empty
    state for the archive writing process.
    """
    self.queue_id = kwargs['queue_id']
    self.archive_info = kwargs['archive_info']
    self.archive_folder_path = kwargs['archive_folder_path']
    self.result_defer = kwargs.get('result_defer')
    queue_alias, owner_id, _ = global_id.SplitGlobalQueueID(self.queue_id)
    self.queue_alias = queue_alias
    self.queue_owner_id = owner_id
    self.queue_owner_idurl = global_id.glob2idurl(self.queue_owner_id)
    self.group_key_id = my_keys.make_key_id(alias=self.queue_alias, creator_glob_id=self.queue_owner_id)
    self.backup_job = None
    self.backup_max_block_num = None
    self.suppliers_list = []
    self.ecc_map = None
    self.correctable_errors = 0
    self.packets_out = {}
def on_list_files(newpacket):
    """
    Handle an incoming ListFiles() request and send back the list of files
    matching the query.

    The payload is either a serialized JSON query with an "items" list, or
    the plain legacy ListFilesFormat() marker meaning "everything".
    Returns True when a reply was sent, False when the query is invalid.
    """
    # BUGFIX: was initialized to {} which made the `is None` guard below
    # unreachable and crashed later with KeyError on an unrecognized payload
    json_query = None
    try:
        j = serialization.BytesToDict(newpacket.Payload, keys_to_text=True, values_to_text=True)
        j['items'][0]  # probe: a valid query must contain at least one item
        json_query = j
    except Exception:
        # not a JSON query - accept the legacy plain-format payload as "all files"
        if strng.to_text(newpacket.Payload) == settings.ListFilesFormat():
            json_query = {
                'items': [
                    '*',
                ],
            }
    if json_query is None:
        lg.exc('unrecognized ListFiles() query received')
        return False
    # TODO: perform validations before sending back list of files
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if list_files_global_id['key_id']:
        # customer id and data id can be recognized from packet id
        # return back list of files according to the request
        customer_idurl = list_files_global_id['idurl']
        key_id = list_files_global_id['key_id']
    else:
        # packet id format is unknown
        # by default returning back all files from that recipient if he is a customer
        customer_idurl = newpacket.OwnerID
        key_id = my_keys.make_key_id(alias='customer', creator_idurl=customer_idurl)
    key_id = my_keys.latest_key_id(key_id)
    list_files.send(
        customer_idurl=customer_idurl,
        packet_id=newpacket.PacketID,
        format_type=settings.ListFilesFormat(),
        key_id=key_id,
        remote_idurl=newpacket.OwnerID,  # send back to the requesting node
        query_items=json_query['items'],
    )
    return True
def verify_packet_ownership(newpacket, raise_exception=False):
    """
    At that point packet creator is already verified via signature, but creator
    could be not authorized to store data on that node. So based on owner ID
    decision must be made what to do with the packet.

    Returns IDURL of the user who should receive and Ack() or None if not authorized.
    """
    # SECURITY
    # decision tree below distinguishes "scenarios" 1-9; each authorized path
    # returns the IDURL to be Ack()-ed, each rejected path returns None
    owner_idurl = newpacket.OwnerID
    creator_idurl = newpacket.CreatorID
    owner_id = owner_idurl.to_id()
    creator_id = creator_idurl.to_id()
    # key alias and owner are carried inside the PacketID itself
    packet_key_alias, packet_owner_id, _ = packetid.SplitKeyOwnerData(newpacket.PacketID)
    # NOTE(review): both creator_idurl (positional) and creator_glob_id are passed
    # to make_key_id() here - confirm which one make_key_id() actually prefers
    packet_key_id = my_keys.latest_key_id(my_keys.make_key_id(packet_key_alias, creator_idurl, creator_glob_id=packet_owner_id))
    if _Debug:
        lg.args(_DebugLevel, owner_id=owner_id, creator_id=creator_id, packet_id=newpacket.PacketID, key_id_registered=my_keys.is_key_registered(packet_key_id))
    if newpacket.Command == commands.Data():
        # --- storing data ---
        if owner_idurl.to_bin() == creator_idurl.to_bin():
            if contactsdb.is_customer(creator_idurl):
                if _Debug:
                    lg.dbg(_DebugLevel, 'OK, scenario 1: customer is sending own data to own supplier')
                return owner_idurl
            lg.err('FAIL, scenario 6: user is not my customer but trying to store data')
            if raise_exception:
                raise Exception('non-authorized user is trying to store data on the supplier')
            return None
        if contactsdb.is_customer(creator_idurl):
            if _Debug:
                lg.dbg(_DebugLevel, 'OK, scenario 2: customer wants to store data for someone else on own supplier')
            # TODO: check that, why do we need that?
            return creator_idurl
        if packet_owner_id == owner_id:
            if contactsdb.is_customer(owner_idurl):
                if my_keys.is_key_registered(packet_key_id):
                    if _Debug:
                        lg.dbg(_DebugLevel, 'OK, scenario 3: another authorized user is sending data to customer to be stored on the supplier')
                    return creator_idurl
        lg.err('non-authorized user is trying to store data on the supplier')
        return None
    if newpacket.Command in [commands.DeleteFile(), commands.DeleteBackup(), ]:
        # --- erasing data ---
        if owner_idurl == creator_idurl:
            if contactsdb.is_customer(creator_idurl):
                if _Debug:
                    lg.dbg(_DebugLevel, 'OK, scenario 4: customer wants to remove already stored data on own supplier')
                return owner_idurl
            lg.err('FAIL, scenario 7: non-authorized user is trying to erase data owned by customer from the supplier')
            if raise_exception:
                raise Exception('non-authorized user is trying to erase data owned by customer from the supplier')
            return None
        if contactsdb.is_customer(creator_idurl):
            # TODO: check that, why do we need that?
            if _Debug:
                lg.dbg(_DebugLevel, 'OK, scenario 8: customer wants to erase existing data that belongs to someone else but stored on the supplier')
            return creator_idurl
        if packet_owner_id == owner_id:
            if contactsdb.is_customer(owner_idurl):
                if my_keys.is_key_registered(packet_key_id):
                    if _Debug:
                        lg.dbg(_DebugLevel, 'OK, scenario 5: another authorized user wants to remove already stored data from the supplier')
                    return creator_idurl
        lg.err('non-authorized user is trying to erase data on the supplier')
        return None
    if driver.is_enabled('service_proxy_server'):
        # proxy router traffic is deliberately ignored here, not rejected with an error
        if _Debug:
            lg.dbg(_DebugLevel, 'IGNORE, scenario 9: received Data() not authorized, but proxy router service was enabled')
        return None
    # TODO:
    # scenario 9: make possible to set "active" flag True/False for any key
    # this way customer can make virtual location available for other user but in read-only mode
    raise Exception('scenario not implemented yet, received %r' % newpacket)