def doRemoveTempFile(self, *args, **kwargs):
    """
    Action method.

    Discards the temporary file produced while restoring a block and, unless
    the user keeps local copies, also erases the local Data/Parity pieces of
    the current block for every supplier.

    Expects the temp file path in ``args[1]``; returns early if fewer than
    two positional arguments were passed.
    """
    # guard: filename is read from args[1], so require at least 2 positional args
    if not args or not len(args) > 1:
        return
    filename = args[1]
    if filename:
        # hand the temp file back to the tmpfile manager for disposal
        tmpfile.throw_out(filename, 'block restored')
    if settings.getBackupsKeepLocalCopies():
        # user wants to keep local copies - nothing more to remove
        return
    from storage import backup_matrix
    from storage import backup_rebuilder
    # do not touch local pieces while the rebuilder is actively working
    # on this very backup - it still needs them
    if not backup_rebuilder.ReadStoppedFlag():
        if backup_rebuilder.A().currentBackupID is not None:
            if backup_rebuilder.A().currentBackupID == self.backup_id:
                if _Debug:
                    lg.out(_DebugLevel, 'restore_worker.doRemoveTempFile SKIP because rebuilding in process')
                return
    count = 0
    for supplierNum in range(contactsdb.num_suppliers(customer_idurl=self.customer_idurl)):
        supplierIDURL = contactsdb.supplier(supplierNum, customer_idurl=self.customer_idurl)
        if not supplierIDURL:
            # skip empty supplier slots
            continue
        for dataORparity in ['Data', 'Parity', ]:
            # build the on-disk path of this block piece for that supplier
            packetID = packetid.MakePacketID(self.backup_id, self.block_number, supplierNum, dataORparity)
            customer, remotePath = packetid.SplitPacketID(packetID)
            filename = os.path.join(settings.getLocalBackupsDir(), customer, remotePath)
            if os.path.isfile(filename):
                try:
                    os.remove(filename)
                except:
                    lg.exc()
                    continue
                count += 1
    # report to the matrix that local pieces of this block changed
    backup_matrix.LocalBlockReport(self.backup_id, self.block_number, *args, **kwargs)
    if _Debug:
        lg.out(_DebugLevel, 'restore_worker.doRemoveTempFile %d files were removed' % count)
def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called when command "Files" were received from one of my suppliers.

    This is an answer from given supplier (after my request) to get a
    list of our files stored on his machine.

    Decrypts the payload with the private key named in
    ``list_files_global_id['key_id']``, feeds the raw list into the backup
    matrix and triggers follow-up cleanup / rebuilding requests.

    Returns ``True`` on success, ``False`` when the supplier is unknown,
    the key is missing or decryption fails.
    """
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    # BUGFIX: supplier_position() signals "not found" with -1, so the previous
    # check `num < -1` could never be True and unknown suppliers slipped through
    if num < 0:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    target_key_id = my_keys.latest_key_id(list_files_global_id['key_id'])
    if not my_keys.is_key_private(target_key_id):
        lg.warn('key %r not registered, not possible to decrypt ListFiles() packet from %r' % (target_key_id, supplier_idurl, ))
        return False
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=target_key_id,
        )
        input_data = block.Data()
    except:
        lg.err('failed decrypting data from packet %r received from %r' % (newpacket, supplier_idurl))
        return False
    list_files_raw = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    # NOTE(review): customer_idurl=None is passed here although a customer_idurl
    # was resolved above - looks intentional (let the matrix default it), confirm
    remote_files_changed, backups2remove, paths2remove, missed_backups = backup_matrix.process_raw_list_files(
        supplier_num=num,
        list_files_text_body=list_files_raw,
        customer_idurl=None,
        is_in_sync=None,
        auto_create=False,
    )
    list_files_orator.IncomingListFiles(newpacket)
    if remote_files_changed:
        # persist the latest raw list only when something actually changed
        backup_matrix.SaveLatestRawListFiles(supplier_idurl, list_files_raw)
    if _Debug:
        lg.args(_DebugLevel, supplier=nameurl.GetName(supplier_idurl), customer=nameurl.GetName(customer_idurl),
                backups2remove=len(backups2remove), paths2remove=len(paths2remove),
                files_changed=remote_files_changed, missed_backups=len(missed_backups), )
    if len(backups2remove) > 0:
        # ask suppliers to drop backups we no longer track
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        # some backups have missing pieces - schedule rebuilding
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, ' also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called by ``p2p.p2p_service`` when command "Files" were received from one
    of our suppliers.

    This is an answer from given supplier (after our request) to get a list of
    our files stored on his machine.

    Decrypts the payload with the local "customer" key, updates the backup
    matrix and fires the follow-up cleanup / rebuilding requests.

    Returns ``True`` on success, ``False`` when the supplier is unknown or
    the payload cannot be decrypted.
    """
    from p2p import p2p_service
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    # BUGFIX: supplier_position() signals "not found" with -1, so the previous
    # check `num < -1` could never be True and unknown suppliers slipped through
    if num < 0:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=my_keys.make_key_id(alias='customer', creator_idurl=my_id.getLocalIDURL(), ),
        )
        input_data = block.Data()
    except:
        lg.out(2, 'backup_control.IncomingSupplierListFiles ERROR decrypting data from %s' % newpacket)
        return False
    src = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    backups2remove, paths2remove, missed_backups = backup_matrix.ReadRawListFiles(num, src)
    list_files_orator.IncomingListFiles(newpacket)
    backup_matrix.SaveLatestRawListFiles(supplier_idurl, src)
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.IncomingSupplierListFiles from [%s]: paths2remove=%d, backups2remove=%d missed_backups=%d' % (
            nameurl.GetName(supplier_idurl), len(paths2remove), len(backups2remove), len(missed_backups)))
    if len(backups2remove) > 0:
        # ask suppliers to drop backups we no longer track
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        # some backups have missing pieces - schedule rebuilding
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, ' also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
def IncomingSupplierListFiles(newpacket):
    """
    Called by ``p2p.p2p_service`` when command "Files" were received from one
    of our suppliers.

    This is an answer from given supplier (after our request) to get a list of
    our files stored on his machine.

    The customer IDURL is derived from the packet ID when it carries a global
    ID, otherwise the local identity is assumed.

    Returns ``True`` on success, ``False`` when the supplier is unknown.
    """
    from p2p import p2p_service
    supplier_idurl = newpacket.OwnerID
    customer_idurl = my_id.getLocalID()
    if newpacket.PacketID.count(':') and newpacket.PacketID.count('@'):
        # packet ID looks like "user@host:path" - extract the customer part
        try:
            customer_idurl = global_id.GlobalUserToIDURL(newpacket.PacketID.split(':')[0])
        except:
            lg.exc()
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    # BUGFIX: supplier_position() signals "not found" with -1, so the previous
    # check `num < -1` could never be True and unknown suppliers slipped through
    if num < 0:
        lg.out(2, 'backup_control.IncomingSupplierListFiles ERROR unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    src = list_files.UnpackListFiles(newpacket.Payload, settings.ListFilesFormat())
    backups2remove, paths2remove, missed_backups = backup_matrix.ReadRawListFiles(num, src)
    list_files_orator.IncomingListFiles(newpacket)
    backup_matrix.SaveLatestRawListFiles(supplier_idurl, src)
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.IncomingSupplierListFiles from [%s]: paths2remove=%d, backups2remove=%d missed_backups=%d' % (
            nameurl.GetName(supplier_idurl), len(paths2remove), len(backups2remove), len(missed_backups)))
    if len(backups2remove) > 0:
        # ask suppliers to drop backups we no longer track
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        # some backups have missing pieces - schedule rebuilding
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, ' also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
def start(self):
    """
    Send the "init" event to the RAID worker and the backup rebuilder
    state machines, then report success.
    """
    from raid import raid_worker
    from storage import backup_rebuilder
    # initialize both automats in this fixed order: raid worker first
    for state_machine in (raid_worker, backup_rebuilder):
        state_machine.A('init')
    return True
def doCleanUpBackups(self, *args, **kwargs):
    # here we check all backups we have and remove the old one
    # user can set how many versions of that file or folder to keep
    # other versions (older) will be removed here
    from storage import backup_rebuilder
    # remember how many backups were rebuilt in the last iteration; used
    # below to decide whether to restart the monitor
    try:
        self.backups_progress_last_iteration = len(backup_rebuilder.A().backupsWasRebuilt)
    except:
        self.backups_progress_last_iteration = 0
    versionsToKeep = settings.getBackupsMaxCopies()
    if not contactsdb.num_suppliers():
        bytesUsed = 0
    else:
        # average bytes stored per supplier
        # NOTE(review): true division yields a float here - presumably fine
        # for the size comparisons below, confirm
        bytesUsed = backup_fs.sizebackups() / contactsdb.num_suppliers()
    bytesNeeded = diskspace.GetBytesFromString(settings.getNeededString(), 0)
    customerGlobID = my_id.getGlobalID()
    if _Debug:
        lg.out(_DebugLevel, 'backup_monitor.doCleanUpBackups backupsToKeep=%d used=%d needed=%d' % (versionsToKeep, bytesUsed, bytesNeeded))
    delete_count = 0
    # pass 1: enforce the max-copies limit per path, removing oldest versions
    if versionsToKeep > 0:
        for pathID, localPath, itemInfo in backup_fs.IterateIDs():
            pathID = global_id.CanonicalID(pathID)
            if backup_control.IsPathInProcess(pathID):
                # skip items that are being backed up right now
                continue
            versions = itemInfo.list_versions()
            # TODO: do we need to sort the list? it comes from a set, so must be sorted may be
            while len(versions) > versionsToKeep:
                # pop(0) drops the oldest version first
                backupID = packetid.MakeBackupID(customerGlobID, pathID, versions.pop(0))
                if _Debug:
                    lg.out(_DebugLevel, 'backup_monitor.doCleanUpBackups %d of %d backups for %s, so remove older %s' % (len(versions), versionsToKeep, localPath, backupID))
                backup_control.DeleteBackup(backupID, saveDB=False, calculate=False)
                delete_count += 1
    # we need also to fit used space into needed space (given from other users)
    # they trust us - do not need to take extra space from our friends
    # so remove oldest backups, but keep at least one for every folder - at least locally!
    # still our suppliers will remove our "extra" files by their "local_tester"
    # pass 2: if over quota, remove older versions (keeping the first) until
    # usage fits into the needed space
    if bytesNeeded <= bytesUsed:
        sizeOk = False
        for pathID, localPath, itemInfo in backup_fs.IterateIDs():
            if sizeOk:
                break
            pathID = global_id.CanonicalID(pathID)
            versions = itemInfo.list_versions(True, False)
            if len(versions) <= 1:
                # always keep at least one version per item
                continue
            for version in versions[1:]:
                backupID = packetid.MakeBackupID(customerGlobID, pathID, version)
                versionInfo = itemInfo.get_version_info(version)
                # versionInfo[1] holds the version size; skip zero/unknown sizes
                if versionInfo[1] > 0:
                    if _Debug:
                        lg.out(_DebugLevel, 'backup_monitor.doCleanUpBackups over use %d of %d, so remove %s of %s' % (bytesUsed, bytesNeeded, backupID, localPath))
                    backup_control.DeleteBackup(backupID, saveDB=False, calculate=False)
                    delete_count += 1
                    bytesUsed -= versionInfo[1]
                    if bytesNeeded > bytesUsed:
                        sizeOk = True
                        break
    if delete_count > 0:
        # deletions above used saveDB=False/calculate=False, so refresh
        # the catalog and persist it once here
        backup_fs.Scan()
        backup_fs.Calculate()
        backup_control.Save()
        from main import control
        control.request_update()
    collected = gc.collect()
    if self.backups_progress_last_iteration > 0:
        # rebuilding made progress last round - schedule another pass
        if _Debug:
            lg.out(_DebugLevel, 'backup_monitor.doCleanUpBackups sending "restart", backups_progress_last_iteration=%s' % self.backups_progress_last_iteration)
        reactor.callLater(1, self.automat, 'restart')  # @UndefinedVariable
    if _Debug:
        lg.out(_DebugLevel, 'backup_monitor.doCleanUpBackups collected %d objects' % collected)
def A(self, event, *args, **kwargs):
    """
    The backup monitor state machine dispatcher: routes ``event`` according
    to the current ``self.state`` and performs the transition actions.

    States: AT_STARTUP, READY, FIRE_HIRE, LIST_FILES, LIST_BACKUPS, REBUILDING.
    Always returns ``None``.
    """
    from customer import fire_hire
    from customer import data_sender
    from customer import list_files_orator
    from storage import backup_rebuilder
    from storage import index_synchronizer
    #---READY---
    if self.state == 'READY':
        if event == 'timer-5sec':
            self.doOverallCheckUp(*args, **kwargs)
        elif event == 'restart' or event == 'suppliers-changed' or (event == 'instant' and self.RestartAgain):
            self.state = 'FIRE_HIRE'
            self.RestartAgain = False
            self.doRememberSuppliers(*args, **kwargs)
            fire_hire.A('restart')
    #---LIST_FILES---
    elif self.state == 'LIST_FILES':
        if (event == 'list_files_orator.state' and args[0] == 'NO_FILES'):
            self.state = 'READY'
        elif (event == 'list_files_orator.state' and args[0] == 'SAW_FILES'):
            self.state = 'LIST_BACKUPS'
            index_synchronizer.A('pull')
            data_sender.A('restart')
            self.doPrepareListBackups(*args, **kwargs)
        elif event == 'restart':
            self.RestartAgain = True
        elif event == 'suppliers-changed':
            self.state = 'READY'
            self.RestartAgain = True
    #---LIST_BACKUPS---
    elif self.state == 'LIST_BACKUPS':
        if event == 'list-backups-done':
            self.state = 'REBUILDING'
            backup_rebuilder.A('start')
        elif event == 'restart':
            self.RestartAgain = True
        elif event == 'suppliers-changed':
            self.state = 'READY'
            self.RestartAgain = True
        # BUGFIX: removed a second, unreachable `elif event == 'restart'`
        # branch (transition to FIRE_HIRE) that was shadowed by the
        # `restart` branch above - execution is unchanged
    #---REBUILDING---
    elif self.state == 'REBUILDING':
        if (event == 'backup_rebuilder.state' and args[0] in ['DONE', 'STOPPED']):
            self.state = 'READY'
            self.doCleanUpBackups(*args, **kwargs)
            data_sender.A('restart')
        elif event == 'restart' or event == 'suppliers-changed':
            self.state = 'FIRE_HIRE'
            backup_rebuilder.SetStoppedFlag()
            fire_hire.A('restart')
    #---FIRE_HIRE---
    elif self.state == 'FIRE_HIRE':
        if event == 'suppliers-changed' and self.isSuppliersNumberChanged(*args, **kwargs):
            self.state = 'LIST_FILES'
            self.doDeleteAllBackups(*args, **kwargs)
            self.doRememberSuppliers(*args, **kwargs)
            list_files_orator.A('need-files')
        elif event == 'fire-hire-finished':
            self.state = 'LIST_FILES'
            list_files_orator.A('need-files')
        elif event == 'suppliers-changed' and not self.isSuppliersNumberChanged(*args, **kwargs):
            self.state = 'LIST_FILES'
            self.doUpdateSuppliers(*args, **kwargs)
            self.doRememberSuppliers(*args, **kwargs)
            list_files_orator.A('need-files')
        elif event == 'restart':
            self.RestartAgain = True
    #---AT_STARTUP---
    elif self.state == 'AT_STARTUP':
        if event == 'init':
            self.state = 'READY'
            self.RestartAgain = False
    return None
def doRemoveUnusedFiles(self, arg):
    # we want to remove files for this block
    # because we only need them during rebuilding
    if settings.getBackupsKeepLocalCopies() is True:
        # if user set this in settings - he want to keep the local files
        return
    # ... user do not want to keep local backups
    if settings.getGeneralWaitSuppliers() is True:
        from customer import fire_hire
        # but he want to be sure - all suppliers are green for a long time
        # (no offline suppliers and no supplier replaced in the last 24 hours)
        if len(contact_status.listOfflineSuppliers()) > 0 or time.time() - fire_hire.GetLastFireTime() < 24 * 60 * 60:
            # some people are not there or we do not have stable team yet
            # do not remove the files because we need it to rebuild
            return
    count = 0
    from storage import backup_matrix
    from storage import restore_monitor
    from storage import backup_rebuilder
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doRemoveUnusedFiles')
    for backupID in misc.sorted_backup_ids(backup_matrix.local_files().keys()):
        # skip any backup that is currently being restored or rebuilt
        if restore_monitor.IsWorking(backupID):
            if _Debug:
                lg.out(_DebugLevel, ' %s : SKIP, because restoring' % backupID)
            continue
        if backup_rebuilder.IsBackupNeedsWork(backupID):
            if _Debug:
                lg.out(_DebugLevel, ' %s : SKIP, because needs rebuilding' % backupID)
            continue
        if not backup_rebuilder.ReadStoppedFlag():
            if backup_rebuilder.A().currentBackupID is not None:
                if backup_rebuilder.A().currentBackupID == backupID:
                    if _Debug:
                        lg.out(_DebugLevel, ' %s : SKIP, because rebuilding is in process' % backupID)
                    continue
        # collect the block pieces that are safe to delete locally
        packets = backup_matrix.ScanBlocksToRemove(backupID, settings.getGeneralWaitSuppliers())
        for packetID in packets:
            customer, pathID = packetid.SplitPacketID(packetID)
            filename = os.path.join(settings.getLocalBackupsDir(), customer, pathID)
            if os.path.isfile(filename):
                try:
                    os.remove(filename)
                    # lg.out(6, ' ' + os.path.basename(filename))
                except:
                    lg.exc()
                    continue
                count += 1
    if _Debug:
        lg.out(_DebugLevel, ' %d files were removed' % count)
    # refresh the local-files view of the matrix after deletions
    backup_matrix.ReadLocalFiles()
def doRemoveUnusedFiles(self, *args, **kwargs):
    """
    Action method.
    """
    if not list_files_orator.is_synchronized():
        # always make sure we have a very fresh info about remote files before take any actions
        return
    # we want to remove files for this block
    # because we only need them during rebuilding
    if settings.getBackupsKeepLocalCopies() is True:
        # if user set this in settings - he want to keep the local files
        return
    # ... user do not want to keep local backups
    if settings.getGeneralWaitSuppliers() is True:
        from customer import fire_hire
        # but he want to be sure - all suppliers are green for a long time
        # (no offline suppliers and no supplier replaced in the last 24 hours)
        if len(online_status.listOfflineSuppliers()) > 0 or (time.time() - fire_hire.GetLastFireTime() < 24 * 60 * 60):
            # some people are not there or we do not have stable team yet
            # do not remove the files because we need it to rebuild
            return
    count = 0
    from storage import backup_matrix
    from storage import restore_monitor
    from storage import backup_rebuilder
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doRemoveUnusedFiles')
    for backupID in misc.sorted_backup_ids(list(backup_matrix.local_files().keys())):
        # skip any backup that is currently being restored or rebuilt
        if restore_monitor.IsWorking(backupID):
            if _Debug:
                lg.out(_DebugLevel, ' %s : SKIP, because restoring' % backupID)
            continue
        if backup_rebuilder.IsBackupNeedsWork(backupID):
            if _Debug:
                lg.out(_DebugLevel, ' %s : SKIP, because needs rebuilding' % backupID)
            continue
        if not backup_rebuilder.ReadStoppedFlag():
            if backup_rebuilder.A().currentBackupID is not None:
                if backup_rebuilder.A().currentBackupID == backupID:
                    if _Debug:
                        lg.out(_DebugLevel, ' %s : SKIP, because rebuilding is in process' % backupID)
                    continue
        if backupID not in backup_matrix.remote_files():
            # the whole backup is gone remotely - erase its local folder
            if _Debug:
                lg.out(_DebugLevel, ' going to erase %s because not found in remote files' % backupID)
            customer, pathID, version = packetid.SplitBackupID(backupID)
            dirpath = os.path.join(settings.getLocalBackupsDir(), customer, pathID, version)
            if os.path.isdir(dirpath):
                try:
                    count += bpio.rmdir_recursive(dirpath, ignore_errors=True)
                except:
                    lg.exc()
            continue
        # collect the block pieces that are safe to delete locally
        packets = backup_matrix.ScanBlocksToRemove(backupID, check_all_suppliers=settings.getGeneralWaitSuppliers())
        for packetID in packets:
            customer, pathID = packetid.SplitPacketID(packetID)
            filename = os.path.join(settings.getLocalBackupsDir(), customer, pathID)
            if os.path.isfile(filename):
                try:
                    os.remove(filename)
                except:
                    lg.exc()
                    continue
                count += 1
    if _Debug:
        lg.out(_DebugLevel, ' %d files were removed' % count)
    # refresh the local-files view of the matrix after deletions
    backup_matrix.ReadLocalFiles()
def start(self):
    """
    Send the "init" event to the backup rebuilder state machine and
    report success.
    """
    from storage import backup_rebuilder
    rebuilder_automat = backup_rebuilder.A
    rebuilder_automat('init')
    return True