def doOpenNextBackup(self, *args, **kwargs):
    """
    Action method.

    Chooses the next backup ID to work on from the global queue,
    ignoring IDs present in the global exclude list, and remembers it
    together with its customer IDURL on the state machine instance.
    Fires the "backup-ready" event when nothing is left to process.
    """
    global _BackupIDsQueue
    global _BackupIDsExclude
    # everything queued minus everything explicitly excluded
    pending = set(_BackupIDsQueue).difference(_BackupIDsExclude)
    if not pending:
        # nothing left to rebuild - all items were fixed or excluded
        self.automat('backup-ready')
        if _Debug:
            lg.out(_DebugLevel, 'backup_rebuilder.doOpenNextBackup SKIP, queue is empty')
        return
    # pick one of the remaining backups and start working on it
    self.currentBackupID = next(iter(pending))
    self.currentCustomerIDURL = packetid.CustomerIDURL(self.currentBackupID)
    if _Debug:
        lg.out(_DebugLevel, 'backup_rebuilder.doOpenNextBackup %s started, queue length: %d' % (
            self.currentBackupID, len(_BackupIDsQueue)))
def RequestDeleteBackup(BackupID):
    """
    Need to send a "DeleteBackup" command to all suppliers.

    Resolves the customer IDURL from the backup ID and sends the
    command to every non-empty supplier slot of that customer.
    """
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.RequestDeleteBackup with BackupID=" + str(BackupID))
    customer_idurl = packetid.CustomerIDURL(BackupID)
    for one_supplier_idurl in contactsdb.suppliers(customer_idurl=customer_idurl):
        # an empty string means this supplier position is not filled yet
        if one_supplier_idurl:
            SendDeleteBackup(one_supplier_idurl, BackupID)
def RequestDeleteListPaths(pathIDs):
    """
    Ask suppliers to erase a list of paths, grouped per customer.

    Each path ID carries a customer IDURL; the IDs are bucketed by
    customer first, then every supplier of that customer receives a
    single "DeleteListPaths" command with the whole bucket.
    """
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.RequestDeleteListPaths wish to delete %d paths" % len(pathIDs))
    # bucket the requested path IDs by their owning customer
    grouped = {}
    for one_path_id in pathIDs:
        grouped.setdefault(packetid.CustomerIDURL(one_path_id), set()).add(one_path_id)
    for one_customer_idurl, bucket in grouped.items():
        for one_supplier_idurl in contactsdb.suppliers(customer_idurl=one_customer_idurl):
            if not one_supplier_idurl:
                # skip empty supplier slots
                continue
            SendDeleteListPaths(one_supplier_idurl, list(bucket))
def _block_finished(self, result, params):
    """
    Callback fired when rebuilding of one block has finished.

    `result` is expected to be a 5-tuple
    (newData, localData, localParity, reconstructedData, reconstructedParity)
    and `params` carries (backupID, blockNumber, ...).  Reports every
    reconstructed Data/Parity piece to `backup_matrix`, wakes up the
    data sender, then schedules the next block (or finishes rebuilding
    on any failure).
    """
    if not result:
        lg.out(
            10,
            'backup_rebuilder._block_finished FAILED, blockIndex=%d' % self.blockIndex)
        reactor.callLater(0, self._finish_rebuilding)
        return
    try:
        newData, localData, localParity, reconstructedData, reconstructedParity = result
        _backupID = params[0]
        _blockNumber = params[1]
    except:
        # malformed task result - abort the whole rebuilding run
        lg.exc()
        reactor.callLater(0, self._finish_rebuilding)
        return
    if newData:
        from storage import backup_matrix
        from customer import data_sender
        count = 0
        customer_idurl = packetid.CustomerIDURL(_backupID)
        # BUG FIX: was `xrange()`, which does not exist in Python 3 and
        # raised NameError here; the rest of the codebase already uses range()
        for supplierNum in range(
                contactsdb.num_suppliers(customer_idurl=customer_idurl)):
            # report only pieces that exist locally AND were just reconstructed
            if localData[supplierNum] == 1 and reconstructedData[supplierNum] == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber,
                                              supplierNum, 'Data')
                count += 1
            if localParity[supplierNum] == 1 and reconstructedParity[supplierNum] == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber,
                                              supplierNum, 'Parity')
                count += 1
        self.blocksSucceed.append(_blockNumber)
        # wake up the data sender so the fresh pieces get delivered
        data_sender.A('new-data')
        lg.out(
            10,
            'backup_rebuilder._block_finished !!!!!! %d NEW DATA segments reconstructed, blockIndex=%d' % (count, self.blockIndex))
    else:
        lg.out(
            10,
            'backup_rebuilder._block_finished NO CHANGES, blockIndex=%d' % self.blockIndex)
    # move on to the previous block index and schedule the next task
    self.blockIndex -= 1
    reactor.callLater(0, self._start_one_block)
def doOpenNextBackup(self, arg):
    """
    Action method.

    Pops the first backup ID from the global queue and stores it (with
    its customer IDURL) on the automat instance; fires "backup-ready"
    instead when the queue is empty.
    """
    global _BackupIDsQueue
    if not _BackupIDsQueue:
        # queue is drained - everything was already fixed
        self.automat('backup-ready')
        if _Debug:
            lg.out(_DebugLevel, 'backup_rebuilder.doOpenNextBackup SKIP, queue is empty')
        return
    # take the first backup from the queue and start working on it
    next_backup_id = _BackupIDsQueue.pop(0)
    self.currentBackupID = next_backup_id
    self.currentCustomerIDURL = packetid.CustomerIDURL(next_backup_id)
    if _Debug:
        lg.out(_DebugLevel, 'backup_rebuilder.doOpenNextBackup %s started, queue length: %d' % (
            self.currentBackupID, len(_BackupIDsQueue)))
def _block_finished(self, result, params):
    """
    Callback fired when rebuilding of one block has finished.

    `result` is expected to be a 5-tuple
    (newData, localData, localParity, reconstructedData, reconstructedParity)
    and `params` carries (backupID, blockNumber, ...).  Reconstructed
    pieces are reported to `backup_matrix`, then the next block is
    scheduled; on any failure the whole run is finished via
    `_finish_rebuilding()`.
    """
    if not result:
        lg.out(10, 'backup_rebuilder._block_finished FAILED, blockIndex=%d' % self.blockIndex)
        reactor.callLater(0, self._finish_rebuilding)  # @UndefinedVariable
        return
    try:
        newData, localData, localParity, reconstructedData, reconstructedParity = result
        _backupID = params[0]
        _blockNumber = params[1]
    except:
        # malformed task result - abort the whole rebuilding run
        lg.exc()
        reactor.callLater(0, self._finish_rebuilding)  # @UndefinedVariable
        return
    lg.out(10, 'backup_rebuilder._block_finished backupID=%r blockNumber=%r newData=%r' % (
        _backupID, _blockNumber, newData))
    lg.out(10, ' localData=%r localParity=%r' % (localData, localParity))
    if newData:
        from storage import backup_matrix
        from customer import data_sender
        count = 0
        customer_idurl = packetid.CustomerIDURL(_backupID)
        for supplierNum in range(contactsdb.num_suppliers(customer_idurl=customer_idurl)):
            # defensive probe: the task may return arrays shorter than the
            # current supplier count - skip such positions instead of crashing
            try:
                localData[supplierNum]
                localParity[supplierNum]
                reconstructedData[supplierNum]
                reconstructedParity[supplierNum]
            except:
                lg.err('invalid result from the task: %s' % repr(params))
                lg.out(10, 'result is %s' % repr(result))
                lg.exc()
                continue
            # report only pieces that exist locally AND were just reconstructed
            if localData[supplierNum] == 1 and reconstructedData[supplierNum] == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber, supplierNum, 'Data')
                count += 1
            if localParity[supplierNum] == 1 and reconstructedParity[supplierNum] == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber, supplierNum, 'Parity')
                count += 1
        self.blocksSucceed.append(_blockNumber)
        # wake up the data sender so the fresh pieces get delivered
        data_sender.A('new-data')
        lg.out(10, ' !!!!!! %d NEW DATA segments reconstructed, blockIndex=%d' % (
            count, self.blockIndex))
    else:
        lg.out(10, ' NO CHANGES, blockIndex=%d' % self.blockIndex)
    # move on to the previous block index and schedule the next task
    self.blockIndex -= 1
    reactor.callLater(0, self._start_one_block)  # @UndefinedVariable
def RequestDeleteBackup(BackupID):
    """
    Need to send a "DeleteBackup" command to all suppliers.

    The customer IDURL is derived from the backup ID itself; empty
    supplier slots are skipped.
    """
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.RequestDeleteBackup with BackupID=" + str(BackupID))
    owner_idurl = packetid.CustomerIDURL(BackupID)
    for one_supplier_idurl in contactsdb.suppliers(customer_idurl=owner_idurl):
        if one_supplier_idurl:
            SendDeleteBackup(one_supplier_idurl, BackupID)
def doScanAndQueue(self, *args, **kwargs):
    """
    Action method.

    Scans local files of all known backups and queues every piece that
    still must be delivered into the `io_throttle` send queue.  Fires
    the "scan-done" event with the number of queued pieces.
    """
    global _ShutdownFlag
    # do nothing while the service is shutting down
    if _ShutdownFlag:
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue _ShutdownFlag is True\n')
        self.automat('scan-done', 0)
        return
    from storage import backup_matrix
    from storage import backup_fs
    # refresh the matrix of local files before scanning
    backup_matrix.ReadLocalFiles()
    progress = 0
    # if _Debug:
    #     lg.out(_DebugLevel, 'data_sender.doScanAndQueue with %d known customers' % len(contactsdb.known_customers()))
    for customer_idurl in contactsdb.known_customers():
        if customer_idurl != my_id.getIDURL():
            # TODO: check that later
            # only own data is delivered from here at the moment
            if _Debug:
                lg.out(_DebugLevel + 2, 'data_sender.doScanAndQueue skip sending to another customer: %r' % customer_idurl)
            continue
        known_suppliers = contactsdb.suppliers(customer_idurl)
        if not known_suppliers or id_url.is_some_empty(known_suppliers):
            # can not deliver anything while the supplier list is incomplete
            if _Debug:
                lg.out(_DebugLevel, 'data_sender.doScanAndQueue found empty supplier(s) for customer %r, SKIP' % customer_idurl)
            continue
        known_backups = misc.sorted_backup_ids(list(backup_matrix.local_files().keys()), True)
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue found %d known suppliers for customer %r with %d backups' % (len(known_suppliers), customer_idurl, len(known_backups)))
        for backupID in known_backups:
            # skip backups that belong to a different customer
            this_customer_idurl = packetid.CustomerIDURL(backupID)
            if this_customer_idurl != customer_idurl:
                continue
            customerGlobalID, pathID, _ = packetid.SplitBackupID(backupID, normalize_key_alias=True)
            keyAlias = packetid.KeyAlias(customerGlobalID)
            item = backup_fs.GetByID(pathID, iterID=backup_fs.fsID(customer_idurl, keyAlias))
            if not item:
                # the path was removed from the catalog - nothing to send
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r path not exist in catalog' % backupID)
                continue
            if item.key_id and customerGlobalID and customerGlobalID != item.key_id:
                # key mismatch between the backup ID and the catalog item
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r key is different in the catalog: %r ~ %r' % (
                        backupID, customerGlobalID, item.key_id, ))
                continue
            packetsBySupplier = backup_matrix.ScanBlocksToSend(backupID, limit_per_supplier=None)
            total_for_customer = sum([len(v) for v in packetsBySupplier.values()])
            if total_for_customer:
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue sending %r for customer %r with %d pieces' % (item.name(), customer_idurl, total_for_customer))
                for supplierNum in packetsBySupplier.keys():
                    # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                    # resolve the supplier by its position; out-of-range means unknown
                    if supplierNum >= 0 and supplierNum < len(known_suppliers):
                        supplier_idurl = known_suppliers[supplierNum]
                    else:
                        supplier_idurl = None
                    if not supplier_idurl:
                        lg.warn('skip sending, unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r' % (supplierNum, backupID, customer_idurl))
                        continue
                    for packetID in packetsBySupplier[supplierNum]:
                        # sanity check: packet ID must match this backup and supplier position
                        backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                        if backupID_ != backupID:
                            lg.warn('skip sending, unexpected backupID supplierNum=%s for %s, customer_idurl=%r' % (packetID, backupID, customer_idurl))
                            continue
                        if supplierNum_ != supplierNum:
                            lg.warn('skip sending, unexpected supplierNum %s for %s, customer_idurl=%r' % (packetID, backupID, customer_idurl))
                            continue
                        if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                            # already queued earlier - do not queue twice
                            if _Debug:
                                lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s already in sending queue for %r' % (packetID, supplier_idurl))
                            continue
                        # '---' suffix in the per-supplier statistic marks repeated failures
                        latest_progress = self.statistic.get(supplier_idurl, {}).get('latest', '')
                        if len(latest_progress) >= 3 and latest_progress.endswith('---'):
                            if _Debug:
                                lg.out(_DebugLevel + 2, 'data_sender.doScanAndQueue skip sending to supplier %r because multiple packets already failed' % supplier_idurl)
                            continue
                        if not io_throttle.OkToSend(supplier_idurl):
                            if _Debug:
                                lg.out(_DebugLevel + 2, 'data_sender.doScanAndQueue skip sending, queue is busy for %r' % supplier_idurl)
                            continue
                        customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                        filename = os.path.join(
                            settings.getLocalBackupsDir(),
                            customerGlobalID,
                            pathID,
                        )
                        if not os.path.isfile(filename):
                            if _Debug:
                                lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s is not a file' % filename)
                            continue
                        # itemInfo is captured by the callbacks below via default-free
                        # closure - both callbacks forward it to the report methods
                        itemInfo = item.to_json()
                        if io_throttle.QueueSendFile(
                            filename,
                            packetID,
                            supplier_idurl,
                            my_id.getIDURL(),
                            lambda packet, ownerID, packetID: self._packetAcked(packet, ownerID, packetID, itemInfo),
                            lambda remoteID, packetID, why: self._packetFailed(remoteID, packetID, why, itemInfo),
                        ):
                            progress += 1
                            if _Debug:
                                lg.out(_DebugLevel, 'data_sender.doScanAndQueue for %r put %s in the queue progress=%d' % (item.name(), packetID, progress, ))
                        else:
                            if _Debug:
                                lg.out(_DebugLevel, 'data_sender.doScanAndQueue io_throttle.QueueSendFile FAILED %s' % packetID)
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue progress=%s' % progress)
    self.automat('scan-done', progress)
def doScanAndQueue(self, *args, **kwargs):
    """
    Action method.

    Scans local files of all known backups and queues every piece that
    still must be delivered into the `io_throttle` send queue.  Always
    fires the "scan-done" event; when `_Debug` is on, a trace is
    written to `data_sender.log` in the logs directory.
    """
    global _ShutdownFlag
    if _Debug:
        lg.out(
            _DebugLevel,
            'data_sender.doScanAndQueue _ShutdownFlag=%r' % _ShutdownFlag)
    log = None
    if _Debug:
        # explicit utf-8: idurls and path IDs may contain non-ascii characters
        # and the platform default codec could fail on them
        log = open(os.path.join(settings.LogsDir(), 'data_sender.log'), 'w', encoding='utf-8')
        log.write(u'doScanAndQueue %s\n' % time.asctime())
    try:
        if _ShutdownFlag:
            if _Debug:
                log.write(u'doScanAndQueue _ShutdownFlag is True\n')
            self.automat('scan-done')
            return
        for customer_idurl in contactsdb.known_customers():
            if '' in contactsdb.suppliers(customer_idurl):
                # at least one supplier slot is empty - skip this customer
                continue
            from storage import backup_matrix
            for backupID in misc.sorted_backup_ids(
                    list(backup_matrix.local_files().keys()), True):
                this_customer_idurl = packetid.CustomerIDURL(backupID)
                if this_customer_idurl != customer_idurl:
                    continue
                packetsBySupplier = backup_matrix.ScanBlocksToSend(backupID)
                if _Debug:
                    log.write(u'%s\n' % packetsBySupplier)
                for supplierNum in packetsBySupplier.keys():
                    supplier_idurl = contactsdb.supplier(
                        supplierNum, customer_idurl=customer_idurl)
                    if not supplier_idurl:
                        lg.warn(
                            'unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%s' % (supplierNum, backupID, customer_idurl))
                        continue
                    for packetID in packetsBySupplier[supplierNum]:
                        # sanity check: packet ID must match this backup and supplier
                        backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                        if backupID_ != backupID:
                            lg.warn(
                                'unexpected backupID supplierNum=%s for %s, customer_idurl=%s' % (packetID, backupID, customer_idurl))
                            continue
                        if supplierNum_ != supplierNum:
                            lg.warn(
                                'unexpected supplierNum %s for %s, customer_idurl=%s' % (packetID, backupID, customer_idurl))
                            continue
                        if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                            if _Debug:
                                log.write(u'%s already in sending queue for %s\n' % (packetID, supplier_idurl))
                            continue
                        if not io_throttle.OkToSend(supplier_idurl):
                            if _Debug:
                                log.write(u'skip, not ok to send %s\n' % supplier_idurl)
                            continue
                        # BUG FIX: SplitPacketID() used to be called twice in a
                        # row with the same input; one call is enough
                        customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                        filename = os.path.join(
                            settings.getLocalBackupsDir(),
                            customerGlobalID,
                            pathID,
                        )
                        if not os.path.isfile(filename):
                            if _Debug:
                                log.write(u'%s is not a file\n' % filename)
                            continue
                        if io_throttle.QueueSendFile(
                            filename,
                            packetID,
                            supplier_idurl,
                            my_id.getLocalID(),
                            self._packetAcked,
                            self._packetFailed,
                        ):
                            if _Debug:
                                log.write(u'io_throttle.QueueSendFile %s\n' % packetID)
                        else:
                            if _Debug:
                                log.write(u'io_throttle.QueueSendFile FAILED %s\n' % packetID)
        self.automat('scan-done')
    finally:
        # BUG FIX: close the debug log even when scanning raises - the file
        # handle was leaked before on any exception in the loop above
        if log is not None:
            log.flush()
            log.close()
def doScanAndQueue(self, *args, **kwargs):
    """
    Action method.

    Scans local files of all known backups and queues every piece that
    still must be delivered into the `io_throttle` send queue.  Fires
    the "scan-done" event with the number of queued pieces.
    """
    global _ShutdownFlag
    # do nothing while the service is shutting down
    if _ShutdownFlag:
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue _ShutdownFlag is True\n')
        self.automat('scan-done', 0)
        return
    from storage import backup_matrix
    from storage import backup_fs
    # refresh the matrix of local files before scanning
    backup_matrix.ReadLocalFiles()
    progress = 0
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue with %d known customers' % len(contactsdb.known_customers()))
    for customer_idurl in contactsdb.known_customers():
        if customer_idurl != my_id.getLocalID():
            # TODO: check that later
            # only own data is delivered from here at the moment
            if _Debug:
                lg.out(_DebugLevel + 6, 'data_sender.doScanAndQueue skip sending to another customer: %r' % customer_idurl)
            continue
        known_suppliers = contactsdb.suppliers(customer_idurl)
        if not known_suppliers or id_url.is_some_empty(known_suppliers):
            # can not deliver anything while the supplier list is incomplete
            if _Debug:
                lg.out(_DebugLevel, 'data_sender.doScanAndQueue found empty supplier(s) for customer %r, SKIP' % customer_idurl)
            continue
        known_backups = misc.sorted_backup_ids(list(backup_matrix.local_files().keys()), True)
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue found %d known suppliers for customer %r with %d backups' % (
                len(known_suppliers), customer_idurl, len(known_backups)))
        for backupID in known_backups:
            # skip backups that belong to a different customer
            this_customer_idurl = packetid.CustomerIDURL(backupID)
            if this_customer_idurl != customer_idurl:
                continue
            customerGlobalID, pathID, _ = packetid.SplitBackupID(backupID, normalize_key_alias=True)
            item = backup_fs.GetByID(pathID, iterID=backup_fs.fsID(customer_idurl=customer_idurl))
            if not item:
                # the path was removed from the catalog - nothing to send
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r path not exist in catalog' % backupID)
                continue
            if item.key_id and customerGlobalID and customerGlobalID != item.key_id:
                # key mismatch between the backup ID and the catalog item
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r key is different in the catalog' % backupID)
                continue
            packetsBySupplier = backup_matrix.ScanBlocksToSend(backupID, limit_per_supplier=None)
            total_for_customer = sum([len(v) for v in packetsBySupplier.values()])
            if _Debug:
                lg.out(_DebugLevel, 'data_sender.doScanAndQueue to be delivered for customer %r : %d' % (customer_idurl, total_for_customer))
            for supplierNum in packetsBySupplier.keys():
                # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                # resolve the supplier by its position; out-of-range means unknown
                if supplierNum >= 0 and supplierNum < len(known_suppliers):
                    supplier_idurl = known_suppliers[supplierNum]
                else:
                    supplier_idurl = None
                if not supplier_idurl:
                    lg.warn('skip sending, unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r' % (
                        supplierNum, backupID, customer_idurl))
                    continue
                for packetID in packetsBySupplier[supplierNum]:
                    # sanity check: packet ID must match this backup and supplier position
                    backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                    if backupID_ != backupID:
                        lg.warn('skip sending, unexpected backupID supplierNum=%s for %s, customer_idurl=%r' % (
                            packetID, backupID, customer_idurl))
                        continue
                    if supplierNum_ != supplierNum:
                        lg.warn('skip sending, unexpected supplierNum %s for %s, customer_idurl=%r' % (
                            packetID, backupID, customer_idurl))
                        continue
                    if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                        # already queued earlier - do not queue twice
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s already in sending queue for %r' % (packetID, supplier_idurl))
                        continue
                    if not io_throttle.OkToSend(supplier_idurl):
                        if _Debug:
                            lg.out(_DebugLevel + 6, 'data_sender.doScanAndQueue skip sending, queue is busy for %r\n' % supplier_idurl)
                        continue
                    # customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                    # tranByID = gate.transfers_out_by_idurl().get(supplier_idurl, [])
                    # if len(tranByID) > 3:
                    #     log.write(u'transfers by %s: %d\n' % (supplier_idurl, len(tranByID)))
                    #     continue
                    customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                    filename = os.path.join(
                        settings.getLocalBackupsDir(),
                        customerGlobalID,
                        pathID,
                    )
                    if not os.path.isfile(filename):
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s is not a file\n' % filename)
                        continue
                    if io_throttle.QueueSendFile(
                        filename,
                        packetID,
                        supplier_idurl,
                        my_id.getIDURL(),
                        self._packetAcked,
                        self._packetFailed,
                    ):
                        progress += 1
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue put %s in the queue progress=%d' % (packetID, progress, ))
                    else:
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue io_throttle.QueueSendFile FAILED %s' % packetID)
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue progress=%s' % progress)
    self.automat('scan-done', progress)
def doScanAndQueue(self, *args, **kwargs):
    """
    Action method.

    Scans local files of all known backups and queues every piece that
    still must be delivered into the `io_throttle` send queue, then
    fires the "scan-done" event.
    """
    global _ShutdownFlag
    if _Debug:
        lg.out(
            _DebugLevel,
            'data_sender.doScanAndQueue _ShutdownFlag=%r' % _ShutdownFlag)
    # do nothing while the service is shutting down
    if _ShutdownFlag:
        if _Debug:
            lg.out(_DebugLevel, ' _ShutdownFlag is True\n')
        self.automat('scan-done')
        return
    from storage import backup_matrix
    # refresh the matrix of local files before scanning
    backup_matrix.ReadLocalFiles()
    progress = 0
    for customer_idurl in contactsdb.known_customers():
        if customer_idurl != my_id.getLocalIDURL():
            # TODO: check that later
            # only own data is delivered from here at the moment
            if _Debug:
                lg.out(
                    _DebugLevel + 6,
                    ' skip sending to another customer: %r' % customer_idurl)
            continue
        known_suppliers = contactsdb.suppliers(customer_idurl)
        # empty slot may be stored as bytes or text - check both forms
        if b'' in known_suppliers or '' in known_suppliers:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' found empty supplier for customer %r, SKIP' % customer_idurl)
            continue
        known_backups = misc.sorted_backup_ids(
            list(backup_matrix.local_files().keys()), True)
        if _Debug:
            lg.out(
                _DebugLevel,
                ' found %d known suppliers for customer %r with %d backups' % (len(known_suppliers), customer_idurl, len(known_backups)))
        for backupID in known_backups:
            # skip backups that belong to a different customer
            this_customer_idurl = packetid.CustomerIDURL(backupID)
            if this_customer_idurl != customer_idurl:
                continue
            packetsBySupplier = backup_matrix.ScanBlocksToSend(backupID)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' packets for customer %r : %s' % (customer_idurl, packetsBySupplier))
            for supplierNum in packetsBySupplier.keys():
                # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                # resolve the supplier by its position; out-of-range is logged and skipped
                try:
                    supplier_idurl = known_suppliers[supplierNum]
                except:
                    lg.exc()
                    continue
                if not supplier_idurl:
                    lg.warn(
                        'unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r' % (supplierNum, backupID, customer_idurl))
                    continue
                for packetID in packetsBySupplier[supplierNum]:
                    # sanity check: packet ID must match this backup and supplier position
                    backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                    if backupID_ != backupID:
                        lg.warn(
                            'unexpected backupID supplierNum=%s for %s, customer_idurl=%r' % (packetID, backupID, customer_idurl))
                        continue
                    if supplierNum_ != supplierNum:
                        lg.warn(
                            'unexpected supplierNum %s for %s, customer_idurl=%r' % (packetID, backupID, customer_idurl))
                        continue
                    if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                        # already queued earlier - do not queue twice
                        if _Debug:
                            lg.out(
                                _DebugLevel,
                                ' %s already in sending queue for %r' % (packetID, supplier_idurl))
                        continue
                    if not io_throttle.OkToSend(supplier_idurl):
                        if _Debug:
                            lg.out(
                                _DebugLevel + 6,
                                ' skip, not ok to send %s\n' % supplier_idurl)
                        continue
                    customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                    # tranByID = gate.transfers_out_by_idurl().get(supplier_idurl, [])
                    # if len(tranByID) > 3:
                    #     log.write(u'transfers by %s: %d\n' % (supplier_idurl, len(tranByID)))
                    #     continue
                    customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                    filename = os.path.join(
                        settings.getLocalBackupsDir(),
                        customerGlobalID,
                        pathID,
                    )
                    if not os.path.isfile(filename):
                        if _Debug:
                            lg.out(_DebugLevel, ' %s is not a file\n' % filename)
                        continue
                    if io_throttle.QueueSendFile(
                        filename,
                        packetID,
                        supplier_idurl,
                        my_id.getLocalID(),
                        self._packetAcked,
                        self._packetFailed,
                    ):
                        progress += 1
                        if _Debug:
                            lg.out(
                                _DebugLevel,
                                ' io_throttle.QueueSendFile %s' % packetID)
                    else:
                        if _Debug:
                            lg.out(
                                _DebugLevel,
                                ' io_throttle.QueueSendFile FAILED %s' % packetID)
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue progress=%s' % progress)
    self.automat('scan-done')