Пример #1
0
 def isRequestQueueEmpty(self, arg):
     """
     Return True when none of our suppliers still has the current
     backup ID sitting in its outgoing send queue.
     """
     sset = backup_matrix.suppliers_set()
     pos = 0
     while pos < sset.supplierCount:
         # any pending send for this backup means the queue is not empty
         if io_throttle.HasBackupIDInSendQueue(sset.suppliers[pos], self.currentBackupID):
             return False
         pos += 1
     return True
Пример #2
0
 def doPrepareNextBackup(self, arg):
     """
     Pop the next backup ID from the global queue, make sure its remote
     info structure covers every known block, and build the queue of
     missing blocks to work on.  Always finishes by firing the
     'backup-ready' event into the state machine.
     """
     global _BackupIDsQueue
     # clear block number from previous iteration
     self.currentBlockNumber = -1
     # check it, may be we already fixed all things
     if len(_BackupIDsQueue) == 0:
         self.workingBlocksQueue = []
         self.automat('backup-ready')
         return
     # take a first backup from queue to work on it
     backupID = _BackupIDsQueue.pop(0)
     # hoist repeated getter calls; remote_files() must return the shared
     # mutable dict (the original code relied on that too)
     remoteFiles = backup_matrix.remote_files()
     supplierCount = backup_matrix.suppliers_set().supplierCount
     # if remote data structure does not exist for this backup - create it,
     # this means it is a local-only backup
     if backupID not in remoteFiles:
         remoteFiles[backupID] = {}
         # we create empty remote info for every local block
         # range(0) should return []
         for blockNum in range(backup_matrix.local_max_block_numbers().get(backupID, -1) + 1):
             remoteFiles[backupID][blockNum] = {
                 'D': [0] * supplierCount,
                 'P': [0] * supplierCount }
     # detect missing blocks from remote info
     self.workingBlocksQueue = backup_matrix.ScanMissingBlocks(backupID)
     dhnio.Dprint(8, 'backup_rebuilder.doPrepareNextBackup [%s] working blocks: %s' % (backupID, str(self.workingBlocksQueue)))
     # find the correct max block number for this backup:
     # we can have remote and local files, take the biggest block number of both
     backupMaxBlock = max(backup_matrix.remote_max_block_numbers().get(backupID, -1),
                          backup_matrix.local_max_block_numbers().get(backupID, -1))
     # remote info may have fewer blocks - create empty info for the missing ones
     for blockNum in range(backupMaxBlock + 1):
         if blockNum in remoteFiles[backupID]:
             continue
         remoteFiles[backupID][blockNum] = {
             'D': [0] * supplierCount,
             'P': [0] * supplierCount }
     if self.currentBackupID:
         # clear requesting queue from previous task
         io_throttle.DeleteBackupRequests(self.currentBackupID)
     # really take the next backup
     self.currentBackupID = backupID
     # clear requesting queue, remove old packets for this backup, we will send them again
     io_throttle.DeleteBackupRequests(self.currentBackupID)
     self.automat('backup-ready')
Пример #3
0
    def isChanceToRebuild(self, arg):
        """
        Return True if at least one block in the working queue can be
        advanced with the Data/Parity pieces we hold locally.

        Blocks are checked in reverse order because rebuilding starts
        from the last block.
        """
        # hoisted out of the loop: the loop is synchronous, so the
        # current ecc map cannot change while we iterate
        currentECC = eccmap.Current()
        for blockIndex in range(len(self.workingBlocksQueue)-1, -1, -1):
            blockNumber = self.workingBlocksQueue[blockIndex]
            if currentECC.CanMakeProgress(
                    backup_matrix.GetLocalDataArray(self.currentBackupID, blockNumber),
                    backup_matrix.GetLocalParityArray(self.currentBackupID, blockNumber)):
                return True
        return False
Пример #4
0
 def _prepare_one_block():
     """
     Take the block at self.blockIndex and build a BlockRebuilder for it,
     then schedule packet identification; when the index has run below
     zero, schedule the finishing step instead.
     """
     if self.blockIndex >= 0:
         backupID = self.currentBackupID
         blockNum = self.workingBlocksQueue[self.blockIndex]
         self.currentBlockNumber = blockNum
         self.workBlock = block_rebuilder.BlockRebuilder(
             eccmap.Current(),  # self.eccMap
             backupID,
             blockNum,
             backup_matrix.suppliers_set(),
             backup_matrix.GetRemoteDataArray(backupID, blockNum),
             backup_matrix.GetRemoteParityArray(backupID, blockNum),
             backup_matrix.GetLocalDataArray(backupID, blockNum),
             backup_matrix.GetLocalParityArray(backupID, blockNum))
         reactor.callLater(0, _identify_block_packets)
     else:
         # no more blocks to rebuild
         reactor.callLater(0, _finish_all_blocks)
Пример #5
0
def SetSupplierList(supplierList):
    """
    Set a list of suppliers IDs, this is called by `p2p.central_service` 
    when a list of my suppliers comes from Central server.  
    Going from 2 to 4 suppliers (or whatever) invalidates all backups,
    all suppliers was changed because its number was changed.
    So we lost everything! Definitely suppliers number should be a sort of constant number.
    """
    if len(supplierList) != backup_matrix.suppliers_set().supplierCount:
        dhnio.Dprint(2, "backup_control.SetSupplierList got list of %d suppliers, but we have %d now!" % (len(supplierList), backup_matrix.suppliers_set().supplierCount))
        # cancel all tasks and jobs
        DeleteAllTasks()
        AbortAllRunningBackups()
        # remove all local files and all backups
        DeleteAllBackups()
        # erase all remote info
        backup_matrix.ClearRemoteInfo()
        # also erase local info
        backup_matrix.ClearLocalInfo()
        # restart backup_monitor
        backup_monitor.Restart()
        # restart db keeper to save the index on new machines
        backup_db_keeper.A('restart')
    # only single suppliers changed
    # need to erase info only for them 
    elif backup_matrix.suppliers_set().SuppliersChanged(supplierList):
        # take a list of suppliers positions that was changed
        changedSupplierNums = backup_matrix.suppliers_set().SuppliersChangedNumbers(supplierList)
        # notify io_throttle that we do not need these suppliers anymore
        for supplierNum in changedSupplierNums:
            dhnio.Dprint(2, "backup_control.SetSupplierList supplier %d changed: [%s]->[%s]" % (
                supplierNum, nameurl.GetName(backup_matrix.suppliers_set().suppliers[supplierNum]), nameurl.GetName(supplierList[supplierNum])))
            io_throttle.DeleteSuppliers([backup_matrix.suppliers_set().suppliers[supplierNum],])
            # BUGFIX: erase (set to 0) remote info for EVERY changed supplier;
            # this call used to sit outside the loop, so only the last
            # changed supplier's remote info was cleared
            backup_matrix.ClearSupplierRemoteInfo(supplierNum)
        # restart backup_monitor
        backup_monitor.Restart()
        # restart db keeper to save the index on all machines including a new one
        backup_db_keeper.A('restart')
    # finally save the list of current suppliers and clear all stats 
    backup_matrix.suppliers_set().UpdateSuppliers(supplierList)
Пример #6
0
 def doRequestAvailableBlocks(self, arg):
     """
     For every supplier, walk the working blocks queue in reverse and
     queue download requests for Data/Parity pieces that exist remotely
     but are missing locally.  Pieces missing both locally and remotely
     are counted in self.missingPackets.  Finishes by firing
     'requests-sent' with the total number of requests queued.
     """
     self.missingPackets = 0
     # self.missingSuppliers.clear()
     # here we want to request some packets before we start working to rebuild the missed blocks
     supplierSet = backup_matrix.suppliers_set()
     # per-supplier flags: truthy means the supplier is currently active/on line
     availableSuppliers = supplierSet.GetActiveArray()
     # remember how many requests we did on this iteration
     total_requests_count = 0
     # at the moment I do download everything I have available and needed
     # an empty string in the supplier IDs means the supplier list is
     # incomplete - bail out without requesting anything
     if '' in contacts.getSupplierIDs():
         self.automat('requests-sent', total_requests_count)
         return
     for supplierNum in range(supplierSet.supplierCount):
         supplierID = supplierSet.suppliers[supplierNum]
         requests_count = 0
         # we do requests in reverse order because we start rebuilding from the last block 
         # for blockNum in range(self.currentBlockNumber, -1, -1):
         for blockIndex in range(len(self.workingBlocksQueue)-1, -1, -1):
             blockNum = self.workingBlocksQueue[blockIndex] 
             # do not keep too many requests in the queue
             if io_throttle.GetRequestQueueLength(supplierID) >= 16:
                 break
             # also don't do too many requests at once
             if requests_count > 16:
                 break
             # remote/local presence arrays for this block, indexed by supplier position
             remoteData = backup_matrix.GetRemoteDataArray(self.currentBackupID, blockNum)
             remoteParity = backup_matrix.GetRemoteParityArray(self.currentBackupID, blockNum)
             localData = backup_matrix.GetLocalDataArray(self.currentBackupID, blockNum)
             localParity = backup_matrix.GetLocalParityArray(self.currentBackupID, blockNum)
             # if the remote Data exist and is available because supplier is on line,
             # but we do not have it on hand - do request  
             if localData[supplierNum] == 0:
                 PacketID = packetid.MakePacketID(self.currentBackupID, blockNum, supplierNum, 'Data')
                 if remoteData[supplierNum] == 1:
                     if availableSuppliers[supplierNum]:
                         # if supplier is not alive - we can't request from him           
                         # skip if this packet is already being requested
                         if not io_throttle.HasPacketInRequestQueue(supplierID, PacketID):
                             io_throttle.QueueRequestFile(
                                 self.FileReceived, 
                                 misc.getLocalID(), 
                                 PacketID, 
                                 misc.getLocalID(), 
                                 supplierID)
                             requests_count += 1
                 else:
                     # count this packet as missing
                     self.missingPackets += 1
                     # also mark this guy as one who dont have any data - nor local nor remote 
                     # self.missingSuppliers.add(supplierNum)
             # same for Parity
             if localParity[supplierNum] == 0:
                 PacketID = packetid.MakePacketID(self.currentBackupID, blockNum, supplierNum, 'Parity')
                 if remoteParity[supplierNum] == 1: 
                     if availableSuppliers[supplierNum]:
                         if not io_throttle.HasPacketInRequestQueue(supplierID, PacketID):
                             io_throttle.QueueRequestFile(
                                 self.FileReceived, 
                                 misc.getLocalID(), 
                                 PacketID, 
                                 misc.getLocalID(), 
                                 supplierID)
                             requests_count += 1
                 else:
                     # Parity missing both locally and remotely
                     self.missingPackets += 1
                     # self.missingSuppliers.add(supplierNum)
         total_requests_count += requests_count
     self.automat('requests-sent', total_requests_count)