def _block_finished(self, result, params):
    """
    Callback fired when the rebuilding task for one block has completed.

    :param result: tuple of (newData, localData, localParity,
        reconstructedData, reconstructedParity) from the rebuild task,
        or a falsy value on failure
    :param params: sequence where params[0] is the backup ID and
        params[1] is the block number
    Reports any newly reconstructed Data/Parity segments to the backup
    matrix, triggers the data sender, then schedules the next block.
    """
    if not result:
        lg.out(
            10,
            'backup_rebuilder._block_finished FAILED, blockIndex=%d' %
            self.blockIndex)
        # rebuilding failed for this block - stop the whole cycle
        reactor.callLater(0, self._finish_rebuilding)
        return
    try:
        newData, localData, localParity, reconstructedData, reconstructedParity = result
        _backupID = params[0]
        _blockNumber = params[1]
    except:
        lg.exc()
        reactor.callLater(0, self._finish_rebuilding)
        return
    if newData:
        from storage import backup_matrix
        from customer import data_sender
        count = 0
        customer_idurl = packetid.CustomerIDURL(_backupID)
        # BUGFIX: was xrange() (Python 2 only); range() matches the
        # newer version of this method elsewhere in the project
        for supplierNum in range(
                contactsdb.num_suppliers(customer_idurl=customer_idurl)):
            # a segment counts as reconstructed only if it is present
            # locally AND was produced by the rebuild task
            if localData[supplierNum] == 1 and reconstructedData[
                    supplierNum] == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber,
                                              supplierNum, 'Data')
                count += 1
            if localParity[supplierNum] == 1 and reconstructedParity[
                    supplierNum] == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber,
                                              supplierNum, 'Parity')
                count += 1
        self.blocksSucceed.append(_blockNumber)
        data_sender.A('new-data')
        lg.out(
            10,
            'backup_rebuilder._block_finished !!!!!! %d NEW DATA segments reconstructed, blockIndex=%d' %
            (count, self.blockIndex))
    else:
        lg.out(
            10,
            'backup_rebuilder._block_finished NO CHANGES, blockIndex=%d' %
            self.blockIndex)
    # move on to the previous block index and continue the cycle
    self.blockIndex -= 1
    reactor.callLater(0, self._start_one_block)
def _block_finished(self, result, params):
    """
    Handle completion of the rebuilding task for a single block.

    On failure (falsy ``result`` or malformed task output) stops the
    whole rebuilding cycle. Otherwise reports every Data/Parity segment
    that is both present locally and freshly reconstructed, pushes the
    new data to the sender, and schedules the next block.
    """
    if not result:
        lg.out(10, 'backup_rebuilder._block_finished FAILED, blockIndex=%d' % self.blockIndex)
        reactor.callLater(0, self._finish_rebuilding)  # @UndefinedVariable
        return
    try:
        newData, localData, localParity, reconstructedData, reconstructedParity = result
        _backupID, _blockNumber = params[0], params[1]
    except:
        lg.exc()
        reactor.callLater(0, self._finish_rebuilding)  # @UndefinedVariable
        return
    lg.out(10, 'backup_rebuilder._block_finished backupID=%r blockNumber=%r newData=%r' % (
        _backupID, _blockNumber, newData))
    lg.out(10, ' localData=%r localParity=%r' % (localData, localParity))
    if newData:
        from storage import backup_matrix
        from customer import data_sender
        customer_idurl = packetid.CustomerIDURL(_backupID)
        total_suppliers = contactsdb.num_suppliers(customer_idurl=customer_idurl)
        rebuilt_segments = 0
        for snum in range(total_suppliers):
            # the task result may be shorter than the supplier list -
            # skip suppliers for which no information was produced
            try:
                d_local = localData[snum]
                p_local = localParity[snum]
                d_fixed = reconstructedData[snum]
                p_fixed = reconstructedParity[snum]
            except:
                lg.err('invalid result from the task: %s' % repr(params))
                lg.out(10, 'result is %s' % repr(result))
                lg.exc()
                continue
            if d_local == 1 and d_fixed == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber, snum, 'Data')
                rebuilt_segments += 1
            if p_local == 1 and p_fixed == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber, snum, 'Parity')
                rebuilt_segments += 1
        self.blocksSucceed.append(_blockNumber)
        data_sender.A('new-data')
        lg.out(10, ' !!!!!! %d NEW DATA segments reconstructed, blockIndex=%d' % (
            rebuilt_segments, self.blockIndex))
    else:
        lg.out(10, ' NO CHANGES, blockIndex=%d' % self.blockIndex)
    self.blockIndex -= 1
    reactor.callLater(0, self._start_one_block)  # @UndefinedVariable
def packet_in_callback(backupID, newpacket):
    """
    Called for every packet received while restoring ``backupID``.

    Accumulates the number of restored payload bytes per supplier in the
    global ``_WorkingRestoreProgress`` table, reports the local file to
    the backup matrix, and fires the optional ``OnRestorePacketFunc``
    notification hook.
    """
    global _WorkingRestoreProgress
    global OnRestorePacketFunc
    SupplierNumber = newpacket.SupplierNumber()
    # BUGFIX: log message said "suppier"
    lg.out(12, 'restore_monitor.packet_in_callback %s from supplier %s' % (backupID, SupplierNumber))
    # count the data we are restoring, per supplier; dict.get() replaces
    # the previous "if key not in d.keys(): d[key] = 0" two-step init
    progress = _WorkingRestoreProgress[backupID]
    progress[SupplierNumber] = progress.get(SupplierNumber, 0) + len(newpacket.Payload)
    packetID = global_id.CanonicalID(newpacket.PacketID)
    backup_matrix.LocalFileReport(packetID)
    if OnRestorePacketFunc is not None:
        OnRestorePacketFunc(backupID, SupplierNumber, newpacket)
def _file_received(self, newpacket, state):
    """
    Handle an incoming packet requested during rebuilding.

    Ignores queued/terminal states, validates the packet, writes its
    payload into the local backups directory, reports the new local file
    to the backup matrix and notifies the state machine with an
    'inbox-data-packet' event.
    """
    if state in ['in queue', 'shutdown', 'exist', 'failed']:
        return
    if state != 'received':
        lg.warn("incorrect state [%s] for packet %s" % (str(state), str(newpacket)))
        return
    if not newpacket.Valid():
        # TODO: if we didn't get a valid packet ... re-request it or delete it?
        lg.warn("%s is not a valid packet: %r" % (newpacket.PacketID, newpacket))
        return
    packetID = global_id.CanonicalID(newpacket.PacketID)
    customer, remotePath = packetid.SplitPacketID(packetID)
    filename = os.path.join(settings.getLocalBackupsDir(), customer, remotePath)
    if os.path.isfile(filename):
        # BUGFIX: message was garbled: "found existed file" + filename
        # with no separator between the text and the path
        lg.warn("found existing file: %s" % filename)
        self.automat('inbox-data-packet', packetID)
        return
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.out(2, "backup_rebuilder._file_received ERROR can not create sub dir " + dirname)
            return
    if not bpio.WriteFile(filename, newpacket.Payload):
        lg.out(2, "backup_rebuilder._file_received ERROR writing " + filename)
        return
    from storage import backup_matrix
    backup_matrix.LocalFileReport(packetID)
    lg.out(10, "backup_rebuilder._file_received and wrote to " + filename)
    self.automat('inbox-data-packet', packetID)