def stop(self):
    """
    Shut the service down: detach the identity-url subscriber, then stop
    the streaming state machines in dependency order.
    """
    from main import events
    from stream import io_throttle
    from stream import data_sender
    from stream import data_receiver
    events.remove_subscriber(self._on_identity_url_changed, 'identity-url-changed')
    # receiver first, then flag + stop the sender, finally the throttle
    data_receiver.A('shutdown')
    data_sender.SetShutdownFlag()
    data_sender.A('shutdown')
    io_throttle.shutdown()
    return True
def init(self):
    """
    Initialize needed variables.
    """
    # backup currently being worked on, and the customer who owns it
    self.currentBackupID = None
    self.currentCustomerIDURL = None
    # list of missing blocks we work on for the current backup
    self.workingBlocksQueue = []
    self.backupsWasRebuilt = []
    self.missingPackets = 0
    self.log_transitions = _Debug
    from stream import data_sender
    data_sender.A().addStateChangedCallback(self._on_data_sender_state_changed)
def start(self):
    """
    Start the service, but only once all suppliers are hired.

    Returns False (and logs a warning) when fire_hire reports the supplier
    set is not complete yet; otherwise initializes the streaming machinery
    and subscribes to identity-url changes, returning True.
    """
    from logs import lg
    from customer import fire_hire
    # guard clause: refuse to start until every supplier slot is filled
    if not fire_hire.IsAllHired():
        lg.warn('service_data_motion() can not start right now, not all suppliers hired yet')
        return False
    from main import events
    from stream import io_throttle
    from stream import data_sender
    from stream import data_receiver
    io_throttle.init()
    data_sender.A('init')
    data_receiver.A('init')
    events.add_subscriber(self._on_identity_url_changed, 'identity-url-changed')
    return True
def OnNewDataPrepared():
    """
    Hook fired when freshly prepared backup data is ready to be uploaded.

    Kicks the ``data_sender`` state machine with a 'new-data' event so it
    picks up and delivers the newly prepared packets.
    """
    # FIX: import locally, consistent with every other callback in this
    # file - the original body referenced `data_sender` without any local
    # import, risking a NameError if no module-level import exists.
    from stream import data_sender
    data_sender.A('new-data')
def _on_supplier_modified(self, evt):
    """
    Event hook: one of the suppliers changed, so restart the data sender.
    """
    from stream import data_sender
    data_sender.A('restart')
def A(self, event, *args, **kwargs):
    """
    The core state machine method of the backup monitor automat.

    Dispatches `event` against the current `self.state`, performs the
    matching transition and side-effect actions, and always returns None.

    Observed states: AT_STARTUP, READY, FIRE_HIRE, LIST_FILES,
    LIST_BACKUPS, REBUILDING.  `self.RestartAgain` queues a deferred
    restart to be consumed by the 'instant' event in READY.
    """
    from customer import fire_hire
    from customer import list_files_orator
    from storage import backup_rebuilder
    from storage import index_synchronizer
    from stream import data_sender
    #---READY---
    if self.state == 'READY':
        if event == 'timer-5sec':
            self.doOverallCheckUp(*args, **kwargs)
        elif event == 'restart' or event == 'suppliers-changed' or (
                event == 'instant' and self.RestartAgain):
            self.state = 'FIRE_HIRE'
            self.RestartAgain = False
            self.doRememberSuppliers(*args, **kwargs)
            fire_hire.A('restart')
    #---LIST_FILES---
    elif self.state == 'LIST_FILES':
        if event == 'list_files_orator.state' and args[0] == 'NO_FILES':
            self.state = 'READY'
        elif event == 'list_files_orator.state' and args[0] == 'SAW_FILES':
            self.state = 'LIST_BACKUPS'
            index_synchronizer.A('pull')
            data_sender.A('restart')
            self.doPrepareListBackups(*args, **kwargs)
        elif event == 'restart':
            # defer the restart until we are back in READY
            self.RestartAgain = True
        elif event == 'suppliers-changed':
            self.state = 'READY'
            self.RestartAgain = True
    #---LIST_BACKUPS---
    elif self.state == 'LIST_BACKUPS':
        if event == 'list-backups-done':
            self.state = 'REBUILDING'
            backup_rebuilder.A('start')
        elif event == 'restart':
            self.RestartAgain = True
        elif event == 'suppliers-changed':
            self.state = 'READY'
            self.RestartAgain = True
        # FIX: removed an unreachable duplicate `elif event == 'restart'`
        # branch (which jumped to FIRE_HIRE) - the 'restart' branch above
        # always matched first, so that code was dead.
    #---REBUILDING---
    elif self.state == 'REBUILDING':
        if event == 'backup_rebuilder.state' and args[0] in ['DONE', 'STOPPED']:
            self.state = 'READY'
            self.doCleanUpBackups(*args, **kwargs)
            data_sender.A('restart')
        elif event == 'restart' or event == 'suppliers-changed':
            self.state = 'FIRE_HIRE'
            backup_rebuilder.SetStoppedFlag()
            fire_hire.A('restart')
    #---FIRE_HIRE---
    elif self.state == 'FIRE_HIRE':
        if event == 'suppliers-changed' and self.isSuppliersNumberChanged(*args, **kwargs):
            # the number of suppliers changed: existing backups are invalid
            self.state = 'LIST_FILES'
            self.doDeleteAllBackups(*args, **kwargs)
            self.doRememberSuppliers(*args, **kwargs)
            list_files_orator.A('need-files')
        elif event == 'fire-hire-finished':
            self.state = 'LIST_FILES'
            list_files_orator.A('need-files')
        elif event == 'suppliers-changed' and not self.isSuppliersNumberChanged(*args, **kwargs):
            # same supplier count, only identities changed
            self.state = 'LIST_FILES'
            self.doUpdateSuppliers(*args, **kwargs)
            self.doRememberSuppliers(*args, **kwargs)
            list_files_orator.A('need-files')
        elif event == 'restart':
            self.RestartAgain = True
    #---AT_STARTUP---
    elif self.state == 'AT_STARTUP':
        if event == 'init':
            self.state = 'READY'
            self.RestartAgain = False
    return None
def _block_finished(self, result, params):
    """
    Callback fired when the rebuild task for one block completes.

    `result` is expected to be a 5-tuple
    (newData, localData, localParity, reconstructedData, reconstructedParity)
    or a falsy value on failure; `params` carries (backupID, blockNumber, ...)
    -- assumed from the unpacking below, TODO confirm against the task producer.
    Updates the local-file matrix for every reconstructed segment, kicks
    data_sender when new data appeared, then schedules the next block.
    """
    # failed task: give up on this block and finish the rebuilding round
    if not result:
        if _Debug:
            lg.out(
                _DebugLevel,
                'backup_rebuilder._block_finished FAILED, blockIndex=%d' % self.blockIndex)
        reactor.callLater(0, self._finish_rebuilding)  # @UndefinedVariable
        return
    # unpack defensively: a malformed result/params aborts the round
    try:
        newData, localData, localParity, reconstructedData, reconstructedParity = result
        _backupID = params[0]
        _blockNumber = params[1]
    except:
        lg.exc()
        reactor.callLater(0, self._finish_rebuilding)  # @UndefinedVariable
        return
    if _Debug:
        lg.out(
            _DebugLevel,
            'backup_rebuilder._block_finished backupID=%r blockNumber=%r newData=%r' % (
                _backupID, _blockNumber, newData))
    if _Debug:
        lg.out(
            _DebugLevel,
            ' localData=%r localParity=%r' % (localData, localParity))
    err = False
    if newData:
        from storage import backup_matrix
        from stream import data_sender
        count = 0
        customer_idurl = packetid.CustomerIDURL(_backupID)
        # walk every supplier slot and report which segments we now hold locally
        for supplierNum in range(
                contactsdb.num_suppliers(customer_idurl=customer_idurl)):
            # probe all four arrays; an IndexError here means the supplier
            # set changed while the task was running
            try:
                localData[supplierNum]
                localParity[supplierNum]
                reconstructedData[supplierNum]
                reconstructedParity[supplierNum]
            except:
                err = True
                lg.err('invalid result from the task: %s' % repr(params))
                if _Debug:
                    lg.out(_DebugLevel, 'result is %s' % repr(result))
                break
            # segment counts only when it exists locally AND was just reconstructed
            if localData[supplierNum] == 1 and reconstructedData[supplierNum] == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber,
                                              supplierNum, 'Data')
                count += 1
            if localParity[supplierNum] == 1 and reconstructedParity[supplierNum] == 1:
                backup_matrix.LocalFileReport(None, _backupID, _blockNumber,
                                              supplierNum, 'Parity')
                count += 1
        if err:
            # supplier set no longer matches the task result: abort the round
            lg.err('seems suppliers were changed, stop rebuilding')
            reactor.callLater(
                0, self._finish_rebuilding)  # @UndefinedVariable
            return
        self.blocksSucceed.append(_blockNumber)
        # new segments are on disk - wake up the sender to upload them
        data_sender.A('new-data')
        if _Debug:
            lg.out(
                _DebugLevel,
                ' !!!!!! %d NEW DATA segments reconstructed, blockIndex=%d' % (
                    count,
                    self.blockIndex,
                ))
    else:
        if _Debug:
            lg.out(_DebugLevel,
                   ' NO CHANGES, blockIndex=%d' % self.blockIndex)
    # move to the previous block (rebuilding walks the queue backwards)
    self.blockIndex -= 1
    reactor.callLater(0, self._start_one_block)  # @UndefinedVariable
def _request_files(self):
    """
    Request missing Data/Parity packets from suppliers before rebuilding.

    Scans every supplier against every block in `self.workingBlocksQueue`
    (newest block first) and queues download requests via io_throttle for
    packets that exist remotely but not locally.  Fires exactly one automat
    event when done: 'requests-sent', 'found-missing', or 'no-requests'.
    """
    from storage import backup_matrix
    from stream import io_throttle
    from stream import data_sender
    self.missingPackets = 0
    # here we want to request some packets before we start working to
    # rebuild the missed blocks
    availableSuppliers = backup_matrix.GetActiveArray(
        customer_idurl=self.currentCustomerIDURL)
    # remember how many requests we did on this iteration
    total_requests_count = 0
    # at the moment I do download everything I have available and needed
    if id_url.is_some_empty(
            contactsdb.suppliers(
                customer_idurl=self.currentCustomerIDURL)):
        # at least one supplier slot is empty: nothing sensible to request
        if _Debug:
            lg.out(
                _DebugLevel,
                'backup_rebuilder._request_files SKIP - empty supplier')
        self.automat('no-requests')
        return
    for supplierNum in range(
            contactsdb.num_suppliers(
                customer_idurl=self.currentCustomerIDURL)):
        supplierID = contactsdb.supplier(
            supplierNum, customer_idurl=self.currentCustomerIDURL)
        if not supplierID:
            continue
        requests_count = 0
        # we do requests in reverse order because we start rebuilding from
        # the last block
        for blockIndex in range(len(self.workingBlocksQueue) - 1, -1, -1):
            blockNum = self.workingBlocksQueue[blockIndex]
            # do not keep too many requests in the queue
            if io_throttle.GetRequestQueueLength(supplierID) >= 16:
                break
            # also don't do too many requests at once
            if requests_count > 16:
                break
            remoteData = backup_matrix.GetRemoteDataArray(
                self.currentBackupID, blockNum)
            remoteParity = backup_matrix.GetRemoteParityArray(
                self.currentBackupID, blockNum)
            localData = backup_matrix.GetLocalDataArray(
                self.currentBackupID, blockNum)
            localParity = backup_matrix.GetLocalParityArray(
                self.currentBackupID, blockNum)
            # matrix rows shorter than the supplier index mean stale data;
            # stop scanning this supplier's blocks
            if supplierNum >= len(remoteData) or supplierNum >= len(remoteParity):
                break
            if supplierNum >= len(localData) or supplierNum >= len(localParity):
                break
            # if remote Data exist and is available because supplier is on-line,
            # but we do not have it on hand - do request
            if localData[supplierNum] == 0:
                PacketID = packetid.MakePacketID(self.currentBackupID,
                                                 blockNum,
                                                 supplierNum, 'Data')
                if remoteData[supplierNum] == 1:
                    if availableSuppliers[supplierNum]:
                        # if supplier is not alive - we can't request from him
                        if not io_throttle.HasPacketInRequestQueue(
                                supplierID, PacketID):
                            customer, remotePath = packetid.SplitPacketID(
                                PacketID)
                            filename = os.path.join(
                                settings.getLocalBackupsDir(),
                                customer,
                                remotePath,
                            )
                            # only request if the packet file is not already on disk
                            if not os.path.exists(filename):
                                if io_throttle.QueueRequestFile(
                                        self._file_received, my_id.getIDURL(),
                                        PacketID, my_id.getIDURL(),
                                        supplierID):
                                    requests_count += 1
                else:
                    # count this packet as missing
                    self.missingPackets += 1
                    # also mark this guy as one who dont have any data - nor local nor remote
            else:
                # but if local Data already exists, but was not sent - do it now
                if remoteData[supplierNum] != 1:
                    data_sender.A('new-data')
            # same for Parity
            if localParity[supplierNum] == 0:
                PacketID = packetid.MakePacketID(self.currentBackupID,
                                                 blockNum,
                                                 supplierNum, 'Parity')
                if remoteParity[supplierNum] == 1:
                    if availableSuppliers[supplierNum]:
                        if not io_throttle.HasPacketInRequestQueue(
                                supplierID, PacketID):
                            customer, remotePath = packetid.SplitPacketID(
                                PacketID)
                            filename = os.path.join(
                                settings.getLocalBackupsDir(),
                                customer,
                                remotePath,
                            )
                            if not os.path.exists(filename):
                                if io_throttle.QueueRequestFile(
                                        self._file_received,
                                        my_id.getIDURL(),
                                        PacketID,
                                        my_id.getIDURL(),
                                        supplierID,
                                ):
                                    requests_count += 1
                    else:
                        self.missingPackets += 1
                else:
                    # but if local Parity already exists, but was not sent - do it now
                    if remoteParity[supplierNum] != 1:
                        data_sender.A('new-data')
        total_requests_count += requests_count
    # fire exactly one outcome event for the automat
    if total_requests_count > 0:
        if _Debug:
            lg.out(
                _DebugLevel,
                'backup_rebuilder._request_files : %d chunks requested' % total_requests_count)
        self.automat('requests-sent', total_requests_count)
    else:
        if self.missingPackets:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'backup_rebuilder._request_files : found %d missing packets' % self.missingPackets)
            self.automat('found-missing')
        else:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'backup_rebuilder._request_files : nothing was requested'
                )
            self.automat('no-requests')
def shutdown(self):
    """
    Detach the data_sender state-change callback installed by init().
    """
    from stream import data_sender
    sender = data_sender.A()
    sender.removeStateChangedCallback(self._on_data_sender_state_changed)