def isMoreNeeded(self, *args, **kwargs):
    """
    Condition method.

    Returns True when more suppliers must be hired: either some of the
    current supplier positions is empty, or - after excluding the suppliers
    about to be dismissed - fewer suppliers than the desired number remain.

    An explicit list of suppliers to be dismissed may be passed as the first
    positional argument; otherwise `self.dismiss_list` is used.
    """
    if id_url.is_some_empty(contactsdb.suppliers()):
        if _Debug:
            lg.out(_DebugLevel, 'fire_hire.isMoreNeeded found empty supplier!!!')
        return True
    # NOTE: guard against empty `args` - the original code raised IndexError
    # when the event was fired without any arguments
    if args and isinstance(args[0], list):
        dismissed = args[0]
    else:
        dismissed = self.dismiss_list
    # suppliers that will remain after the dismissed ones are removed
    s = set(id_url.to_bin_list(contactsdb.suppliers()))
    s.difference_update(set(id_url.to_bin_list(dismissed)))
    result = len(s) < settings.getSuppliersNumberDesired()
    if _Debug:
        lg.out(
            _DebugLevel, 'fire_hire.isMoreNeeded %d %d %d %d, result=%s' % (
                contactsdb.num_suppliers(), len(dismissed), len(s),
                settings.getSuppliersNumberDesired(), result))
    return result
def test_dict_of_lists(self):
    """
    Dictionaries keyed by id_url.field() objects with list values must
    support lookups and emptiness checks consistently.
    """
    for name in ['alice', 'bob', 'carl', 'frank']:
        self._cache_identity(name)
    alice_key = id_url.field(alice_text)
    bob_key = id_url.field(bob)
    d = {alice_key: [], bob_key: []}
    d[alice_key].append(id_url.field(carl))
    d[alice_key].append(id_url.field(frank_1))
    d[bob_key].append(id_url.field(''))
    d[bob_key].append(id_url.field(frank_2))
    d[bob_key].append(id_url.field(None))
    d[bob_key].append(id_url.field(b''))
    # frank_2 must compare equal to frank_1, so frank is found in both lists
    self.assertIn(id_url.field(frank_1), d[id_url.field(alice_text)])
    self.assertIn(id_url.field(frank_1), d[id_url.field(bob)])
    # only bob's list contains empty id_url fields
    self.assertFalse(id_url.is_some_empty(d[id_url.field(alice_text)]))
    self.assertTrue(id_url.is_some_empty(d[id_url.field(bob)]))
    self.assertEqual(len(id_url.fields_list(d[id_url.field(bob)])), 4)
def _on_my_dht_relations_discovered(self, dht_result):
    """
    Callback for a DHT lookup of my customer family records.

    Finds nodes still recorded in DHT which are no longer my suppliers and
    sends a CancelService() packet to each of them to clean up the stale
    relation.  Skips everything when the result is empty/invalid or when
    some of my supplier positions are not hired yet.
    """
    from p2p import p2p_service
    from contacts import contactsdb
    from userid import my_id
    from userid import id_url
    from crypt import my_keys
    from logs import lg
    # expect a dict carrying a non-empty 'suppliers' list
    have_records = dht_result and isinstance(dht_result, dict) and len(dht_result.get('suppliers', [])) > 0
    if not have_records:
        lg.warn('no dht records found for my customer family')
        return
    if id_url.is_some_empty(contactsdb.suppliers()):
        lg.warn(
            'some of my suppliers are not hired yet, skip doing any changes'
        )
        return
    # collect DHT-listed nodes which are not my suppliers anymore
    suppliers_to_be_dismissed = set()
    for idurl in id_url.to_bin_list(dht_result['suppliers']):
        if not idurl:
            continue
        if contactsdb.is_supplier(idurl):
            continue
        lg.warn('dht relation with %r is not valid anymore' % idurl)
        suppliers_to_be_dismissed.add(idurl)
    # ask each stale node to cancel the supplier service for me
    for supplier_idurl in suppliers_to_be_dismissed:
        my_customer_key_id = my_id.getGlobalID(key_alias='customer')
        service_info = {}
        if my_keys.is_key_registered(my_customer_key_id):
            service_info['customer_public_key'] = my_keys.get_key_info(
                key_id=my_customer_key_id,
                include_private=False,
                include_signature=False,
                generate_signature=False,
            )
        p2p_service.SendCancelService(
            remote_idurl=supplier_idurl,
            service_name='service_supplier',
            json_payload=service_info,
        )
    if suppliers_to_be_dismissed:
        lg.info(
            'found %d suppliers to be cleaned and sent CancelService() packets'
            % len(suppliers_to_be_dismissed))
def IsAllHired():
    """
    Returns True only when the desired number of suppliers is configured,
    exactly that many suppliers are currently hired, and none of the
    supplier positions is empty.
    """
    desired_number = settings.getSuppliersNumberDesired()
    if desired_number < 0:
        # I must know how many suppliers I want
        lg.warn('my desired number of suppliers not set')
        return False
    if contactsdb.num_suppliers() != desired_number:
        # I must have exactly that amount of suppliers already
        if _Debug:
            # NOTE: fixed typo "desiried_suppliers" in the debug output key
            lg.args(_DebugLevel, desired_suppliers=desired_number, current_suppliers=contactsdb.num_suppliers(), )
        return False
    if id_url.is_some_empty(contactsdb.suppliers()):
        # I must know all of my suppliers
        if _Debug:
            lg.args(_DebugLevel, my_suppliers=contactsdb.suppliers())
        return False
    return True
def doDecideToDismiss(self, *args, **kwargs):
    """
    Action method.

    Decides which suppliers (if any) must be dismissed and always finishes
    by firing the 'made-decision' event with a list of supplier IDURLs -
    an empty list means "keep everyone for now".  Candidates, in priority
    order:

      1. suppliers requested to be fired externally via `_SuppliersToFire`;
      2. redundant suppliers beyond the desired number;
      3. suppliers whose connector is in "NO_SERVICE" state;
      4. a single critically-offline supplier, only when the
         'replace-critically-offline-enabled' option is turned on.
    """
    global _SuppliersToFire
    from p2p import p2p_connector
    from p2p import network_connector
    from customer import supplier_connector
    from p2p import online_status
    # take any actions only if I am connected to the network
    # NOTE: the original first guard also tested network_connector.A(),
    # which made the dedicated network_connector check below unreachable;
    # each automat is now verified separately so both messages can fire
    if not p2p_connector.A():
        if _Debug:
            lg.out(
                _DebugLevel,
                'fire_hire.doDecideToDismiss p2p_connector() is not ready yet, SKIP'
            )
        self.automat('made-decision', [])
        return
    if not network_connector.A():
        if _Debug:
            lg.out(
                _DebugLevel,
                'fire_hire.doDecideToDismiss network_connector() is not ready yet, SKIP'
            )
        self.automat('made-decision', [])
        return
    if p2p_connector.A().state != 'CONNECTED' or network_connector.A().state != 'CONNECTED':
        if _Debug:
            lg.out(
                _DebugLevel,
                'fire_hire.doDecideToDismiss p2p/network is not connected at the moment, SKIP'
            )
        self.automat('made-decision', [])
        return
    # if certain suppliers needs to be removed by manual/external request just do that
    to_be_fired = id_url.to_list(set(_SuppliersToFire))
    _SuppliersToFire = []
    if to_be_fired:
        lg.info('going to fire %d suppliers from external request' % len(to_be_fired))
        self.automat('made-decision', to_be_fired)
        return
    # make sure to not go too far when i just want to decrease number of my suppliers
    number_desired = settings.getSuppliersNumberDesired()
    redundant_suppliers = set()
    if contactsdb.num_suppliers() > number_desired:
        for supplier_index in range(number_desired, contactsdb.num_suppliers()):
            idurl = contactsdb.supplier(supplier_index)
            if idurl:
                lg.info('found REDUNDANT supplier %s at position %d' % (
                    idurl, supplier_index, ))
                redundant_suppliers.add(idurl)
    if redundant_suppliers:
        result = list(redundant_suppliers)
        lg.info('will replace redundant suppliers: %s' % result)
        self.automat('made-decision', result)
        return
    # now I need to look more careful at my suppliers
    potentialy_fired = set()
    connected_suppliers = set()
    disconnected_suppliers = set()
    requested_suppliers = set()
    online_suppliers = set()
    offline_suppliers = set()
    # if you have some empty suppliers need to get rid of them,
    # but no need to dismiss anyone at the moment.
    my_suppliers = contactsdb.suppliers()
    if _Debug:
        lg.args(_DebugLevel, my_suppliers=my_suppliers)
    if id_url.is_some_empty(my_suppliers):
        lg.warn('SKIP, found empty supplier')
        self.automat('made-decision', [])
        return
    # classify each supplier by connector state and by its online status
    for supplier_idurl in my_suppliers:
        sc = supplier_connector.by_idurl(supplier_idurl)
        if not sc:
            lg.warn('SKIP, supplier connector for supplier %s not exist' % supplier_idurl)
            continue
        if sc.state == 'NO_SERVICE':
            lg.warn('found "NO_SERVICE" supplier: %s' % supplier_idurl)
            disconnected_suppliers.add(supplier_idurl)
            potentialy_fired.add(supplier_idurl)
        elif sc.state == 'CONNECTED':
            connected_suppliers.add(supplier_idurl)
        elif sc.state in [
                'DISCONNECTED',
                'REFUSE',
        ]:
            disconnected_suppliers.add(supplier_idurl)
        # elif sc.state in ['QUEUE?', 'REQUEST', ]:
        #     requested_suppliers.add(supplier_idurl)
        if online_status.isOffline(supplier_idurl):
            offline_suppliers.add(supplier_idurl)
        elif online_status.isOnline(supplier_idurl):
            online_suppliers.add(supplier_idurl)
        elif online_status.isCheckingNow(supplier_idurl):
            requested_suppliers.add(supplier_idurl)
    if not connected_suppliers or not online_suppliers:
        lg.warn('SKIP, no ONLINE suppliers found at the moment')
        self.automat('made-decision', [])
        return
    if requested_suppliers:
        lg.warn('SKIP, still waiting response from some of suppliers')
        self.automat('made-decision', [])
        return
    if not disconnected_suppliers:
        if _Debug:
            lg.out(
                _DebugLevel,
                'fire_hire.doDecideToDismiss SKIP, no OFFLINE suppliers found at the moment'
            )
        # TODO: add more conditions to fire "slow" suppliers - they are still connected but useless
        self.automat('made-decision', [])
        return
    if len(offline_suppliers) + len(online_suppliers) != number_desired:
        lg.warn('SKIP, offline + online != total count: %s %s %s' % (
            offline_suppliers, online_suppliers, number_desired))
        self.automat('made-decision', [])
        return
    # tolerate as many offline suppliers as the ECC map can correct
    max_offline_suppliers_count = eccmap.GetCorrectableErrors(number_desired)
    if len(offline_suppliers) > max_offline_suppliers_count:
        lg.warn(
            'SKIP, too many OFFLINE suppliers at the moment : %d > %d' % (
                len(offline_suppliers),
                max_offline_suppliers_count,
            ))
        self.automat('made-decision', [])
        return
    critical_offline_suppliers_count = eccmap.GetFireHireErrors(number_desired)
    if len(offline_suppliers) >= critical_offline_suppliers_count and len(offline_suppliers) > 0:
        if config.conf().getBool('services/employer/replace-critically-offline-enabled'):
            # TODO: check that issue
            # too aggressive replacing suppliers who still have the data is very dangerous !!!
            one_dead_supplier = offline_suppliers.pop()
            lg.warn(
                'found "CRITICALLY_OFFLINE" supplier %s, max offline limit is %d' % (
                    one_dead_supplier,
                    critical_offline_suppliers_count,
                ))
            potentialy_fired.add(one_dead_supplier)
    if not potentialy_fired:
        if _Debug:
            lg.out(
                _DebugLevel,
                'fire_hire.doDecideToDismiss found no "bad" suppliers, all is good !!!!!'
            )
        self.automat('made-decision', [])
        return
    # only replace suppliers one by one at the moment
    result = list(potentialy_fired)
    lg.info('will replace supplier %s' % result[0])
    self.automat('made-decision', [
        result[0],
    ])
def doScanAndQueue(self, *args, **kwargs):
    """
    Action method.

    Scans all known backups for the local customer, finds data pieces that
    still need to be delivered to suppliers, and queues them for sending
    via `io_throttle`.  Finishes by firing the 'scan-done' event with the
    number of files queued on this pass.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue _ShutdownFlag is True\n')
        self.automat('scan-done', 0)
        return
    from storage import backup_matrix
    from storage import backup_fs
    backup_matrix.ReadLocalFiles()
    progress = 0
    for customer_idurl in contactsdb.known_customers():
        if customer_idurl != my_id.getIDURL():
            # TODO: check that later
            if _Debug:
                lg.out(
                    _DebugLevel + 2,
                    'data_sender.doScanAndQueue skip sending to another customer: %r'
                    % customer_idurl)
            continue
        known_suppliers = contactsdb.suppliers(customer_idurl)
        if not known_suppliers or id_url.is_some_empty(known_suppliers):
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'data_sender.doScanAndQueue found empty supplier(s) for customer %r, SKIP'
                    % customer_idurl)
            continue
        known_backups = misc.sorted_backup_ids(
            list(backup_matrix.local_files().keys()), True)
        if _Debug:
            lg.out(
                _DebugLevel,
                'data_sender.doScanAndQueue found %d known suppliers for customer %r with %d backups'
                % (len(known_suppliers), customer_idurl, len(known_backups)))
        for backupID in known_backups:
            this_customer_idurl = packetid.CustomerIDURL(backupID)
            if this_customer_idurl != customer_idurl:
                continue
            customerGlobalID, pathID, _ = packetid.SplitBackupID(
                backupID, normalize_key_alias=True)
            keyAlias = packetid.KeyAlias(customerGlobalID)
            item = backup_fs.GetByID(pathID, iterID=backup_fs.fsID(customer_idurl, keyAlias))
            if not item:
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        'data_sender.doScanAndQueue skip sending backup %r path not exist in catalog'
                        % backupID)
                continue
            if item.key_id and customerGlobalID and customerGlobalID != item.key_id:
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        'data_sender.doScanAndQueue skip sending backup %r key is different in the catalog: %r ~ %r'
                        % (
                            backupID,
                            customerGlobalID,
                            item.key_id,
                        ))
                continue
            packetsBySupplier = backup_matrix.ScanBlocksToSend(
                backupID, limit_per_supplier=None)
            total_for_customer = sum(
                [len(v) for v in packetsBySupplier.values()])
            if total_for_customer:
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        'data_sender.doScanAndQueue sending %r for customer %r with %d pieces'
                        % (item.name(), customer_idurl, total_for_customer))
                for supplierNum in packetsBySupplier.keys():
                    # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                    if supplierNum >= 0 and supplierNum < len(known_suppliers):
                        supplier_idurl = known_suppliers[supplierNum]
                    else:
                        supplier_idurl = None
                    if not supplier_idurl:
                        lg.warn(
                            'skip sending, unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r'
                            % (supplierNum, backupID, customer_idurl))
                        continue
                    for packetID in packetsBySupplier[supplierNum]:
                        backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                        if backupID_ != backupID:
                            lg.warn(
                                'skip sending, unexpected backupID supplierNum=%s for %s, customer_idurl=%r'
                                % (packetID, backupID, customer_idurl))
                            continue
                        if supplierNum_ != supplierNum:
                            lg.warn(
                                'skip sending, unexpected supplierNum %s for %s, customer_idurl=%r'
                                % (packetID, backupID, customer_idurl))
                            continue
                        if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                            if _Debug:
                                lg.out(
                                    _DebugLevel,
                                    'data_sender.doScanAndQueue %s already in sending queue for %r'
                                    % (packetID, supplier_idurl))
                            continue
                        # three '-' marks in a row mean several sends to that supplier already failed
                        latest_progress = self.statistic.get(supplier_idurl, {}).get('latest', '')
                        if len(latest_progress) >= 3 and latest_progress.endswith('---'):
                            if _Debug:
                                lg.out(
                                    _DebugLevel + 2,
                                    'data_sender.doScanAndQueue skip sending to supplier %r because multiple packets already failed'
                                    % supplier_idurl)
                            continue
                        if not io_throttle.OkToSend(supplier_idurl):
                            if _Debug:
                                lg.out(
                                    _DebugLevel + 2,
                                    'data_sender.doScanAndQueue skip sending, queue is busy for %r'
                                    % supplier_idurl)
                            continue
                        customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                        filename = os.path.join(
                            settings.getLocalBackupsDir(),
                            customerGlobalID,
                            pathID,
                        )
                        if not os.path.isfile(filename):
                            if _Debug:
                                lg.out(
                                    _DebugLevel,
                                    'data_sender.doScanAndQueue %s is not a file'
                                    % filename)
                            continue
                        itemInfo = item.to_json()
                        # NOTE: itemInfo is bound as a default argument here - the original
                        # lambdas captured the loop variable by reference, so callbacks firing
                        # after the loop moved on could report against the wrong catalog item
                        if io_throttle.QueueSendFile(
                            filename,
                            packetID,
                            supplier_idurl,
                            my_id.getIDURL(),
                            lambda packet, ownerID, packetID, item_info=itemInfo: self._packetAcked(
                                packet, ownerID, packetID, item_info),
                            lambda remoteID, packetID, why, item_info=itemInfo: self._packetFailed(
                                remoteID, packetID, why, item_info),
                        ):
                            progress += 1
                            if _Debug:
                                lg.out(
                                    _DebugLevel,
                                    'data_sender.doScanAndQueue for %r put %s in the queue progress=%d'
                                    % (
                                        item.name(),
                                        packetID,
                                        progress,
                                    ))
                        else:
                            if _Debug:
                                lg.out(
                                    _DebugLevel,
                                    'data_sender.doScanAndQueue io_throttle.QueueSendFile FAILED %s'
                                    % packetID)
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue progress=%s' % progress)
    self.automat('scan-done', progress)
def test_in_list(self):
    """
    Membership and comparison of id_url.field() objects inside a plain list:
    comparing a field whose identity is not cached yet must raise KeyError,
    and empty fields must compare equal to None / b'' / ''.
    """
    self._cache_identity('alice')
    self._cache_identity('bob')
    self._cache_identity('carl')
    l = [
        id_url.field(alice_text),
        id_url.field(bob),
        id_url.field(frank_1),
        id_url.field(frank_2),
    ]
    # frank is not cached yet, so any comparison against his sources must raise
    with self.assertRaises(KeyError):
        (id_url.field(carl) not in l)
    self.assertTrue(id_url.field(alice_text) in l)
    with self.assertRaises(KeyError):
        (id_url.field(b'http://fake.com/frank.xml') not in l)
    with self.assertRaises(KeyError):
        (id_url.field(ethan_not_exist) not in l)
    self.assertTrue(id_url.field(bob) in l)
    with self.assertRaises(KeyError):
        (id_url.field(frank_1) in l)
    with self.assertRaises(KeyError):
        (id_url.field(frank_2) in l)
    # NOTE: was assertTrue(len(l), 4) - that form treats 4 as the failure
    # message and always passes; assertEqual actually checks the length
    self.assertEqual(len(l), 4)
    with self.assertRaises(KeyError):
        (l[0] != l[3])
    with self.assertRaises(KeyError):
        (l[2] == l[3])
    # after caching frank's identity the comparisons start to work
    self._cache_identity('frank')
    self.assertTrue(l[2] == l[3])
    self.assertTrue(l[0] != l[3])
    self.assertIn(id_url.field(frank_1), l)
    self.assertFalse(id_url.is_some_empty(l))
    self.assertEqual(l.count(None), 0)
    self.assertEqual(id_url.empty_count(l), 0)
    # empty fields must be counted as None / b'' / ''
    l += [
        id_url.field(b''),
    ] * 3
    self.assertEqual(l.count(None), 3)
    self.assertEqual(l.count(b''), 3)
    self.assertEqual(l.count(''), 3)
    self.assertEqual(l.count(id_url.field(None)), 3)
    self.assertEqual(l.count(id_url.field(b'')), 3)
    self.assertEqual(l.count(id_url.field('')), 3)
    self.assertEqual(id_url.fields_list([
        b'',
    ]), [
        b'',
    ])
    self.assertEqual(id_url.fields_list([
        b'',
    ]), [
        None,
    ])
    self.assertEqual(id_url.fields_list([
        id_url.field(''),
    ]), [
        None,
    ])
    self.assertEqual(id_url.fields_list([
        None,
    ]), [
        id_url.field(''),
    ])
    self.assertNotEqual(id_url.fields_list([None, None]), [
        id_url.field(''),
    ])
    self.assertEqual(len(id_url.fields_list([
        None,
    ])), 1)
def test_empty(self):
    """
    Empty id_url.field() values (built from b'', '', None, b'None') must be
    falsy, compare equal to None / b'' / '', and collapse to a single key
    when used in a dict.
    """
    self.assertTrue(id_url.is_empty(id_url.field(b'')))
    self.assertTrue(id_url.is_empty(id_url.field('')))
    self.assertTrue(id_url.is_empty(id_url.field(None)))
    self.assertTrue(id_url.is_empty(id_url.field(b'None')))
    self.assertTrue(id_url.is_empty(None))
    self.assertTrue(id_url.is_empty(b''))
    self.assertTrue(id_url.is_empty(''))
    self.assertTrue(id_url.is_empty(b'None'))
    self.assertTrue(id_url.is_empty('None'))
    # empty fields are falsy
    self.assertFalse(id_url.field(b''))
    self.assertFalse(id_url.field(''))
    self.assertFalse(id_url.field(None))
    self.assertFalse(id_url.field('None'))
    self.assertFalse(id_url.field(b'None'))
    self.assertTrue(bool(id_url.field(b'')) is False)
    self.assertTrue(bool(id_url.field('')) is False)
    self.assertTrue(bool(id_url.field(None)) is False)
    self.assertTrue(bool(id_url.field('None')) is False)
    self.assertTrue(bool(id_url.field(b'None')) is False)
    # but an empty field is still a real object, never None itself
    self.assertTrue(id_url.field(b'') is not None)
    self.assertTrue(id_url.field('') is not None)
    self.assertFalse(id_url.field(None) is None)
    self.assertTrue(id_url.field(None) is not None)
    self.assertTrue(id_url.field(b'') == b'')
    self.assertTrue(id_url.field('') == '')
    self.assertTrue(id_url.field('') == b'')
    self.assertTrue(id_url.field(None) == '')
    self.assertTrue(id_url.field(None) == b'')
    l = [
        b'',
        None,
        '',
        id_url.field(''),
        id_url.field(None),
        id_url.field(b''),
    ]
    self.assertIn(b'', l)
    self.assertIn('', l)
    self.assertIn(None, l)
    self.assertTrue(id_url.is_some_empty(l))
    self.assertEqual(l.count(None), 4)
    self.assertEqual(id_url.empty_count(l), 6)
    self.assertTrue(None in [
        id_url.field(None),
    ])
    self.assertTrue(None in [
        id_url.field(b''),
    ])
    self.assertTrue(b'' in [
        id_url.field(None),
    ])
    self.assertTrue(b'' in [
        id_url.field(b''),
    ])
    self.assertTrue(id_url.is_in(None, [
        id_url.field(None),
    ]))
    self.assertTrue(id_url.is_in(None, [
        id_url.field(b''),
    ]))
    self.assertTrue(id_url.is_in(b'', [
        id_url.field(None),
    ]))
    self.assertTrue(id_url.is_in(b'', [
        id_url.field(b''),
    ]))
    # all three empty keys are equal, so the dict keeps only one entry
    d = {id_url.field(''): 0, id_url.field(None): 1, id_url.field(b''): 2}
    # NOTE: was assertTrue(len(d), 1) - that form treats 1 as the failure
    # message and always passes; assertEqual actually checks the length
    self.assertEqual(len(d), 1)
    self.assertTrue(b'' in d)
    self.assertTrue('' in d)
    self.assertFalse(b'' not in d)
    self.assertFalse('' not in d)
    self.assertNotIn(None, d)
    self.assertIn(id_url.field(''), d)
    self.assertIn(id_url.field(b''), d)
    self.assertIn(id_url.field(None), d)
def doScanAndQueue(self, *args, **kwargs):
    """
    Action method.

    Scans local backups of the local customer, finds pieces that still need
    to be delivered to suppliers and queues them for sending via
    `io_throttle`.  Finishes by firing the 'scan-done' event with the number
    of files queued on this pass.

    NOTE(review): this looks like an older variant of the similarly named
    method elsewhere in the project (uses getLocalID(), no key-alias lookup,
    plain callback references) - confirm which copy is actually in use.
    """
    global _ShutdownFlag
    # while shutting down, do nothing and report zero progress
    if _ShutdownFlag:
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue _ShutdownFlag is True\n')
        self.automat('scan-done', 0)
        return
    from storage import backup_matrix
    from storage import backup_fs
    # refresh the matrix of locally present files before scanning
    backup_matrix.ReadLocalFiles()
    progress = 0
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue with %d known customers' % len(contactsdb.known_customers()))
    for customer_idurl in contactsdb.known_customers():
        # only send my own data; other customers' data is skipped here
        if customer_idurl != my_id.getLocalID():
            # TODO: check that later
            if _Debug:
                lg.out(_DebugLevel + 6, 'data_sender.doScanAndQueue skip sending to another customer: %r' % customer_idurl)
            continue
        known_suppliers = contactsdb.suppliers(customer_idurl)
        # can not send anything while some supplier positions are empty
        if not known_suppliers or id_url.is_some_empty(known_suppliers):
            if _Debug:
                lg.out(_DebugLevel, 'data_sender.doScanAndQueue found empty supplier(s) for customer %r, SKIP' % customer_idurl)
            continue
        known_backups = misc.sorted_backup_ids(list(backup_matrix.local_files().keys()), True)
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue found %d known suppliers for customer %r with %d backups' % (
                len(known_suppliers), customer_idurl, len(known_backups)))
        for backupID in known_backups:
            # only handle backups belonging to this customer
            this_customer_idurl = packetid.CustomerIDURL(backupID)
            if this_customer_idurl != customer_idurl:
                continue
            customerGlobalID, pathID, _ = packetid.SplitBackupID(backupID, normalize_key_alias=True)
            item = backup_fs.GetByID(pathID, iterID=backup_fs.fsID(customer_idurl=customer_idurl))
            if not item:
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r path not exist in catalog' % backupID)
                continue
            # catalog item must be encrypted with the same key the backup ID names
            if item.key_id and customerGlobalID and customerGlobalID != item.key_id:
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r key is different in the catalog' % backupID)
                continue
            # map: supplier position -> list of packet IDs still to be sent
            packetsBySupplier = backup_matrix.ScanBlocksToSend(backupID, limit_per_supplier=None)
            total_for_customer = sum([len(v) for v in packetsBySupplier.values()])
            if _Debug:
                lg.out(_DebugLevel, 'data_sender.doScanAndQueue to be delivered for customer %r : %d' % (customer_idurl, total_for_customer))
            for supplierNum in packetsBySupplier.keys():
                # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                # resolve the supplier position to an actual IDURL
                if supplierNum >= 0 and supplierNum < len(known_suppliers):
                    supplier_idurl = known_suppliers[supplierNum]
                else:
                    supplier_idurl = None
                if not supplier_idurl:
                    lg.warn('skip sending, unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r' % (
                        supplierNum, backupID, customer_idurl))
                    continue
                for packetID in packetsBySupplier[supplierNum]:
                    # sanity checks: the packet must match the backup and position being scanned
                    backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                    if backupID_ != backupID:
                        lg.warn('skip sending, unexpected backupID supplierNum=%s for %s, customer_idurl=%r' % (
                            packetID, backupID, customer_idurl))
                        continue
                    if supplierNum_ != supplierNum:
                        lg.warn('skip sending, unexpected supplierNum %s for %s, customer_idurl=%r' % (
                            packetID, backupID, customer_idurl))
                        continue
                    # do not queue the same packet twice
                    if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s already in sending queue for %r' % (packetID, supplier_idurl))
                        continue
                    if not io_throttle.OkToSend(supplier_idurl):
                        if _Debug:
                            lg.out(_DebugLevel + 6, 'data_sender.doScanAndQueue skip sending, queue is busy for %r\n' % supplier_idurl)
                        continue
                    # customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                    # tranByID = gate.transfers_out_by_idurl().get(supplier_idurl, [])
                    # if len(tranByID) > 3:
                    #     log.write(u'transfers by %s: %d\n' % (supplier_idurl, len(tranByID)))
                    #     continue
                    # locate the piece on disk; only existing files are queued
                    customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                    filename = os.path.join(
                        settings.getLocalBackupsDir(),
                        customerGlobalID,
                        pathID,
                    )
                    if not os.path.isfile(filename):
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s is not a file\n' % filename)
                        continue
                    # hand the file over to io_throttle; callbacks report ack/failure back to me
                    if io_throttle.QueueSendFile(
                        filename,
                        packetID,
                        supplier_idurl,
                        my_id.getIDURL(),
                        self._packetAcked,
                        self._packetFailed,
                    ):
                        progress += 1
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue put %s in the queue progress=%d' % (packetID, progress, ))
                    else:
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue io_throttle.QueueSendFile FAILED %s' % packetID)
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue progress=%s' % progress)
    self.automat('scan-done', progress)
def _request_files(self):
    """
    Requests from on-line suppliers the Data/Parity pieces of the current
    backup which are missing locally, so the rebuilder can work on the
    missed blocks.

    Fires exactly one automat event at the end:
      * 'requests-sent'  - at least one piece was requested;
      * 'found-missing'  - nothing requested, but some pieces are missing
                           both locally and remotely;
      * 'no-requests'    - nothing to do (or some supplier position is empty).
    """
    from storage import backup_matrix
    from stream import io_throttle
    from stream import data_sender
    self.missingPackets = 0
    # here we want to request some packets before we start working to
    # rebuild the missed blocks
    availableSuppliers = backup_matrix.GetActiveArray(customer_idurl=self.currentCustomerIDURL)
    # remember how many requests we did on this iteration
    total_requests_count = 0
    # at the moment I do download everything I have available and needed
    if id_url.is_some_empty(contactsdb.suppliers(customer_idurl=self.currentCustomerIDURL)):
        if _Debug:
            lg.out(_DebugLevel, 'backup_rebuilder._request_files SKIP - empty supplier')
        self.automat('no-requests')
        return
    for supplierNum in range(contactsdb.num_suppliers(customer_idurl=self.currentCustomerIDURL)):
        supplierID = contactsdb.supplier(supplierNum, customer_idurl=self.currentCustomerIDURL)
        if not supplierID:
            continue
        requests_count = 0
        # we do requests in reverse order because we start rebuilding from
        # the last block
        for blockIndex in range(len(self.workingBlocksQueue) - 1, -1, -1):
            blockNum = self.workingBlocksQueue[blockIndex]
            # do not keep too many requests in the queue
            if io_throttle.GetRequestQueueLength(supplierID) >= 16:
                break
            # also don't do too many requests at once
            if requests_count > 16:
                break
            # presence arrays indexed by supplier position - TODO confirm exact encoding,
            # the code below only relies on 0 meaning "absent" and 1 meaning "present"
            remoteData = backup_matrix.GetRemoteDataArray(self.currentBackupID, blockNum)
            remoteParity = backup_matrix.GetRemoteParityArray(self.currentBackupID, blockNum)
            localData = backup_matrix.GetLocalDataArray(self.currentBackupID, blockNum)
            localParity = backup_matrix.GetLocalParityArray(self.currentBackupID, blockNum)
            # stop scanning blocks whose arrays are too short for this supplier position
            if supplierNum >= len(remoteData) or supplierNum >= len(remoteParity):
                break
            if supplierNum >= len(localData) or supplierNum >= len(localParity):
                break
            # if remote Data exist and is available because supplier is on-line,
            # but we do not have it on hand - do request
            if localData[supplierNum] == 0:
                PacketID = packetid.MakePacketID(self.currentBackupID, blockNum, supplierNum, 'Data')
                if remoteData[supplierNum] == 1:
                    if availableSuppliers[supplierNum]:
                        # if supplier is not alive - we can't request from him
                        if not io_throttle.HasPacketInRequestQueue(supplierID, PacketID):
                            customer, remotePath = packetid.SplitPacketID(PacketID)
                            filename = os.path.join(
                                settings.getLocalBackupsDir(),
                                customer,
                                remotePath,
                            )
                            # only request pieces that are not already on disk
                            if not os.path.exists(filename):
                                if io_throttle.QueueRequestFile(
                                        self._file_received,
                                        my_id.getIDURL(),
                                        PacketID,
                                        my_id.getIDURL(),
                                        supplierID):
                                    requests_count += 1
                else:
                    # count this packet as missing
                    self.missingPackets += 1
                    # also mark this guy as one who dont have any data - nor local nor remote
            else:
                # but if local Data already exists, but was not sent - do it now
                if remoteData[supplierNum] != 1:
                    data_sender.A('new-data')
            # same for Parity
            if localParity[supplierNum] == 0:
                PacketID = packetid.MakePacketID(self.currentBackupID, blockNum, supplierNum, 'Parity')
                if remoteParity[supplierNum] == 1:
                    if availableSuppliers[supplierNum]:
                        if not io_throttle.HasPacketInRequestQueue(supplierID, PacketID):
                            customer, remotePath = packetid.SplitPacketID(PacketID)
                            filename = os.path.join(
                                settings.getLocalBackupsDir(),
                                customer,
                                remotePath,
                            )
                            if not os.path.exists(filename):
                                if io_throttle.QueueRequestFile(
                                    self._file_received,
                                    my_id.getIDURL(),
                                    PacketID,
                                    my_id.getIDURL(),
                                    supplierID,
                                ):
                                    requests_count += 1
                else:
                    self.missingPackets += 1
            else:
                # but if local Parity already exists, but was not sent - do it now
                if remoteParity[supplierNum] != 1:
                    data_sender.A('new-data')
        total_requests_count += requests_count
    if total_requests_count > 0:
        if _Debug:
            lg.out(_DebugLevel, 'backup_rebuilder._request_files : %d chunks requested' % total_requests_count)
        self.automat('requests-sent', total_requests_count)
    else:
        if self.missingPackets:
            if _Debug:
                lg.out(_DebugLevel, 'backup_rebuilder._request_files : found %d missing packets' % self.missingPackets)
            self.automat('found-missing')
        else:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'backup_rebuilder._request_files : nothing was requested'
                )
            self.automat('no-requests')