Example 1
def OnJobDone(backupID, result):
    """
    A callback method fired when the backup is finished.

    Here we need to save the index database.
    """
    from storage import backup_rebuilder
    # from customer import io_throttle
    lg.info('job done [%s] with result "%s", %d more tasks' %
            (backupID, result, len(tasks())))
    jobs().pop(backupID)
    customerGlobalID, remotePath, version = packetid.SplitBackupID(backupID)
    customer_idurl = global_id.GlobalUserToIDURL(customerGlobalID)
    if result == 'done':
        maxBackupsNum = settings.getBackupsMaxCopies()
        if maxBackupsNum:
            item = backup_fs.GetByID(remotePath,
                                     iterID=backup_fs.fsID(customer_idurl))
            if item:
                versions = item.list_versions(sorted=True, reverse=True)
                if len(versions) > maxBackupsNum:
                    for version in versions[maxBackupsNum:]:
                        item.delete_version(version)
                        backupID = packetid.MakeBackupID(
                            customerGlobalID, remotePath, version)
                        backup_rebuilder.RemoveBackupToWork(backupID)
                        # io_throttle.DeleteBackupRequests(backupID)
                        # io_throttle.DeleteBackupSendings(backupID)
                        # callback.delete_backup_interest(backupID)
                        backup_fs.DeleteLocalBackup(
                            settings.getLocalBackupsDir(), backupID)
                        backup_matrix.EraseBackupLocalInfo(backupID)
        backup_fs.ScanID(remotePath)
        backup_fs.Calculate()
        Save()
        control.request_update([
            ('pathID', remotePath),
        ])
        # TODO: check used space; if we are over the limit - stop all tasks immediately
        backup_matrix.RepaintBackup(backupID)
    elif result == 'abort':
        DeleteBackup(backupID)
    if len(tasks()) == 0:
        # do we really need to restart backup_monitor after each backup?
        # if we have a lot of tasks started this will produce a lot of unneeded actions
        # it will be smarter to restart it once we finish all tasks
        # because the user will probably leave BitDust working after starting a long running operation
        from storage import backup_monitor
        if _Debug:
            lg.out(
                _DebugLevel,
                'backup_control.OnJobDone restarting backup_monitor() machine because no tasks left'
            )
        backup_monitor.A('restart')
    reactor.callLater(0, RunTask)  # @UndefinedVariable
    reactor.callLater(0, FireTaskFinishedCallbacks, remotePath, version,
                      result)  # @UndefinedVariable
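The retention branch above keeps only the newest copies once settings.getBackupsMaxCopies() returns a non-zero limit. A minimal standalone sketch of that rule, assuming the version list is sorted newest-first (as requested with reverse=True); the version labels are illustrative placeholders:

# sketch only: illustrative version labels, newest first
versions = ['F20240103090000AM', 'F20240102090000AM', 'F20240101090000AM']
maxBackupsNum = 2
to_delete = versions[maxBackupsNum:] if maxBackupsNum else []
assert to_delete == ['F20240101090000AM']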
Example 2
def DeletePathBackups(pathID,
                      removeLocalFilesToo=True,
                      saveDB=True,
                      calculate=True):
    """
    This removes all backups of the given path ID, performing the same
    operations as ``DeleteBackup()``.
    """
    from . import backup_rebuilder
    from customer import io_throttle
    pathID = global_id.CanonicalID(pathID)
    # get the working item
    customer, remotePath = packetid.SplitPacketID(pathID)
    customer_idurl = global_id.GlobalUserToIDURL(customer)
    item = backup_fs.GetByID(remotePath, iterID=backup_fs.fsID(customer_idurl))
    if item is None:
        return False
    lg.out(8, 'backup_control.DeletePathBackups ' + pathID)
    # this is a list of all known backups of this path
    versions = item.list_versions()
    for version in versions:
        backupID = packetid.MakeBackupID(customer, remotePath, version)
        lg.out(8, '        removing %s' % backupID)
        # abort backup if it just started and is running at the moment
        AbortRunningBackup(backupID)
        # if we requested files for this backup - we do not need them anymore
        io_throttle.DeleteBackupRequests(backupID)
        io_throttle.DeleteBackupSendings(backupID)
        # remove interests in transport_control
        # callback.delete_backup_interest(backupID)
        # remove local files for this backupID
        if removeLocalFilesToo:
            backup_fs.DeleteLocalBackup(settings.getLocalBackupsDir(),
                                        backupID)
        # remove remote info for this backup from the memory
        backup_matrix.EraseBackupRemoteInfo(backupID)
        # also remove local info
        backup_matrix.EraseBackupLocalInfo(backupID)
        # finally remove this backup from the index
        item.delete_version(version)
        # lg.out(8, 'backup_control.DeletePathBackups ' + backupID)
    # stop any rebuilding, we will restart it soon
    backup_rebuilder.RemoveAllBackupsToWork()
    backup_rebuilder.SetStoppedFlag()
    # check and calculate used space
    if calculate:
        backup_fs.Scan()
        backup_fs.Calculate()
    # save the index if needed
    if saveDB:
        Save()
        control.request_update()
    return True
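A hypothetical call sketch for DeletePathBackups(); the import path follows the snippet's own "from storage import ..." convention and the path ID is a made-up placeholder:

from storage import backup_control

# remove every stored version under this catalog path, wipe the local copies too,
# then rescan the catalog and persist the updated index
removed = backup_control.DeletePathBackups(
    'master$alice@id-server.example.com:1/2/3',  # placeholder path ID
    removeLocalFilesToo=True,
    saveDB=True,
    calculate=True,
)
print('removed' if removed else 'path not found in the catalog')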
Example 3
def test():
    """
    For tests.
    """
    #    backup_fs.Calculate()
    #    print backup_fs.counter()
    #    print backup_fs.numberfiles()
    #    print backup_fs.sizefiles()
    #    print backup_fs.sizebackups()
    pprint.pprint(backup_fs.fsID())
    pprint.pprint(backup_fs.fs())
    print(backup_fs.GetByID('0'))
Example 4
def OnFoundFolderSize(pth, sz, arg):
    """
    This is a callback fired from the ``lib.dirsize.ask()`` method once the
    folder size has been calculated.
    """
    try:
        pathID, version = arg
        customerGlobID, pathID = packetid.SplitPacketID(pathID)
        customerIDURL = global_id.GlobalUserToIDURL(customerGlobID)
        item = backup_fs.GetByID(pathID, iterID=backup_fs.fsID(customerIDURL))
        if item:
            item.set_size(sz)
            backup_fs.Calculate()
            Save()
        if version:
            backupID = packetid.MakeBackupID(customerGlobID, pathID, version)
            job = GetRunningBackupObject(backupID)
            if job:
                job.totalSize = sz
            if _Debug:
                # log inside this branch so backupID is always defined here
                lg.out(_DebugLevel, 'backup_control.OnFoundFolderSize %s %d' % (backupID, sz))
    except:
        lg.exc()
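A hypothetical wiring sketch for the callback above: the docstring says it is fired by lib.dirsize.ask(), and arg is unpacked as (pathID, version); the exact ask() signature, the directory path and the path ID value are assumptions:

from lib import dirsize  # assumed import path, matching the docstring

# assumed call shape: ask(dirpath, callback, arg); the callback later receives (path, size, arg).
# passing version=None means only the catalog item size is updated, no running job is touched
dirsize.ask('/home/alice/Documents', OnFoundFolderSize,
            ('master$alice@id-server.example.com:1/2/3', None))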
Example 5
 def doScanAndQueue(self, *args, **kwargs):
     """
     Action method.
     """
     global _ShutdownFlag
     if _ShutdownFlag:
         if _Debug:
             lg.out(_DebugLevel,
                    'data_sender.doScanAndQueue   _ShutdownFlag is True\n')
         self.automat('scan-done', 0)
         return
     from storage import backup_matrix
     from storage import backup_fs
     backup_matrix.ReadLocalFiles()
     progress = 0
     # if _Debug:
     #     lg.out(_DebugLevel, 'data_sender.doScanAndQueue    with %d known customers' % len(contactsdb.known_customers()))
     for customer_idurl in contactsdb.known_customers():
         if customer_idurl != my_id.getIDURL():
             # TODO: check that later
             if _Debug:
                 lg.out(
                     _DebugLevel + 2,
                     'data_sender.doScanAndQueue  skip sending to another customer: %r'
                     % customer_idurl)
             continue
         known_suppliers = contactsdb.suppliers(customer_idurl)
         if not known_suppliers or id_url.is_some_empty(known_suppliers):
             if _Debug:
                 lg.out(
                     _DebugLevel,
                     'data_sender.doScanAndQueue    found empty supplier(s) for customer %r, SKIP'
                     % customer_idurl)
             continue
         known_backups = misc.sorted_backup_ids(
             list(backup_matrix.local_files().keys()), True)
         if _Debug:
             lg.out(
                 _DebugLevel,
                 'data_sender.doScanAndQueue    found %d known suppliers for customer %r with %d backups'
                 %
                 (len(known_suppliers), customer_idurl, len(known_backups)))
         for backupID in known_backups:
             this_customer_idurl = packetid.CustomerIDURL(backupID)
             if this_customer_idurl != customer_idurl:
                 continue
             customerGlobalID, pathID, _ = packetid.SplitBackupID(
                 backupID, normalize_key_alias=True)
             keyAlias = packetid.KeyAlias(customerGlobalID)
             item = backup_fs.GetByID(pathID,
                                      iterID=backup_fs.fsID(
                                          customer_idurl, keyAlias))
             if not item:
                 if _Debug:
                     lg.out(
                         _DebugLevel,
                         'data_sender.doScanAndQueue    skip sending backup %r path not exist in catalog'
                         % backupID)
                 continue
             if item.key_id and customerGlobalID and customerGlobalID != item.key_id:
                 if _Debug:
                     lg.out(
                         _DebugLevel,
                         'data_sender.doScanAndQueue    skip sending backup %r key is different in the catalog: %r ~ %r'
                         % (
                             backupID,
                             customerGlobalID,
                             item.key_id,
                         ))
                 continue
             packetsBySupplier = backup_matrix.ScanBlocksToSend(
                 backupID, limit_per_supplier=None)
             total_for_customer = sum(
                 [len(v) for v in packetsBySupplier.values()])
             if total_for_customer:
                 if _Debug:
                     lg.out(
                         _DebugLevel,
                         'data_sender.doScanAndQueue    sending %r for customer %r with %d pieces'
                         %
                         (item.name(), customer_idurl, total_for_customer))
                 for supplierNum in packetsBySupplier.keys():
                     # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                     if supplierNum >= 0 and supplierNum < len(
                             known_suppliers):
                         supplier_idurl = known_suppliers[supplierNum]
                     else:
                         supplier_idurl = None
                     if not supplier_idurl:
                         lg.warn(
                             'skip sending, unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r'
                             % (supplierNum, backupID, customer_idurl))
                         continue
                     for packetID in packetsBySupplier[supplierNum]:
                         backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(
                             packetID)
                         if backupID_ != backupID:
                             lg.warn(
                                 'skip sending, unexpected backupID supplierNum=%s for %s, customer_idurl=%r'
                                 % (packetID, backupID, customer_idurl))
                             continue
                         if supplierNum_ != supplierNum:
                             lg.warn(
                                 'skip sending, unexpected supplierNum %s for %s, customer_idurl=%r'
                                 % (packetID, backupID, customer_idurl))
                             continue
                         if io_throttle.HasPacketInSendQueue(
                                 supplier_idurl, packetID):
                             if _Debug:
                                 lg.out(
                                     _DebugLevel,
                                     'data_sender.doScanAndQueue    %s already in sending queue for %r'
                                     % (packetID, supplier_idurl))
                             continue
                         latest_progress = self.statistic.get(
                             supplier_idurl, {}).get('latest', '')
                         if len(latest_progress
                                ) >= 3 and latest_progress.endswith('---'):
                             if _Debug:
                                 lg.out(
                                     _DebugLevel + 2,
                                     'data_sender.doScanAndQueue     skip sending to supplier %r because multiple packets already failed'
                                     % supplier_idurl)
                             continue
                         if not io_throttle.OkToSend(supplier_idurl):
                             if _Debug:
                                 lg.out(
                                     _DebugLevel + 2,
                                     'data_sender.doScanAndQueue     skip sending, queue is busy for %r'
                                     % supplier_idurl)
                             continue
                         customerGlobalID, pathID = packetid.SplitPacketID(
                             packetID)
                         filename = os.path.join(
                             settings.getLocalBackupsDir(),
                             customerGlobalID,
                             pathID,
                         )
                         if not os.path.isfile(filename):
                             if _Debug:
                                 lg.out(
                                     _DebugLevel,
                                     'data_sender.doScanAndQueue     %s is not a file'
                                     % filename)
                             continue
                         itemInfo = item.to_json()
                         if io_throttle.QueueSendFile(
                                 filename,
                                 packetID,
                                 supplier_idurl,
                                 my_id.getIDURL(),
                                 lambda packet, ownerID,
                                 packetID: self._packetAcked(
                                     packet, ownerID, packetID, itemInfo),
                                 lambda remoteID, packetID,
                                 why: self._packetFailed(
                                     remoteID, packetID, why, itemInfo),
                         ):
                             progress += 1
                             if _Debug:
                                 lg.out(
                                     _DebugLevel,
                                     'data_sender.doScanAndQueue   for %r put %s in the queue  progress=%d'
                                     % (
                                         item.name(),
                                         packetID,
                                         progress,
                                     ))
                         else:
                             if _Debug:
                                 lg.out(
                                     _DebugLevel,
                                     'data_sender.doScanAndQueue    io_throttle.QueueSendFile FAILED %s'
                                     % packetID)
     if _Debug:
         lg.out(_DebugLevel,
                'data_sender.doScanAndQueue    progress=%s' % progress)
     self.automat('scan-done', progress)
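Stripped of the BitDust services, the queueing loop above follows a plain dispatch pattern: map each supplier number to its IDURL, then queue every pending packet that has a local file and a free outgoing queue. A standalone sketch, with stub callables standing in for io_throttle.OkToSend and io_throttle.QueueSendFile:

import os

def queue_pending_packets(packets_by_supplier, known_suppliers, backups_dir,
                          ok_to_send, queue_send_file):
    # packets_by_supplier: {supplierNum: [packetID, ...]}, the shape produced by a
    # ScanBlocksToSend-style scan
    progress = 0
    for supplier_num, packet_ids in packets_by_supplier.items():
        if not (0 <= supplier_num < len(known_suppliers)):
            continue  # unknown supplier position, like the lg.warn() branch above
        supplier_idurl = known_suppliers[supplier_num]
        for packet_id in packet_ids:
            if not ok_to_send(supplier_idurl):
                continue  # outgoing queue is busy for this supplier
            filename = os.path.join(backups_dir, packet_id)
            if not os.path.isfile(filename):
                continue  # no local file for this piece yet
            if queue_send_file(filename, packet_id, supplier_idurl):
                progress += 1
    return progress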
Example 6
 def doScanAndQueue(self, *args, **kwargs):
     """
     Action method.
     """
     global _ShutdownFlag
     if _ShutdownFlag:
         if _Debug:
             lg.out(_DebugLevel, 'data_sender.doScanAndQueue   _ShutdownFlag is True\n')
         self.automat('scan-done', 0)
         return
     from storage import backup_matrix
     from storage import backup_fs
     backup_matrix.ReadLocalFiles()
     progress = 0
     if _Debug:
         lg.out(_DebugLevel, 'data_sender.doScanAndQueue    with %d known customers' % len(contactsdb.known_customers()))
     for customer_idurl in contactsdb.known_customers():
         if customer_idurl != my_id.getLocalID():
             # TODO: check that later
             if _Debug:
                 lg.out(_DebugLevel + 6, 'data_sender.doScanAndQueue  skip sending to another customer: %r' % customer_idurl)
             continue
         known_suppliers = contactsdb.suppliers(customer_idurl)
         if not known_suppliers or id_url.is_some_empty(known_suppliers):
             if _Debug:
                 lg.out(_DebugLevel, 'data_sender.doScanAndQueue    found empty supplier(s) for customer %r, SKIP' % customer_idurl)
             continue
         known_backups = misc.sorted_backup_ids(list(backup_matrix.local_files().keys()), True)
         if _Debug:
             lg.out(_DebugLevel, 'data_sender.doScanAndQueue    found %d known suppliers for customer %r with %d backups' % (
                 len(known_suppliers), customer_idurl, len(known_backups)))
         for backupID in known_backups:
             this_customer_idurl = packetid.CustomerIDURL(backupID)
             if this_customer_idurl != customer_idurl:
                 continue
             customerGlobalID, pathID, _ = packetid.SplitBackupID(backupID, normalize_key_alias=True)
             item = backup_fs.GetByID(pathID, iterID=backup_fs.fsID(customer_idurl=customer_idurl))
             if not item:
                 if _Debug:
                     lg.out(_DebugLevel, 'data_sender.doScanAndQueue    skip sending backup %r path not exist in catalog' % backupID)
                 continue
             if item.key_id and customerGlobalID and customerGlobalID != item.key_id:
                 if _Debug:
                     lg.out(_DebugLevel, 'data_sender.doScanAndQueue    skip sending backup %r key is different in the catalog' % backupID)
                 continue
             packetsBySupplier = backup_matrix.ScanBlocksToSend(backupID, limit_per_supplier=None)
             total_for_customer = sum([len(v) for v in packetsBySupplier.values()])
             if _Debug:
                 lg.out(_DebugLevel, 'data_sender.doScanAndQueue    to be delivered for customer %r : %d' % (customer_idurl, total_for_customer))
             for supplierNum in packetsBySupplier.keys():
                 # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                 if supplierNum >= 0 and supplierNum < len(known_suppliers):
                     supplier_idurl = known_suppliers[supplierNum]
                 else:
                     supplier_idurl = None
                 if not supplier_idurl:
                     lg.warn('skip sending, unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r' % (
                         supplierNum, backupID, customer_idurl))
                     continue
                 for packetID in packetsBySupplier[supplierNum]:
                     backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                     if backupID_ != backupID:
                         lg.warn('skip sending, unexpected backupID supplierNum=%s for %s, customer_idurl=%r' % (
                             packetID, backupID, customer_idurl))
                         continue
                     if supplierNum_ != supplierNum:
                         lg.warn('skip sending, unexpected supplierNum %s for %s, customer_idurl=%r' % (
                             packetID, backupID, customer_idurl))
                         continue
                     if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                         if _Debug:
                             lg.out(_DebugLevel, 'data_sender.doScanAndQueue    %s already in sending queue for %r' % (packetID, supplier_idurl))
                         continue
                     if not io_throttle.OkToSend(supplier_idurl):
                         if _Debug:
                             lg.out(_DebugLevel + 6, 'data_sender.doScanAndQueue     skip sending, queue is busy for %r\n' % supplier_idurl)
                         continue
                     # customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                     # tranByID = gate.transfers_out_by_idurl().get(supplier_idurl, [])
                     # if len(tranByID) > 3:
                     #     log.write(u'transfers by %s: %d\n' % (supplier_idurl, len(tranByID)))
                     #     continue
                     customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                     filename = os.path.join(
                         settings.getLocalBackupsDir(),
                         customerGlobalID,
                         pathID,
                     )
                     if not os.path.isfile(filename):
                         if _Debug:
                             lg.out(_DebugLevel, 'data_sender.doScanAndQueue     %s is not a file\n' % filename)
                         continue
                     if io_throttle.QueueSendFile(
                         filename,
                         packetID,
                         supplier_idurl,
                         my_id.getIDURL(),
                         self._packetAcked,
                         self._packetFailed,
                     ):
                         progress += 1
                         if _Debug:
                             lg.out(_DebugLevel, 'data_sender.doScanAndQueue   put %s in the queue  progress=%d' % (packetID, progress, ))
                     else:
                         if _Debug:
                             lg.out(_DebugLevel, 'data_sender.doScanAndQueue    io_throttle.QueueSendFile FAILED %s' % packetID)
     if _Debug:
         lg.out(_DebugLevel, 'data_sender.doScanAndQueue progress=%s' % progress)
     self.automat('scan-done', progress)
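One visible difference between this version and Example 5 is how the ack/fail callbacks are passed to io_throttle.QueueSendFile(): per-packet lambdas that also carry itemInfo there, plain method references here. The supplier lookup itself is the same bounds-checked index into the customer's supplier list, sketched standalone below with placeholder IDURLs:

def resolve_supplier(supplier_num, known_suppliers):
    # mirror of the bounds check above: trust supplierNum only if it indexes
    # into the customer's current supplier list
    if 0 <= supplier_num < len(known_suppliers):
        return known_suppliers[supplier_num]
    return None

suppliers = ['http://id.example.com/alice.xml', 'http://id.example.com/bob.xml']
assert resolve_supplier(1, suppliers) == 'http://id.example.com/bob.xml'
assert resolve_supplier(5, suppliers) is None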