Example #1
    def DoDeferredPhysicalDeletes(self):

        num_files_deleted = 0
        num_thumbnails_deleted = 0

        pauser = HydrusData.BigJobPauser()

        (file_hash, thumbnail_hash) = self.Read('deferred_physical_delete')

        while (file_hash is not None
               or thumbnail_hash is not None) and not HG.view_shutdown:

            if file_hash is not None:

                path = ServerFiles.GetExpectedFilePath(file_hash)

                if os.path.exists(path):

                    HydrusPaths.RecyclePath(path)

                    num_files_deleted += 1

            if thumbnail_hash is not None:

                path = ServerFiles.GetExpectedThumbnailPath(thumbnail_hash)

                if os.path.exists(path):

                    HydrusPaths.RecyclePath(path)

                    num_thumbnails_deleted += 1

            self.WriteSynchronous('clear_deferred_physical_delete',
                                  file_hash=file_hash,
                                  thumbnail_hash=thumbnail_hash)

            (file_hash, thumbnail_hash) = self.Read('deferred_physical_delete')

            pauser.Pause()

        if num_files_deleted > 0 or num_thumbnails_deleted > 0:

            HydrusData.Print(
                'Physically deleted {} files and {} thumbnails from file storage.'
                .format(HydrusData.ToHumanInt(num_files_deleted),
                        HydrusData.ToHumanInt(num_thumbnails_deleted)))
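
Every example here follows the same pattern: a long loop calls pauser.Pause() once per item so the job periodically yields. BigJobPauser itself is never shown; the following is a minimal sketch of the idea, where the period and wait_time names and defaults are assumptions rather than Hydrus's actual definition.

import time

class BigJobPauser(object):

    def __init__(self, period=10, wait_time=0.1):
        # 'period' and 'wait_time' are assumed names/defaults, not Hydrus's definition.
        self._period = period
        self._wait_time = wait_time
        self._next_pause = time.time() + self._period

    def Pause(self):
        # Cheap to call per item: sleeps briefly only once every 'period' seconds,
        # letting other threads and the OS breathe during a long loop.
        if time.time() > self._next_pause:
            time.sleep(self._wait_time)
            self._next_pause = time.time() + self._period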
Example #2
def CopyAndMergeTree(source, dest):

    pauser = HydrusData.BigJobPauser()

    MakeSureDirectoryExists(dest)

    num_errors = 0

    for (root, dirnames, filenames) in os.walk(source):

        dest_root = root.replace(source, dest)

        for dirname in dirnames:

            pauser.Pause()

            source_path = os.path.join(root, dirname)
            dest_path = os.path.join(dest_root, dirname)

            MakeSureDirectoryExists(dest_path)

            shutil.copystat(source_path, dest_path)

        for filename in filenames:

            if num_errors > 5:

                raise Exception('Too many errors, directory copy abandoned.')

            pauser.Pause()

            source_path = os.path.join(root, filename)
            dest_path = os.path.join(dest_root, filename)

            ok = MirrorFile(source_path, dest_path)

            if not ok:

                num_errors += 1
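
CopyAndMergeTree leans on MirrorFile, which is also not reproduced here. From its call sites (a success flag whose failure the caller counts as an error), it presumably copies the file only when the destination is missing or stale, and converts exceptions into a False return. A hedged sketch, with paths_have_same_size_and_date a hypothetical helper:

import os
import shutil

def paths_have_same_size_and_date(a, b):
    # Hypothetical staleness check: same byte count and same mtime, to the second.
    stat_a = os.stat(a)
    stat_b = os.stat(b)
    return stat_a.st_size == stat_b.st_size and int(stat_a.st_mtime) == int(stat_b.st_mtime)

def MirrorFile(source, dest):
    # Sketch only: copy source over dest unless dest already looks identical.
    # Returning False instead of raising lets callers count errors and move on.
    try:
        if not os.path.exists(dest) or not paths_have_same_size_and_date(source, dest):
            shutil.copy2(source, dest)
        return True
    except Exception:
        return False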
Example #3
        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):

            job_key = ClientThreading.JobKey(cancellable=True)

            job_key.SetStatusTitle('file export')

            HG.client_controller.pub('message', job_key)

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media, path)) in enumerate(to_do):

                if job_key.IsCancelled():

                    break

                try:

                    x_of_y = HydrusData.ConvertValueRangeToPrettyString(
                        index + 1, num_to_do)

                    job_key.SetVariable('popup_text_1',
                                        'Done {}'.format(x_of_y))
                    job_key.SetVariable('popup_gauge_1',
                                        (index + 1, num_to_do))

                    QP.CallAfter(qt_update_label, x_of_y)

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = os.path.normpath(path)

                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key, ClientTags.TAG_DISPLAY_ACTUAL)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWriteable(path)

                except:

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if not job_key.IsCancelled() and delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean(
                    'delete_lock_for_archived_files')

                if delete_lock_for_archived_files:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                        if not media.HasArchive()
                    }

                else:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                    }

                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            job_key.DeleteVariable('popup_gauge_1')
            job_key.SetVariable('popup_text_1', 'Done!')

            job_key.Finish()

            job_key.Delete(5)

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)
Example #4
def MirrorTree(source, dest, text_update_hook=None, is_cancelled_hook=None):

    pauser = HydrusData.BigJobPauser()

    MakeSureDirectoryExists(dest)

    num_errors = 0

    for (root, dirnames, filenames) in os.walk(source):

        if is_cancelled_hook is not None and is_cancelled_hook():

            return

        if text_update_hook is not None:

            text_update_hook('Copying ' + root + '.')

        dest_root = root.replace(source, dest)

        surplus_dest_paths = {
            os.path.join(dest_root, dest_filename)
            for dest_filename in os.listdir(dest_root)
        }

        for dirname in dirnames:

            pauser.Pause()

            source_path = os.path.join(root, dirname)
            dest_path = os.path.join(dest_root, dirname)

            surplus_dest_paths.discard(dest_path)

            MakeSureDirectoryExists(dest_path)

            shutil.copystat(source_path, dest_path)

        for filename in filenames:

            if num_errors > 5:

                raise Exception('Too many errors, directory copy abandoned.')

            pauser.Pause()

            source_path = os.path.join(root, filename)

            dest_path = os.path.join(dest_root, filename)

            surplus_dest_paths.discard(dest_path)

            ok = MirrorFile(source_path, dest_path)

            if not ok:

                num_errors += 1

        for dest_path in surplus_dest_paths:

            pauser.Pause()

            DeletePath(dest_path)
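
Unlike CopyAndMergeTree in Example #2, MirrorTree also deletes anything in the destination that no longer exists in the source, so dest ends up an exact copy. A usage sketch, assuming the MirrorTree above is in scope; the paths and hooks are hypothetical:

# Hypothetical usage: mirror a live folder into a backup, printing progress
# and letting a surrounding job cancel the walk between directories.
cancelled = False

def text_hook(message):
    print(message)

def cancel_hook():
    return cancelled

MirrorTree('/path/to/db', '/path/to/db_backup',
           text_update_hook=text_hook,
           is_cancelled_hook=cancel_hook)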
Example #5
def MergeTree(source, dest, text_update_hook=None):

    pauser = HydrusData.BigJobPauser()

    if not os.path.exists(dest):

        try:

            shutil.move(source, dest)

        except OSError:

            # if there were read only files in source and this was partition to partition, the copy2 goes ok but the subsequent source unlink fails
            # so, if it seems this has happened, let's just try a walking mergetree, which should be able to deal with these readonlies on a file-by-file basis
            if os.path.exists(dest):

                MergeTree(source, dest, text_update_hook=text_update_hook)

    else:

        if len(os.listdir(dest)) == 0:

            for filename in os.listdir(source):

                source_path = os.path.join(source, filename)
                dest_path = os.path.join(dest, filename)

                if not os.path.isdir(source_path):

                    MakeFileWritable(source_path)

                shutil.move(source_path, dest_path)

        else:

            num_errors = 0

            for (root, dirnames, filenames) in os.walk(source):

                if text_update_hook is not None:

                    text_update_hook('Copying ' + root + '.')

                dest_root = root.replace(source, dest)

                for dirname in dirnames:

                    pauser.Pause()

                    source_path = os.path.join(root, dirname)
                    dest_path = os.path.join(dest_root, dirname)

                    MakeSureDirectoryExists(dest_path)

                    shutil.copystat(source_path, dest_path)

                for filename in filenames:

                    if num_errors > 5:

                        raise Exception(
                            'Too many errors, directory move abandoned.')

                    pauser.Pause()

                    source_path = os.path.join(root, filename)
                    dest_path = os.path.join(dest_root, filename)

                    ok = MergeFile(source_path, dest_path)

                    if not ok:

                        num_errors += 1

            if num_errors == 0:

                DeletePath(source)
Example #6
 def work_callable():
     
     job_key = ClientThreading.JobKey( cancellable = True )
     
     title = 'moving files' if action == HC.CONTENT_UPDATE_MOVE else 'adding files'
     
     job_key.SetStatusTitle( title )
     
     BLOCK_SIZE = 64
     
     if len( applicable_media ) > BLOCK_SIZE:
         
         HG.client_controller.pub( 'message', job_key )
         
     
     pauser = HydrusData.BigJobPauser()
     
     num_to_do = len( applicable_media )
     
     now = HydrusData.GetNow()
     
     for ( i, block_of_media ) in enumerate( HydrusData.SplitListIntoChunks( applicable_media, BLOCK_SIZE ) ):
         
         if job_key.IsCancelled():
             
             break
             
         
         job_key.SetVariable( 'popup_text_1', HydrusData.ConvertValueRangeToPrettyString( i * BLOCK_SIZE, num_to_do ) )
         job_key.SetVariable( 'popup_gauge_1', ( i * BLOCK_SIZE, num_to_do ) )
         
         content_updates = []
         undelete_hashes = set()
         
         for m in block_of_media:
             
             if dest_service_key in m.GetLocationsManager().GetDeleted():
                 
                 undelete_hashes.add( m.GetHash() )
                 
             else:
                 
                 content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADD, ( m.GetMediaResult().GetFileInfoManager(), now ) ) )
                 
             
         
         if len( undelete_hashes ) > 0:
             
             content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, undelete_hashes ) )
             
         
         HG.client_controller.WriteSynchronous( 'content_updates', { dest_service_key : content_updates } )
         
         if action == HC.CONTENT_UPDATE_MOVE:
             
             block_of_hashes = [ m.GetHash() for m in block_of_media ]
             
             content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, block_of_hashes, reason = 'Moved to {}'.format( dest_service_name ) ) ]
             
             HG.client_controller.WriteSynchronous( 'content_updates', { source_service_key : content_updates } )
             
         
         pauser.Pause()
         
     
     job_key.Delete()

Example #7
        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media)) in enumerate(to_do):

                try:

                    QP.CallAfter(
                        qt_update_label,
                        HydrusData.ConvertValueRangeToPrettyString(
                            index + 1, num_to_do))

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = self._GetPath(media)

                    path = os.path.normpath(path)

                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key,
                                ClientTags.TAG_DISPLAY_SIBLINGS_AND_PARENTS)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWritable(path)

                except:

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                deletee_hashes = {
                    media.GetHash()
                    for (ordering_index, media) in to_do
                }

                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)
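
Examples #3, #6 and #7 all batch their deletes through HydrusData.SplitListIntoChunks so that each synchronous database write stays small (64 hashes at a time). The helper is not shown; it presumably behaves like this generator (a sketch, not Hydrus's code):

def SplitListIntoChunks(items, chunk_size):
    # Sketch: yield successive slices of at most chunk_size items.
    # The examples pass sets, so materialise to a list before slicing.
    if isinstance(items, set):
        items = list(items)
    for i in range(0, len(items), chunk_size):
        yield items[i:i + chunk_size]

# e.g. list(SplitListIntoChunks(set(range(10)), 4)) -> three chunks of 4, 4 and 2 items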