Example #1
0
    def tearDownClass(cls):
        """Clean up the class-level SSL fixtures.

        Makes each generated cert/key file writeable again (they may have
        been created read-only) and then removes it from disk.
        """

        ssl_paths = [cls._ssl_cert_path, cls._ssl_key_path]

        for ssl_path in ssl_paths:

            # the file may be read-only; unlock it before deleting
            HydrusPaths.MakeFileWriteable(ssl_path)

            os.unlink(ssl_path)
Example #2
0
        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):
            """Background worker that exports the queued media to *directory*.

            Progress is surfaced through a cancellable JobKey popup and via
            QP.CallAfter(qt_update_label, ...) so UI updates happen on the Qt
            thread.  Relies on closure variables from the enclosing scope:
            to_do, num_to_do, export_tag_txts, client_files_manager,
            qt_update_label, qt_done and self.
            """

            job_key = ClientThreading.JobKey(cancellable=True)

            job_key.SetStatusTitle('file export')

            HG.client_controller.pub('message', job_key)

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media, path)) in enumerate(to_do):

                if job_key.IsCancelled():

                    break

                try:

                    x_of_y = HydrusData.ConvertValueRangeToPrettyString(
                        index + 1, num_to_do)

                    job_key.SetVariable('popup_text_1',
                                        'Done {}'.format(x_of_y))
                    job_key.SetVariable('popup_gauge_1',
                                        (index + 1, num_to_do))

                    QP.CallAfter(qt_update_label, x_of_y)

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = os.path.normpath(path)

                    # guard against a filename phrase escaping the export dir
                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        # write the file's current tags to a neighbouring .txt

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key, ClientTags.TAG_DISPLAY_ACTUAL)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWriteable(path)

                except Exception:

                    # was a bare 'except:'; narrowing to Exception lets
                    # KeyboardInterrupt/SystemExit propagate instead of
                    # being swallowed and reported as an export error

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if not job_key.IsCancelled() and delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean(
                    'delete_lock_for_archived_files')

                if delete_lock_for_archived_files:

                    # with the archive lock on, only inboxed files are deleted

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                        if not media.HasArchive()
                    }

                else:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                    }

                # delete in chunks of 64 to keep each write small
                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            job_key.DeleteVariable('popup_gauge_1')
            job_key.SetVariable('popup_text_1', 'Done!')

            job_key.Finish()

            # auto-dismiss the popup after five seconds
            job_key.Delete(5)

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)
Example #3
0
    def _DoExport(self):
        """Run this export folder once: copy matching files out, optionally
        delete orphaned files/dirs (synchronise mode), and optionally delete
        the exported files from the client afterwards.

        Returns early (without raising) if export-folder sync is paused or
        the thread is shutting down.
        """

        query_hash_ids = HG.client_controller.Read('file_query_ids',
                                                   self._file_search_context)

        media_results = []

        i = 0

        # fetch media results in batches of 256 so a shutdown/pause can
        # interrupt a big query promptly
        base = 256

        while i < len(query_hash_ids):

            if HC.options[
                    'pause_export_folders_sync'] or HydrusThreading.IsThreadShuttingDown(
                    ):

                return

            if i == 0: (last_i, i) = (0, base)
            else: (last_i, i) = (i, i + base)

            sub_query_hash_ids = query_hash_ids[last_i:i]

            more_media_results = HG.client_controller.Read(
                'media_results_from_ids', sub_query_hash_ids)

            media_results.extend(more_media_results)

        # stable order so generated filenames are deterministic run-to-run
        media_results.sort(key=lambda mr: mr.GetHashId())

        #

        terms = ParseExportPhrase(self._phrase)

        # snapshot of everything currently on disk under the export path,
        # used later to find orphans in synchronise mode
        previous_paths = set()

        for (root, dirnames, filenames) in os.walk(self._path):

            previous_paths.update(
                (os.path.join(root, filename) for filename in filenames))

        sync_paths = set()

        client_files_manager = HG.client_controller.client_files_manager

        num_copied = 0

        for media_result in media_results:

            if HC.options[
                    'pause_export_folders_sync'] or HydrusThreading.IsThreadShuttingDown(
                    ):

                return

            hash = media_result.GetHash()
            mime = media_result.GetMime()
            size = media_result.GetSize()

            try:

                source_path = client_files_manager.GetFilePath(hash, mime)

            except HydrusExceptions.FileMissingException:

                # bug fix: the message previously contained a '{}' placeholder
                # but .format was never called, so the hash was never shown
                raise Exception(
                    'A file to be exported, hash "{}", was missing! You should run file maintenance (under database->maintenance->files) to check the files for the export folder\'s search, and possibly all your files.'
                    .format(hash.hex()))

            filename = GenerateExportFilename(self._path, media_result, terms)

            dest_path = os.path.normpath(os.path.join(self._path, filename))

            # guard against a filename phrase escaping the export dir
            if not dest_path.startswith(self._path):

                raise Exception(
                    'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'
                    .format(self._path, hash.hex(), dest_path))

            dest_path_dir = os.path.dirname(dest_path)

            HydrusPaths.MakeSureDirectoryExists(dest_path_dir)

            if dest_path not in sync_paths:

                # MirrorFile only copies when the destination is missing/different
                copied = HydrusPaths.MirrorFile(source_path, dest_path)

                if copied:

                    num_copied += 1

                    HydrusPaths.MakeFileWriteable(dest_path)

            sync_paths.add(dest_path)

        if num_copied > 0:

            HydrusData.Print('Export folder ' + self._name + ' exported ' +
                             HydrusData.ToHumanInt(num_copied) + ' files.')

        if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:

            # synchronise mode: remove anything on disk that this run did not export

            deletee_paths = previous_paths.difference(sync_paths)

            for deletee_path in deletee_paths:

                ClientPaths.DeletePath(deletee_path)

            deletee_dirs = set()

            # bottom-up walk so a dir whose only contents are soon-to-be-deleted
            # subdirs is itself recognised as empty
            for (root, dirnames, filenames) in os.walk(self._path,
                                                       topdown=False):

                if root == self._path:

                    continue

                no_files = len(filenames) == 0

                useful_dirnames = [
                    dirname for dirname in dirnames
                    if os.path.join(root, dirname) not in deletee_dirs
                ]

                no_useful_dirs = len(useful_dirnames) == 0

                if no_useful_dirs and no_files:

                    deletee_dirs.add(root)

            for deletee_dir in deletee_dirs:

                if os.path.exists(deletee_dir):

                    HydrusPaths.DeletePath(deletee_dir)

            if len(deletee_paths) > 0:

                HydrusData.Print(
                    'Export folder {} deleted {} files and {} folders.'.format(
                        self._name, HydrusData.ToHumanInt(len(deletee_paths)),
                        HydrusData.ToHumanInt(len(deletee_dirs))))

        if self._delete_from_client_after_export:

            local_file_service_keys = HG.client_controller.services_manager.GetServiceKeys(
                (HC.LOCAL_FILE_DOMAIN, ))

            service_keys_to_deletee_hashes = collections.defaultdict(list)

            delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean(
                'delete_lock_for_archived_files')

            for media_result in media_results:

                # with the archive lock on, only inboxed files are deleted
                if delete_lock_for_archived_files and not media_result.GetInbox(
                ):

                    continue

                hash = media_result.GetHash()

                deletee_service_keys = media_result.GetLocationsManager(
                ).GetCurrent().intersection(local_file_service_keys)

                for deletee_service_key in deletee_service_keys:

                    service_keys_to_deletee_hashes[deletee_service_key].append(
                        hash)

            reason = 'Deleted after export to Export Folder "{}".'.format(
                self._path)

            for (service_key,
                 deletee_hashes) in service_keys_to_deletee_hashes.items():

                # delete in chunks of 64 to keep each write small
                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                for chunk_of_hashes in chunks_of_hashes:

                    content_update = HydrusData.ContentUpdate(
                        HC.CONTENT_TYPE_FILES,
                        HC.CONTENT_UPDATE_DELETE,
                        chunk_of_hashes,
                        reason=reason)

                    HG.client_controller.WriteSynchronous(
                        'content_updates', {service_key: [content_update]})