@classmethod
def tearDownClass(cls):
    
    # remove the temporary SSL cert and key created for the test server;
    # they must be made writable first or the unlink can fail on Windows
    for path in (cls._ssl_cert_path, cls._ssl_key_path):
        
        HydrusPaths.MakeFileWritable(path)
        
        os.unlink(path)
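

# Illustrative sketch, not from the hydrus test suite: the teardown above assumes a
# matching setUpClass created the cert/key files. This stand-in uses tempfile and
# os.chmod in place of the real cert generation and HydrusPaths.MakeFileWritable,
# just to show the make-writable-then-unlink cleanup pattern in isolation.

import os
import stat
import tempfile
import unittest

class CleanupExampleTest(unittest.TestCase):
    
    @classmethod
    def setUpClass(cls):
        
        # two throwaway files standing in for the SSL cert and key
        (cert_fd, cls._ssl_cert_path) = tempfile.mkstemp(suffix='.crt')
        (key_fd, cls._ssl_key_path) = tempfile.mkstemp(suffix='.key')
        
        os.close(cert_fd)
        os.close(key_fd)
        
        # make them read-only so the teardown has something to undo
        for path in (cls._ssl_cert_path, cls._ssl_key_path):
            
            os.chmod(path, stat.S_IREAD)
    
    @classmethod
    def tearDownClass(cls):
        
        for path in (cls._ssl_cert_path, cls._ssl_key_path):
            
            # stand-in for HydrusPaths.MakeFileWritable
            os.chmod(path, stat.S_IREAD | stat.S_IWRITE)
            
            os.unlink(path)
    
    def test_files_exist(self):
        
        self.assertTrue(os.path.exists(self._ssl_cert_path))
        self.assertTrue(os.path.exists(self._ssl_key_path))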
def _DoExport( self ):
    
    # get every file id that matches this export folder's search
    query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
    
    media_results = []
    
    i = 0
    base = 256
    
    # pull the media results in chunks of 256 so we can check for pause/shutdown between batches
    while i < len( query_hash_ids ):
        
        if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
            
            return
            
        
        if i == 0:
            
            ( last_i, i ) = ( 0, base )
            
        else:
            
            ( last_i, i ) = ( i, i + base )
            
        
        sub_query_hash_ids = query_hash_ids[ last_i : i ]
        
        more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
        
        media_results.extend( more_media_results )
        
    
    media_results.sort( key = lambda mr: mr.GetHashId() )
    
    #
    
    terms = ParseExportPhrase( self._phrase )
    
    # note every file already in the export directory so synchronise mode can delete leftovers later
    previous_paths = set()
    
    for ( root, dirnames, filenames ) in os.walk( self._path ):
        
        previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
        
    
    sync_paths = set()
    
    client_files_manager = HG.client_controller.client_files_manager
    
    num_copied = 0
    
    for media_result in media_results:
        
        if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
            
            return
            
        
        hash = media_result.GetHash()
        mime = media_result.GetMime()
        size = media_result.GetSize()
        
        source_path = client_files_manager.GetFilePath( hash, mime )
        
        filename = GenerateExportFilename( self._path, media_result, terms )
        
        dest_path = os.path.normpath( os.path.join( self._path, filename ) )
        
        # refuse any generated filename that would escape the export directory
        if not dest_path.startswith( self._path ):
            
            raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
            
        
        dest_path_dir = os.path.dirname( dest_path )
        
        HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
        
        if dest_path not in sync_paths:
            
            copied = HydrusPaths.MirrorFile( source_path, dest_path )
            
            if copied:
                
                num_copied += 1
                
                HydrusPaths.MakeFileWritable( dest_path )
                
            
        
        sync_paths.add( dest_path )
        
    
    if num_copied > 0:
        
        HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
        
    
    if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
        
        # delete anything in the directory this export did not just write, then prune empty subdirectories
        deletee_paths = previous_paths.difference( sync_paths )
        
        for deletee_path in deletee_paths:
            
            ClientPaths.DeletePath( deletee_path )
            
        
        deletee_dirs = set()
        
        for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
            
            if root == self._path:
                
                continue
                
            
            no_files = len( filenames ) == 0
            
            useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
            
            no_useful_dirs = len( useful_dirnames ) == 0
            
            if no_useful_dirs and no_files:
                
                deletee_dirs.add( root )
                
            
        
        for deletee_dir in deletee_dirs:
            
            if os.path.exists( deletee_dir ):
                
                HydrusPaths.DeletePath( deletee_dir )
                
            
        
        if len( deletee_paths ) > 0:
            
            HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
            
        
    
    if self._delete_from_client_after_export:
        
        # optionally delete the exported files from the client, submitting the deletes in chunks of 64
        deletee_hashes = { media_result.GetHash() for media_result in media_results }
        
        chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
        
        reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
        
        content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason ) for chunk_of_hashes in chunks_of_hashes ]
        
        for content_update in content_updates:
            
            HG.client_controller.WriteSynchronous( 'content_updates', { CC.LOCAL_FILE_SERVICE_KEY : [ content_update ] } )
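

# Illustrative sketch, not part of the hydrus codebase: the batched-read pattern used at
# the top of _DoExport above, isolated so it can run on its own. read_in_chunks,
# fetch_results and should_stop are hypothetical stand-ins for the slice loop,
# HG.client_controller.Read and the pause/shutdown check.

def read_in_chunks(ids, fetch_results, chunk_size=256, should_stop=lambda: False):
    """Fetch results for ids in chunk_size batches, bailing out early if asked to stop."""
    
    results = []
    
    for start in range(0, len(ids), chunk_size):
        
        if should_stop():
            
            # mirror _DoExport: give up quietly between batches if a pause/shutdown was requested
            return results
            
        
        results.extend(fetch_results(ids[start : start + chunk_size]))
        
    
    return results


if __name__ == '__main__':
    
    # toy usage: the 'database read' just echoes the chunk it was given
    print(read_in_chunks(list(range(10)), lambda chunk: chunk, chunk_size=4))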
def do_it(directory, neighbouring_txt_tag_service_keys, delete_afterwards, export_symlinks, quit_afterwards):
    
    # runs on a worker thread; to_do, num_to_do, export_tag_txts, client_files_manager,
    # qt_update_label and qt_done come from the enclosing dialog scope
    pauser = HydrusData.BigJobPauser()
    
    for (index, (ordering_index, media)) in enumerate(to_do):
        
        try:
            
            QP.CallAfter(qt_update_label, HydrusData.ConvertValueRangeToPrettyString(index + 1, num_to_do))
            
            hash = media.GetHash()
            mime = media.GetMime()
            
            path = self._GetPath(media)
            
            path = os.path.normpath(path)
            
            # refuse any generated filename that would escape the export directory
            if not path.startswith(directory):
                
                raise Exception('It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'.format(hash.hex(), path))
                
            
            path_dir = os.path.dirname(path)
            
            HydrusPaths.MakeSureDirectoryExists(path_dir)
            
            if export_tag_txts:
                
                # write a neighbouring .txt sidecar holding the file's current tags
                tags_manager = media.GetTagsManager()
                
                tags = set()
                
                for service_key in neighbouring_txt_tag_service_keys:
                    
                    current_tags = tags_manager.GetCurrent(service_key, ClientTags.TAG_DISPLAY_ACTUAL)
                    
                    tags.update(current_tags)
                    
                
                tags = sorted(tags)
                
                txt_path = path + '.txt'
                
                with open(txt_path, 'w', encoding='utf-8') as f:
                    
                    f.write(os.linesep.join(tags))
                    
                
            
            source_path = client_files_manager.GetFilePath(hash, mime, check_file_exists=False)
            
            if export_symlinks:
                
                os.symlink(source_path, path)
                
            else:
                
                HydrusPaths.MirrorFile(source_path, path)
                
                HydrusPaths.MakeFileWritable(path)
                
            
        except:
            
            QP.CallAfter(QW.QMessageBox.information, self, 'Information', 'Encountered a problem while attempting to export file with index ' + str(ordering_index + 1) + ':' + os.linesep * 2 + traceback.format_exc())
            
            break
            
        
        pauser.Pause()
        
    
    if delete_afterwards:
        
        # delete the exported files from the client, submitting the deletes in chunks of 64
        QP.CallAfter(qt_update_label, 'deleting')
        
        deletee_hashes = {media.GetHash() for (ordering_index, media) in to_do}
        
        chunks_of_hashes = HydrusData.SplitListIntoChunks(deletee_hashes, 64)
        
        reason = 'Deleted after manual export to "{}".'.format(directory)
        
        content_updates = [HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason=reason) for chunk_of_hashes in chunks_of_hashes]
        
        for content_update in content_updates:
            
            HG.client_controller.WriteSynchronous('content_updates', {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})
            
        
    
    QP.CallAfter(qt_update_label, 'done!')
    
    time.sleep(1)
    
    QP.CallAfter(qt_update_label, 'export')
    
    QP.CallAfter(qt_done, quit_afterwards)
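

# Illustrative sketch, not from the hydrus codebase: the destination-path safety check
# shared by both export routines above, isolated as a standalone helper. The name
# ensure_inside_directory is hypothetical; the normpath + startswith logic matches the
# checks in _DoExport and do_it.

import os

def ensure_inside_directory(directory, filename):
    """Join filename onto directory and refuse anything that escapes the export directory."""
    
    dest_path = os.path.normpath(os.path.join(directory, filename))
    
    if not dest_path.startswith(directory):
        
        raise Exception('It seems a destination path was above the main export directory! The destination path was "{}".'.format(dest_path))
        
    
    return dest_path


if __name__ == '__main__':
    
    base = os.path.abspath('exports')
    
    # a normal filename stays inside the directory
    print(ensure_inside_directory(base, 'ok.png'))
    
    # a traversal attempt normalises to a path outside the directory and is rejected
    try:
        
        ensure_inside_directory(base, os.path.join('..', 'escape.png'))
        
    except Exception as e:
        
        print(e)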