def _ActionPaths(self):
    """Apply this import folder's configured post-import action to finished file seeds.

    For each terminal status, the configured action is one of delete, move, or
    ignore. Delete/move also handle the optional '<path>.txt' sidecar file, and
    successfully-actioned seeds are removed from the file seed cache.
    """
    for status in (CC.STATUS_SUCCESSFUL_AND_NEW, CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, CC.STATUS_DELETED, CC.STATUS_ERROR):
        action = self._actions[status]
        if action == CC.IMPORT_FOLDER_DELETE:
            while True:
                file_seed = self._file_seed_cache.GetNextFileSeed(status)
                # stop when this status is exhausted or the program is closing
                if file_seed is None or HG.view_shutdown:
                    break
                path = file_seed.file_seed_data
                try:
                    if os.path.exists(path) and not os.path.isdir(path):
                        ClientPaths.DeletePath(path)
                    txt_path = path + '.txt'
                    if os.path.exists(txt_path):
                        ClientPaths.DeletePath(txt_path)
                    self._file_seed_cache.RemoveFileSeeds((file_seed, ))
                except Exception as e:
                    # chain the original exception so the real cause is not lost
                    raise Exception(
                        'Tried to delete "{}", but could not.'.format(
                            path)) from e
        elif action == CC.IMPORT_FOLDER_MOVE:
            while True:
                file_seed = self._file_seed_cache.GetNextFileSeed(status)
                if file_seed is None or HG.view_shutdown:
                    break
                path = file_seed.file_seed_data
                try:
                    dest_dir = self._action_locations[status]
                    if not os.path.exists(dest_dir):
                        raise Exception(
                            'Tried to move "{}" to "{}", but the destination directory did not exist.'
                            .format(path, dest_dir))
                    if os.path.exists(path) and not os.path.isdir(path):
                        filename = os.path.basename(path)
                        dest_path = os.path.join(dest_dir, filename)
                        # avoid clobbering an existing file at the destination
                        dest_path = HydrusPaths.AppendPathUntilNoConflicts(
                            dest_path)
                        HydrusPaths.MergeFile(path, dest_path)
                    txt_path = path + '.txt'
                    if os.path.exists(txt_path):
                        txt_filename = os.path.basename(txt_path)
                        txt_dest_path = os.path.join(
                            dest_dir, txt_filename)
                        txt_dest_path = HydrusPaths.AppendPathUntilNoConflicts(
                            txt_dest_path)
                        HydrusPaths.MergeFile(txt_path, txt_dest_path)
                    self._file_seed_cache.RemoveFileSeeds((file_seed, ))
                except Exception as e:
                    # a failed move pauses the whole folder rather than looping on the error
                    HydrusData.ShowText('Import folder tried to move ' + path + ', but could not:')
                    HydrusData.ShowException(e)
                    HydrusData.ShowText('Import folder has been paused.')
                    self._paused = True
                    return
        elif action == CC.IMPORT_FOLDER_IGNORE:
            # BUG FIX: the original compared `status` (a status constant) against
            # IMPORT_FOLDER_IGNORE (an action constant); like the branches above,
            # the `action` is what must be tested here.
            pass
def _WorkOnFiles(self, page_key):
    """Import the next unknown-status file seed for this page.

    On a successful import the file may be presented to the page, and the
    source file (plus any '<path>.txt' sidecar) is deleted when the folder is
    configured to do so. Sleeps briefly afterwards if real work was done.
    """
    file_seed = self._file_seed_cache.GetNextFileSeed(CC.STATUS_UNKNOWN)
    if file_seed is None:
        return
    did_substantial_work = False
    source_path = file_seed.file_seed_data
    with self._lock:
        self._current_action = 'importing'
    def status_hook(message):
        # show only the first line of any multi-line status text
        with self._lock:
            self._current_action = message.splitlines()[0] if len(message) > 0 else message
    file_seed.ImportPath(self._file_seed_cache,
                         self._file_import_options,
                         status_hook=status_hook)
    did_substantial_work = True
    if file_seed.status in CC.SUCCESSFUL_IMPORT_STATES:
        if file_seed.ShouldPresent(self._file_import_options):
            file_seed.PresentToPage(page_key)
            did_substantial_work = True
        if self._delete_after_success:
            # best-effort delete: report failures but do not abort the import loop
            def try_delete(deletee_path):
                try:
                    ClientPaths.DeletePath(deletee_path)
                except Exception as e:
                    HydrusData.ShowText('While attempting to delete ' + deletee_path + ', the following error occurred:')
                    HydrusData.ShowException(e)
            try_delete(source_path)
            txt_sidecar = source_path + '.txt'
            if os.path.exists(txt_sidecar):
                try_delete(txt_sidecar)
    with self._lock:
        self._current_action = ''
    if did_substantial_work:
        time.sleep(
            ClientImporting.DID_SUBSTANTIAL_FILE_WORK_MINIMUM_SLEEP_TIME)
def _DoExport( self ):
    """Run one export pass for this export folder.

    Queries the search context for matching files, mirrors them into the
    export directory under generated filenames, optionally synchronises the
    directory (deleting files/dirs no longer in the search), and optionally
    deletes the exported files from the client's local file services.

    Returns early without error if export folders are paused or the thread
    is shutting down.
    """
    query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
    media_results = []
    i = 0
    base = 256
    # fetch media results in chunks of `base` so a huge query stays responsive
    while i < len( query_hash_ids ):
        if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
            return
        if i == 0:
            ( last_i, i ) = ( 0, base )
        else:
            ( last_i, i ) = ( i, i + base )
        sub_query_hash_ids = query_hash_ids[ last_i : i ]
        more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
        media_results.extend( more_media_results )
    # stable ordering so filename generation is deterministic between passes
    media_results.sort( key = lambda mr: mr.GetHashId() )
    terms = ParseExportPhrase( self._phrase )
    # remember everything currently on disk so a SYNCHRONISE run can delete leftovers
    previous_paths = set()
    for ( root, dirnames, filenames ) in os.walk( self._path ):
        previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
    sync_paths = set()
    client_files_manager = HG.client_controller.client_files_manager
    num_copied = 0
    for media_result in media_results:
        if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
            return
        hash = media_result.GetHash()
        mime = media_result.GetMime()
        try:
            source_path = client_files_manager.GetFilePath( hash, mime )
        except HydrusExceptions.FileMissingException:
            # BUG FIX: the original raised this string without calling .format,
            # so the user saw a literal '{}' instead of the missing file's hash
            raise Exception( 'A file to be exported, hash "{}", was missing! You should run file maintenance (under database->maintenance->files) to check the files for the export folder\'s search, and possibly all your files.'.format( hash.hex() ) )
        filename = GenerateExportFilename( self._path, media_result, terms )
        dest_path = os.path.normpath( os.path.join( self._path, filename ) )
        # refuse to write outside the export directory (e.g. a phrase with '..')
        if not dest_path.startswith( self._path ):
            raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
        dest_path_dir = os.path.dirname( dest_path )
        HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
        if dest_path not in sync_paths:
            # MirrorFile only reports a copy when the destination actually changed
            copied = HydrusPaths.MirrorFile( source_path, dest_path )
            if copied:
                num_copied += 1
                HydrusPaths.TryToGiveFileNicePermissionBits( dest_path )
        sync_paths.add( dest_path )
    if num_copied > 0:
        HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
    if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
        # delete files that are on disk but no longer in the search results
        deletee_paths = previous_paths.difference( sync_paths )
        for deletee_path in deletee_paths:
            ClientPaths.DeletePath( deletee_path )
        # bottom-up walk so emptied child dirs make their parents deletable too
        deletee_dirs = set()
        for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
            if root == self._path:
                continue
            no_files = len( filenames ) == 0
            useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
            no_useful_dirs = len( useful_dirnames ) == 0
            if no_useful_dirs and no_files:
                deletee_dirs.add( root )
        for deletee_dir in deletee_dirs:
            if os.path.exists( deletee_dir ):
                HydrusPaths.DeletePath( deletee_dir )
        if len( deletee_paths ) > 0:
            HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
    if self._delete_from_client_after_export:
        local_file_service_keys = HG.client_controller.services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, ) )
        service_keys_to_deletee_hashes = collections.defaultdict( list )
        delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean( 'delete_lock_for_archived_files' )
        for media_result in media_results:
            # respect the archived-file delete lock: only inboxed files may be deleted
            if delete_lock_for_archived_files and not media_result.GetInbox():
                continue
            hash = media_result.GetHash()
            deletee_service_keys = media_result.GetLocationsManager().GetCurrent().intersection( local_file_service_keys )
            for deletee_service_key in deletee_service_keys:
                service_keys_to_deletee_hashes[ deletee_service_key ].append( hash )
        reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
        for ( service_key, deletee_hashes ) in service_keys_to_deletee_hashes.items():
            # delete in small chunks to keep each database write short
            chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
            for chunk_of_hashes in chunks_of_hashes:
                content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason )
                HG.client_controller.WriteSynchronous( 'content_updates', { service_key : [ content_update ] } )
def _WorkOnFiles(self):
    """Import the next unknown-status file seed, updating the files-status text.

    A successful import may be presented to this importer's page; when
    configured, the source file and any '<path>.txt' sidecar are deleted
    afterwards (best effort). Always sleeps briefly before returning.
    """
    file_seed = self._file_seed_cache.GetNextFileSeed(CC.STATUS_UNKNOWN)
    if file_seed is None:
        return
    source_path = file_seed.file_seed_data
    with self._lock:
        self._files_status = 'importing'
    def status_hook(message):
        with self._lock:
            self._files_status = ClientImportControl.NeatenStatusText(message)
    file_seed.ImportPath(self._file_seed_cache,
                         self._file_import_options,
                         status_hook=status_hook)
    if file_seed.status in CC.SUCCESSFUL_IMPORT_STATES:
        presentation_import_options = self._file_import_options.GetPresentationImportOptions()
        if file_seed.ShouldPresent(presentation_import_options):
            file_seed.PresentToPage(self._page_key)
        if self._delete_after_success:
            # delete the source file unconditionally, the sidecar only if present;
            # failures are reported but never abort the import loop
            for deletee_path in (source_path, source_path + '.txt'):
                if deletee_path != source_path and not os.path.exists(deletee_path):
                    continue
                try:
                    ClientPaths.DeletePath(deletee_path)
                except Exception as e:
                    HydrusData.ShowText('While attempting to delete ' + deletee_path + ', the following error occurred:')
                    HydrusData.ShowException(e)
    with self._lock:
        self._files_status = ''
    time.sleep(
        ClientImporting.DID_SUBSTANTIAL_FILE_WORK_MINIMUM_SLEEP_TIME)
def _DoExport( self ):
    """Run one export pass for this export folder (legacy options variant).

    Queries the search context for matching files, mirrors them into the
    export directory under generated filenames, optionally synchronises the
    directory (deleting files/dirs no longer in the search), and optionally
    deletes the exported files from the client's local file service.

    Returns early without error if export folders are paused or the thread
    is shutting down.
    """
    query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
    media_results = []
    i = 0
    base = 256
    # fetch media results in chunks of `base` so a huge query stays responsive
    while i < len( query_hash_ids ):
        if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
            return
        if i == 0:
            ( last_i, i ) = ( 0, base )
        else:
            ( last_i, i ) = ( i, i + base )
        sub_query_hash_ids = query_hash_ids[ last_i : i ]
        more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
        media_results.extend( more_media_results )
    # stable ordering so filename generation is deterministic between passes
    media_results.sort( key = lambda mr: mr.GetHashId() )
    terms = ParseExportPhrase( self._phrase )
    # remember everything currently on disk so a SYNCHRONISE run can delete leftovers
    previous_paths = set()
    for ( root, dirnames, filenames ) in os.walk( self._path ):
        previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
    sync_paths = set()
    client_files_manager = HG.client_controller.client_files_manager
    num_copied = 0
    for media_result in media_results:
        if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
            return
        hash = media_result.GetHash()
        mime = media_result.GetMime()
        # note: the original also fetched media_result.GetSize() into an unused
        # local; that dead assignment has been removed
        source_path = client_files_manager.GetFilePath( hash, mime )
        filename = GenerateExportFilename( self._path, media_result, terms )
        dest_path = os.path.normpath( os.path.join( self._path, filename ) )
        # refuse to write outside the export directory (e.g. a phrase with '..')
        if not dest_path.startswith( self._path ):
            raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
        dest_path_dir = os.path.dirname( dest_path )
        HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
        if dest_path not in sync_paths:
            # MirrorFile only reports a copy when the destination actually changed
            copied = HydrusPaths.MirrorFile( source_path, dest_path )
            if copied:
                num_copied += 1
                HydrusPaths.MakeFileWritable( dest_path )
        sync_paths.add( dest_path )
    if num_copied > 0:
        HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
    if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
        # delete files that are on disk but no longer in the search results
        deletee_paths = previous_paths.difference( sync_paths )
        for deletee_path in deletee_paths:
            ClientPaths.DeletePath( deletee_path )
        # bottom-up walk so emptied child dirs make their parents deletable too
        deletee_dirs = set()
        for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
            if root == self._path:
                continue
            no_files = len( filenames ) == 0
            useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
            no_useful_dirs = len( useful_dirnames ) == 0
            if no_useful_dirs and no_files:
                deletee_dirs.add( root )
        for deletee_dir in deletee_dirs:
            if os.path.exists( deletee_dir ):
                HydrusPaths.DeletePath( deletee_dir )
        if len( deletee_paths ) > 0:
            HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
    if self._delete_from_client_after_export:
        deletee_hashes = { media_result.GetHash() for media_result in media_results }
        # delete in small chunks to keep each database write short
        chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
        reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
        content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason ) for chunk_of_hashes in chunks_of_hashes ]
        for content_update in content_updates:
            HG.client_controller.WriteSynchronous( 'content_updates', { CC.LOCAL_FILE_SERVICE_KEY : [ content_update ] } )