    def _OpenSelectedFileSeedData(self):
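        # open the data of every selected file seed: entries starting with 'http' are launched in the web
        # browser, anything else is treated as a local path and shown with HydrusPaths.OpenFileLocation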

        file_seeds = self._list_ctrl.GetData(only_selected=True)

        if len(file_seeds) > 0:

            if len(file_seeds) > 10:

                message = 'You have many objects selected--are you sure you want to open them all?'

                result = ClientGUIDialogsQuick.GetYesNo(self, message)

                if result != QW.QDialog.Accepted:

                    return

            if file_seeds[0].file_seed_data.startswith('http'):

                for file_seed in file_seeds:

                    ClientPaths.LaunchURLInWebBrowser(file_seed.file_seed_data)

            else:

                try:

                    for file_seed in file_seeds:

                        HydrusPaths.OpenFileLocation(file_seed.file_seed_data)

                except Exception as e:

                    QW.QMessageBox.critical(self, 'Error', str(e))
Example #2
def DoOpenKnownURLFromShortcut( win, media ):
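    # collect the media's known URLs, label the ones that match a URL class, and open one in the web
    # browser; a single match opens immediately, while multiple matches prompt the user to choose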
    
    urls = media.GetLocationsManager().GetURLs()
    
    matched_labels_and_urls = []
    unmatched_urls = []
    
    if len( urls ) > 0:
        
        for url in urls:
            
            try:
                
                url_class = HG.client_controller.network_engine.domain_manager.GetURLClass( url )
                
            except HydrusExceptions.URLClassException:
                
                continue
                
            
            if url_class is None:
                
                unmatched_urls.append( url )
                
            else:
                
                label = url_class.GetName() + ': ' + url
                
                matched_labels_and_urls.append( ( label, url ) )
                
            
        
        matched_labels_and_urls.sort()
        unmatched_urls.sort()
        
    
    if len( matched_labels_and_urls ) == 0:
        
        return
        
    elif len( matched_labels_and_urls ) == 1:
        
        url = matched_labels_and_urls[0][1]
        
    else:
        
        matched_labels_and_urls.extend( ( url, url ) for url in unmatched_urls )
        
        try:
            
            url = ClientGUIDialogsQuick.SelectFromList( win, 'Select which URL', matched_labels_and_urls, sort_tuples = False )
            
        except HydrusExceptions.CancelledException:
            
            return
            
        
    
    ClientPaths.LaunchURLInWebBrowser( url )
Example #3
 def do_it( urls ):
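     # open a batch of URLs in the web browser, one per second; for more than five URLs a pausable,
     # cancellable popup job is published so the user can track or stop the progress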
     
     job_key = None
     
     num_urls = len( urls )
     
     if num_urls > 5:
         
         job_key = ClientThreading.JobKey( pausable = True, cancellable = True )
         
         job_key.SetVariable( 'popup_title', 'Opening URLs' )
         
         HG.client_controller.pub( 'message', job_key )
         
     
     try:
         
         for ( i, url ) in enumerate( urls ):
             
             if job_key is not None:
                 
                 ( i_paused, should_quit ) = job_key.WaitIfNeeded()
                 
                 if should_quit:
                     
                     return
                     
                 
                 job_key.SetVariable( 'popup_text_1', HydrusData.ConvertValueRangeToPrettyString( i + 1, num_urls ) )
                 job_key.SetVariable( 'popup_gauge_1', ( i + 1, num_urls ) )
                 
             
             ClientPaths.LaunchURLInWebBrowser( url )
             
             time.sleep( 1 )
             
         
     finally:
         
         if job_key is not None:
             
             job_key.Finish()
             
             job_key.Delete( 1 )
Example #4
    def _OpenSelectedGalleryURLs(self):
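        # open the URL of every selected gallery seed in the web browser, confirming first if many rows are selected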

        gallery_seeds = self._list_ctrl.GetData(only_selected=True)

        if len(gallery_seeds) > 0:

            if len(gallery_seeds) > 10:

                message = 'You have many objects selected--are you sure you want to open them all?'

                result = ClientGUIDialogsQuick.GetYesNo(self, message)

                if result != QW.QDialog.Accepted:

                    return

            for gallery_seed in gallery_seeds:

                ClientPaths.LaunchURLInWebBrowser(gallery_seed.url)
Example #5
    def _ActionPaths(self):
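        # import folder housekeeping: for each finished import status, either delete the source files
        # (plus any .txt sidecars) or move them to the configured destination, as set in self._actions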

        for status in (CC.STATUS_SUCCESSFUL_AND_NEW,
                       CC.STATUS_SUCCESSFUL_BUT_REDUNDANT, CC.STATUS_DELETED,
                       CC.STATUS_ERROR):

            action = self._actions[status]

            if action == CC.IMPORT_FOLDER_DELETE:
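                # drain every file seed with this status, deleting the file and any .txt sidecar before removing the seed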

                while True:

                    file_seed = self._file_seed_cache.GetNextFileSeed(status)

                    if file_seed is None or HG.view_shutdown:

                        break

                    path = file_seed.file_seed_data

                    try:

                        if os.path.exists(path) and not os.path.isdir(path):

                            ClientPaths.DeletePath(path)

                        txt_path = path + '.txt'

                        if os.path.exists(txt_path):

                            ClientPaths.DeletePath(txt_path)

                        self._file_seed_cache.RemoveFileSeeds((file_seed, ))

                    except Exception as e:

                        raise Exception(
                            'Tried to delete "{}", but could not.'.format(
                                path))

            elif action == CC.IMPORT_FOLDER_MOVE:
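                # same drain loop, but move the file and any .txt sidecar to the destination directory, renaming on conflicts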

                while True:

                    file_seed = self._file_seed_cache.GetNextFileSeed(status)

                    if file_seed is None or HG.view_shutdown:

                        break

                    path = file_seed.file_seed_data

                    try:

                        dest_dir = self._action_locations[status]

                        if not os.path.exists(dest_dir):

                            raise Exception(
                                'Tried to move "{}" to "{}", but the destination directory did not exist.'
                                .format(path, dest_dir))

                        if os.path.exists(path) and not os.path.isdir(path):

                            filename = os.path.basename(path)

                            dest_path = os.path.join(dest_dir, filename)

                            dest_path = HydrusPaths.AppendPathUntilNoConflicts(
                                dest_path)

                            HydrusPaths.MergeFile(path, dest_path)

                        txt_path = path + '.txt'

                        if os.path.exists(txt_path):

                            txt_filename = os.path.basename(txt_path)

                            txt_dest_path = os.path.join(
                                dest_dir, txt_filename)

                            txt_dest_path = HydrusPaths.AppendPathUntilNoConflicts(
                                txt_dest_path)

                            HydrusPaths.MergeFile(txt_path, txt_dest_path)

                        self._file_seed_cache.RemoveFileSeeds((file_seed, ))

                    except Exception as e:

                        HydrusData.ShowText('Import folder tried to move ' +
                                            path + ', but could not:')

                        HydrusData.ShowException(e)

                        HydrusData.ShowText('Import folder has been paused.')

                        self._paused = True

                        return

            elif action == CC.IMPORT_FOLDER_IGNORE:

                pass
Example #6
    def _WorkOnFiles(self, page_key):
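        # import the next unknown file seed from the cache, present it to the page if appropriate, and
        # optionally delete the source file (and any .txt sidecar) after a successful import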

        file_seed = self._file_seed_cache.GetNextFileSeed(CC.STATUS_UNKNOWN)

        if file_seed is None:

            return

        did_substantial_work = False

        path = file_seed.file_seed_data

        with self._lock:

            self._current_action = 'importing'

        def status_hook(text):

            with self._lock:

                if len(text) > 0:

                    text = text.splitlines()[0]

                self._current_action = text

        file_seed.ImportPath(self._file_seed_cache,
                             self._file_import_options,
                             status_hook=status_hook)

        did_substantial_work = True

        if file_seed.status in CC.SUCCESSFUL_IMPORT_STATES:

            if file_seed.ShouldPresent(self._file_import_options):

                file_seed.PresentToPage(page_key)

                did_substantial_work = True

            if self._delete_after_success:

                try:

                    ClientPaths.DeletePath(path)

                except Exception as e:

                    HydrusData.ShowText('While attempting to delete ' + path +
                                        ', the following error occurred:')
                    HydrusData.ShowException(e)

                txt_path = path + '.txt'

                if os.path.exists(txt_path):

                    try:

                        ClientPaths.DeletePath(txt_path)

                    except Exception as e:

                        HydrusData.ShowText('While attempting to delete ' +
                                            txt_path +
                                            ', the following error occurred:')
                        HydrusData.ShowException(e)

        with self._lock:

            self._current_action = ''

        if did_substantial_work:

            time.sleep(
                ClientImporting.DID_SUBSTANTIAL_FILE_WORK_MINIMUM_SLEEP_TIME)
Example #7
    def _WorkOnFiles(self):
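        # another take on the file-import worker: same overall flow, but status text is neatened by
        # ClientImportControl.NeatenStatusText and presentation is decided by the presentation import options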

        file_seed = self._file_seed_cache.GetNextFileSeed(CC.STATUS_UNKNOWN)

        if file_seed is None:

            return

        path = file_seed.file_seed_data

        with self._lock:

            self._files_status = 'importing'

        def status_hook(text):

            with self._lock:

                self._files_status = ClientImportControl.NeatenStatusText(text)

        file_seed.ImportPath(self._file_seed_cache,
                             self._file_import_options,
                             status_hook=status_hook)

        if file_seed.status in CC.SUCCESSFUL_IMPORT_STATES:

            if file_seed.ShouldPresent(
                    self._file_import_options.GetPresentationImportOptions()):

                file_seed.PresentToPage(self._page_key)

            if self._delete_after_success:

                try:

                    ClientPaths.DeletePath(path)

                except Exception as e:

                    HydrusData.ShowText('While attempting to delete ' + path +
                                        ', the following error occurred:')
                    HydrusData.ShowException(e)

                txt_path = path + '.txt'

                if os.path.exists(txt_path):

                    try:

                        ClientPaths.DeletePath(txt_path)

                    except Exception as e:

                        HydrusData.ShowText('While attempting to delete ' +
                                            txt_path +
                                            ', the following error occurred:')
                        HydrusData.ShowException(e)

        with self._lock:

            self._files_status = ''

        time.sleep(
            ClientImporting.DID_SUBSTANTIAL_FILE_WORK_MINIMUM_SLEEP_TIME)
Example #8
 def _DoExport( self ):
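     # run an export folder: gather matching media in chunks, mirror each file into the export directory
     # using the filename phrase, then optionally synchronise the directory and delete the exports from the client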
     
     query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
     
     media_results = []
     
     i = 0
     
     base = 256
     
     while i < len( query_hash_ids ):
         
         if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         if i == 0: ( last_i, i ) = ( 0, base )
         else: ( last_i, i ) = ( i, i + base )
         
         sub_query_hash_ids = query_hash_ids[ last_i : i ]
         
         more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
         
         media_results.extend( more_media_results )
         
     
     media_results.sort( key = lambda mr: mr.GetHashId() )
     
     #
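     # record every file already under the export directory so the synchronise pass can later delete strays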
     
     terms = ParseExportPhrase( self._phrase )
     
     previous_paths = set()
     
     for ( root, dirnames, filenames ) in os.walk( self._path ):
         
         previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
         
     
     sync_paths = set()
     
     client_files_manager = HG.client_controller.client_files_manager
     
     num_copied = 0
     
     for media_result in media_results:
         
         if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         hash = media_result.GetHash()
         mime = media_result.GetMime()
         size = media_result.GetSize()
         
         try:
             
             source_path = client_files_manager.GetFilePath( hash, mime )
             
         except HydrusExceptions.FileMissingException:
             
             raise Exception( 'A file to be exported, hash "{}", was missing! You should run file maintenance (under database->maintenance->files) to check the files for the export folder\'s search, and possibly all your files.'.format( hash.hex() ) )
             
         
         filename = GenerateExportFilename( self._path, media_result, terms )
         
         dest_path = os.path.normpath( os.path.join( self._path, filename ) )
         
         if not dest_path.startswith( self._path ):
             
             raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
             
         
         dest_path_dir = os.path.dirname( dest_path )
         
         HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
         
         if dest_path not in sync_paths:
             
             copied = HydrusPaths.MirrorFile( source_path, dest_path )
             
             if copied:
                 
                 num_copied += 1
                 
                 HydrusPaths.TryToGiveFileNicePermissionBits( dest_path )
                 
             
         
         sync_paths.add( dest_path )
         
     
     if num_copied > 0:
         
         HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
         
     
     if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
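         # delete anything under the export directory that this run did not export, then prune empty subdirectories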
         
         deletee_paths = previous_paths.difference( sync_paths )
         
         for deletee_path in deletee_paths:
             
             ClientPaths.DeletePath( deletee_path )
             
         
         deletee_dirs = set()
         
         for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
             
             if root == self._path:
                 
                 continue
                 
             
             no_files = len( filenames ) == 0
             
             useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
             
             no_useful_dirs = len( useful_dirnames ) == 0
             
             if no_useful_dirs and no_files:
                 
                 deletee_dirs.add( root )
                 
             
         
         for deletee_dir in deletee_dirs:
             
             if os.path.exists( deletee_dir ):
                 
                 HydrusPaths.DeletePath( deletee_dir )
                 
             
         
         if len( deletee_paths ) > 0:
             
             HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
             
         
     
     if self._delete_from_client_after_export:
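         # delete the exported files from the client's local file domains, honouring the archived-file delete lock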
         
         local_file_service_keys = HG.client_controller.services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, ) )
         
         service_keys_to_deletee_hashes = collections.defaultdict( list )
         
         delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean( 'delete_lock_for_archived_files' )
         
         for media_result in media_results:
             
             if delete_lock_for_archived_files and not media_result.GetInbox():
                 
                 continue
                 
             
             hash = media_result.GetHash()
             
             deletee_service_keys = media_result.GetLocationsManager().GetCurrent().intersection( local_file_service_keys )
             
             for deletee_service_key in deletee_service_keys:
                 
                 service_keys_to_deletee_hashes[ deletee_service_key ].append( hash )
                 
             
         
         reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
         
         for ( service_key, deletee_hashes ) in service_keys_to_deletee_hashes.items():
             
             chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
             
             for chunk_of_hashes in chunks_of_hashes:
                 
                 content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason )
                 
                 HG.client_controller.WriteSynchronous( 'content_updates', { service_key : [ content_update ] } )
Example #9
 def _DoExport( self ):
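     # a variant of the export folder run that reads the pause flag from HC.options, and on
     # delete-after-export removes the files from CC.LOCAL_FILE_SERVICE_KEY in chunks of 64 hashes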
     
     query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
     
     media_results = []
     
     i = 0
     
     base = 256
     
     while i < len( query_hash_ids ):
         
         if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         if i == 0: ( last_i, i ) = ( 0, base )
         else: ( last_i, i ) = ( i, i + base )
         
         sub_query_hash_ids = query_hash_ids[ last_i : i ]
         
         more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
         
         media_results.extend( more_media_results )
         
     
     media_results.sort( key = lambda mr: mr.GetHashId() )
     
     #
     
     terms = ParseExportPhrase( self._phrase )
     
     previous_paths = set()
     
     for ( root, dirnames, filenames ) in os.walk( self._path ):
         
         previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
         
     
     sync_paths = set()
     
     client_files_manager = HG.client_controller.client_files_manager
     
     num_copied = 0
     
     for media_result in media_results:
         
         if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         hash = media_result.GetHash()
         mime = media_result.GetMime()
         size = media_result.GetSize()
         
         source_path = client_files_manager.GetFilePath( hash, mime )
         
         filename = GenerateExportFilename( self._path, media_result, terms )
         
         dest_path = os.path.normpath( os.path.join( self._path, filename ) )
         
         if not dest_path.startswith( self._path ):
             
             raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
             
         
         dest_path_dir = os.path.dirname( dest_path )
         
         HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
         
         if dest_path not in sync_paths:
             
             copied = HydrusPaths.MirrorFile( source_path, dest_path )
             
             if copied:
                 
                 num_copied += 1
                 
                 HydrusPaths.MakeFileWritable( dest_path )
                 
             
         
         sync_paths.add( dest_path )
         
     
     if num_copied > 0:
         
         HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
         
     
     if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
         
         deletee_paths = previous_paths.difference( sync_paths )
         
         for deletee_path in deletee_paths:
             
             ClientPaths.DeletePath( deletee_path )
             
         
         deletee_dirs = set()
         
         for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
             
             if root == self._path:
                 
                 continue
                 
             
             no_files = len( filenames ) == 0
             
             useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
             
             no_useful_dirs = len( useful_dirnames ) == 0
             
             if no_useful_dirs and no_files:
                 
                 deletee_dirs.add( root )
                 
             
         
         for deletee_dir in deletee_dirs:
             
             if os.path.exists( deletee_dir ):
                 
                 HydrusPaths.DeletePath( deletee_dir )
                 
             
         
         if len( deletee_paths ) > 0:
             
             HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
             
         
     
     if self._delete_from_client_after_export:
         
         deletee_hashes = { media_result.GetHash() for media_result in media_results }
         
         chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
         
         reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
         
         content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason ) for chunk_of_hashes in chunks_of_hashes ]
         
         for content_update in content_updates:
             
             HG.client_controller.WriteSynchronous( 'content_updates', { CC.LOCAL_FILE_SERVICE_KEY : [ content_update ] } )