Example #1
    def test_simple(self):

        rwlock = ClientThreading.FileRWLock()

        result_list = []

        do_read_job(rwlock, result_list, '1')

        expected_result = []

        expected_result.extend(['begin read 1', 'end read 1'])

        self.assertEqual(result_list, expected_result)

        #

        result_list = []

        do_write_job(rwlock, result_list, '1')

        expected_result = []

        expected_result.extend(['begin write 1', 'end write 1'])

        self.assertEqual(result_list, expected_result)
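
The FileRWLock tests on this page (this one and Examples #10, #12 and #14 below) rely on do_read_job and do_write_job helpers that are not shown here. A minimal sketch of what they might look like, assuming only the rwlock.read / rwlock.write context managers and the 'begin .../end ...' strings the assertions check (the 0.05s delay is an assumption, chosen so concurrent jobs overlap):

import time

def do_read_job(rwlock, result_list, name):

    # hypothetical helper: hold the shared read lock while recording begin/end markers
    with rwlock.read:

        result_list.append('begin read {}'.format(name))

        time.sleep(0.05)

        result_list.append('end read {}'.format(name))

def do_write_job(rwlock, result_list, name):

    # hypothetical helper: hold the exclusive write lock while recording begin/end markers
    with rwlock.write:

        result_list.append('begin write {}'.format(name))

        time.sleep(0.05)

        result_list.append('end write {}'.format(name))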
Example #2
    def FetchTags(self):

        script = self._script_choice.GetValue()

        if script.UsesUserInput():

            message = 'Enter the custom input for the file lookup script.'

            with ClientGUIDialogs.DialogTextEntry(self, message) as dlg:

                if dlg.exec() != QW.QDialog.Accepted:

                    return

                file_identifier = dlg.GetValue()

        else:

            (m, ) = self._media

            file_identifier = script.ConvertMediaToFileIdentifier(m)

        stop_time = HydrusData.GetNow() + 30

        job_key = ClientThreading.JobKey(cancellable=True, stop_time=stop_time)

        self._script_management.SetJobKey(job_key)

        self._SetTags([])

        HG.client_controller.CallToThread(self.THREADFetchTags, script,
                                          job_key, file_identifier)
Example #3
 def RegenerateTree( self ):
     
     job_key = ClientThreading.JobKey()
     
     try:
         
         job_key.SetStatusTitle( 'regenerating similar file search data' )
         
         HG.client_controller.pub( 'modal_message', job_key )
         
         job_key.SetVariable( 'popup_text_1', 'purging search info of orphans' )
         
         self._c.execute( 'DELETE FROM shape_perceptual_hash_map WHERE hash_id NOT IN ( SELECT hash_id FROM current_files );' )
         
         job_key.SetVariable( 'popup_text_1', 'gathering all leaves' )
         
         self._c.execute( 'DELETE FROM shape_vptree;' )
         
         all_nodes = self._c.execute( 'SELECT phash_id, phash FROM shape_perceptual_hashes;' ).fetchall()
         
         job_key.SetVariable( 'popup_text_1', HydrusData.ToHumanInt( len( all_nodes ) ) + ' leaves found, now regenerating' )
         
         ( root_id, root_phash ) = self._PopBestRootNode( all_nodes ) #HydrusData.RandomPop( all_nodes )
         
         self._GenerateBranch( job_key, None, root_id, root_phash, all_nodes )
         
     finally:
         
         job_key.SetVariable( 'popup_text_1', 'done!' )
         job_key.DeleteVariable( 'popup_text_2' )
         
         job_key.Finish()
         
         job_key.Delete( 5 )
Example #4
def CatchExceptionClient(etype, value, tb):

    try:

        trace_list = traceback.format_tb(tb)

        trace = ''.join(trace_list)

        pretty_value = str(value)

        if os.linesep in pretty_value:

            (first_line, anything_else) = pretty_value.split(os.linesep, 1)

            trace = trace + os.linesep + anything_else

        else:

            first_line = pretty_value

        job_key = ClientThreading.JobKey()

        if etype == HydrusExceptions.ShutdownException:

            return

        else:

            try:
                job_key.SetVariable('popup_title', str(etype.__name__))
            except:
                job_key.SetVariable('popup_title', str(etype))

            job_key.SetVariable('popup_text_1', first_line)
            job_key.SetVariable('popup_traceback', trace)

        text = job_key.ToString()

        HydrusData.Print('Uncaught exception:')

        HydrusData.DebugPrint(text)

        HG.client_controller.pub('message', job_key)

    except:

        text = 'Encountered an error I could not parse:'

        text += os.linesep

        text += str((etype, value, tb))

        try:
            text += traceback.format_exc()
        except:
            pass

        HydrusData.ShowText(text)

    time.sleep(1)
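
Example #7 below installs this function as the process-wide exception hook (via the ClientData module), so any exception that escapes a thread's normal handling surfaces as a popup:

import sys

sys.excepthook = ClientData.CatchExceptionClient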
Example #5
    def CallBlockingToQt(self, win, func, *args, **kwargs):
        def qt_code(win: QW.QWidget, job_key: ClientThreading.JobKey):

            try:

                if win is not None and not QP.isValid(win):

                    raise HydrusExceptions.QtDeadWindowException(
                        'Parent Window was destroyed before Qt command was called!'
                    )

                result = func(*args, **kwargs)

                job_key.SetVariable('result', result)

            except (HydrusExceptions.QtDeadWindowException,
                    HydrusExceptions.DBCredentialsException,
                    HydrusExceptions.ShutdownException) as e:

                job_key.SetErrorException(e)

            except Exception as e:

                job_key.SetErrorException(e)

                HydrusData.Print('CallBlockingToQt just caught this error:')
                HydrusData.DebugPrint(traceback.format_exc())

            finally:

                job_key.Finish()

        job_key = ClientThreading.JobKey()

        QP.CallAfter(qt_code, win, job_key)

        while not job_key.IsDone():

            if HG.model_shutdown:

                raise HydrusExceptions.ShutdownException(
                    'Application is shutting down!')

            time.sleep(0.05)

        if job_key.HasVariable('result'):

            # result can be None, for qt_code that has no return variable

            result = job_key.GetIfHasVariable('result')

            return result

        if job_key.HadError():

            e = job_key.GetErrorException()

            raise e

        raise HydrusExceptions.ShutdownException()
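
A usage sketch for this helper, assuming it is called from a non-Qt worker thread on a controller that exposes CallBlockingToQt; the function and variable names below are illustrative only:

def THREADDoSomething(controller, win):

    # blocks this worker thread until the Qt main thread has run the callable
    title = controller.CallBlockingToQt(win, lambda: win.windowTitle())

    HydrusData.Print('parent window title: {}'.format(title))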
Example #6
 def CallRepeatingQtSafe( self, window, initial_delay, period, func, *args, **kwargs ):
     
     call = HydrusData.Call( func, *args, **kwargs )
     
     job = ClientThreading.QtAwareRepeatingJob(self, self._job_scheduler, window, initial_delay, period, call)
     
     self._job_scheduler.AddJob( job )
     
     return job
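
Example #7 below uses this scheduler to poll its popup queue. An illustrative call (window and update_func are placeholders) that runs 0.25s from now and then every 0.5s on the Qt thread, for as long as the window is alive:

update_job = HG.client_controller.CallRepeatingQtSafe( window, 0.25, 0.5, update_func )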
Example #7
 def __init__( self, parent ):
     
     QW.QWidget.__init__( self, parent )
     
     self.setWindowFlags( QC.Qt.Tool | QC.Qt.FramelessWindowHint )
     
     self.setAttribute( QC.Qt.WA_ShowWithoutActivating )
     
     self.setSizePolicy( QW.QSizePolicy.MinimumExpanding, QW.QSizePolicy.Preferred )
     
     self._last_best_size_i_fit_on = ( 0, 0 )
     
     self._max_messages_to_display = 10
     
     vbox = QP.VBoxLayout()
     
     self._message_panel = QW.QWidget( self )
     
     self._message_vbox = QP.VBoxLayout( margin = 0 )
     
     vbox.setSizeConstraint( QW.QLayout.SetFixedSize )
     
     self._message_panel.setLayout( self._message_vbox )
     self._message_panel.setSizePolicy( QW.QSizePolicy.MinimumExpanding, QW.QSizePolicy.Preferred )
     
     self._summary_bar = PopupMessageSummaryBar( self, self )
     self._summary_bar.setSizePolicy( QW.QSizePolicy.MinimumExpanding, QW.QSizePolicy.Preferred )
     
     QP.AddToLayout( vbox, self._message_panel )
     QP.AddToLayout( vbox, self._summary_bar )
     
     self.setLayout( vbox )
     
     self._pending_job_keys = []
     
     self._gui_event_filter = QP.WidgetEventFilter( parent )
     self._gui_event_filter.EVT_SIZE( self.EventParentMovedOrResized )
     self._gui_event_filter.EVT_MOVE( self.EventParentMovedOrResized )
     
     HG.client_controller.sub( self, 'AddMessage', 'message' )
     
     self._old_excepthook = sys.excepthook
     self._old_show_exception = HydrusData.ShowException
     
     sys.excepthook = ClientData.CatchExceptionClient
     HydrusData.ShowException = ClientData.ShowExceptionClient
     HydrusData.ShowText = ClientData.ShowTextClient
     
     job_key = ClientThreading.JobKey()
     
     job_key.SetVariable( 'popup_text_1', 'initialising popup message manager\u2026' )
     
     self._update_job = HG.client_controller.CallRepeatingQtSafe( self, 0.25, 0.5, self.REPEATINGUpdate )
     
     HG.client_controller.CallLaterQtSafe(self, 0.5, self.AddMessage, job_key)
     
     HG.client_controller.CallLaterQtSafe(self, 1.0, job_key.Delete)
Example #8
def ShowExceptionTupleClient(etype, value, tb, do_wait=True):

    if etype is None:

        etype = HydrusExceptions.UnknownException

    if value is None:

        value = 'Unknown error'

    if tb is None:

        trace = 'No error trace--here is the stack:' + os.linesep + ''.join(
            traceback.format_stack())

    else:

        trace = ''.join(traceback.format_exception(etype, value, tb))

    pretty_value = str(value)

    if os.linesep in pretty_value:

        (first_line, anything_else) = pretty_value.split(os.linesep, 1)

        trace = trace + os.linesep + anything_else

    else:

        first_line = pretty_value

    job_key = ClientThreading.JobKey()

    if etype == HydrusExceptions.ShutdownException:

        return

    else:

        title = str(getattr(etype, '__name__', etype))

        job_key.SetStatusTitle(title)

        job_key.SetVariable('popup_text_1', first_line)
        job_key.SetTraceback(trace)

    text = job_key.ToString()

    HydrusData.Print('Exception:')

    HydrusData.DebugPrint(text)

    HG.client_controller.pub('message', job_key)

    if do_wait:

        time.sleep(1)
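
Example #7 above rebinds HydrusData.ShowException to ClientData.ShowExceptionClient. That wrapper is not shown on this page; a plausible sketch of it, assuming it simply recovers the tuple from the exception object and delegates here:

def ShowExceptionClient(e, do_wait=True):

    # assumed wrapper: derive (etype, value, tb) from the exception and delegate
    ShowExceptionTupleClient(type(e), e, e.__traceback__, do_wait=do_wait)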
Example #9
def CopyHashesToClipboard( win: QW.QWidget, hash_type: str, medias: typing.Sequence[ ClientMedia.Media ] ):
    
    sha256_hashes = list( itertools.chain.from_iterable( ( media.GetHashes( ordered = True ) for media in medias ) ) )
    
    if hash_type == 'sha256':
        
        desired_hashes = sha256_hashes
        
    else:
        
        num_hashes = len( sha256_hashes )
        num_remote_sha256_hashes = len( list( itertools.chain.from_iterable( ( media.GetHashes( discriminant = CC.DISCRIMINANT_NOT_LOCAL, ordered = True ) for media in medias ) ) ) )
        
        desired_hashes = HG.client_controller.Read( 'file_hashes', sha256_hashes, 'sha256', hash_type )
        
        num_missing = num_hashes - len( desired_hashes )
        
        if num_missing > 0:
            
            if num_missing == num_hashes:
                
                message = 'Unfortunately, none of the {} hashes could be found.'.format( hash_type )
                
            else:
                
                message = 'Unfortunately, {} of the {} hashes could not be found.'.format( HydrusData.ToHumanInt( num_missing ), hash_type )
                
            
            if num_remote_sha256_hashes > 0:
                
                message += ' {} of the files you wanted are not currently in this client. If they have never visited this client, the lookup is impossible.'.format( HydrusData.ToHumanInt( num_remote_sha256_hashes ) )
                
            
            if num_remote_sha256_hashes < num_hashes:
                
                message += ' It could be that some of the local files are currently missing this information in the hydrus database. A file maintenance job (under the database menu) can repopulate this data.'
                
            
            QW.QMessageBox.warning( win, 'Warning', message )
            
        
    
    if len( desired_hashes ) > 0:
        
        hex_hashes = os.linesep.join( [ desired_hash.hex() for desired_hash in desired_hashes ] )
        
        HG.client_controller.pub( 'clipboard', 'text', hex_hashes )
        
        job_key = ClientThreading.JobKey()
        
        job_key.SetVariable( 'popup_text_1', '{} {} hashes copied'.format( HydrusData.ToHumanInt( len( desired_hashes ) ), hash_type ) )
        
        HG.client_controller.pub( 'message', job_key )
        
        job_key.Delete( 2 )
Example #10
 def test_mixed_competing( self ):
     
     rwlock = ClientThreading.FileRWLock()
     
     result_list = []
     
     all_expected_results = set()
     
     for i in range( 10 ):
         
         HG.test_controller.CallLater( 0.0 * i, do_read_job, rwlock, result_list, str( i ) )
         
         all_expected_results.update( [ 'begin read {}'.format( i ), 'end read {}'.format( i ) ] )
         
     
     for i in range( 5 ):
         
         HG.test_controller.CallLater( 0.0 * i, do_write_job, rwlock, result_list, str( i ) )
         
         all_expected_results.update( [ 'begin write {}'.format( i ), 'end write {}'.format( i ) ] )
         
     
     time.sleep( 0.2 )
     
     with rwlock.read:
         
         pass
         
     
     with rwlock.write:
         
         pass
         
     
     self.assertEqual( set( result_list ), all_expected_results )
     
     for i in range( 15 ):
         
         # even if reads mix up a bit, every two items should have the same read/write value
         
         a = result_list[ i * 2 ]
         b = result_list[ ( i * 2 ) + 1 ]
         
         if 'read' in a:
             
             self.assertIn( 'read', b )
             
         
         if 'write' in a:
             
             self.assertTrue( a.startswith( 'begin write' ) )
             self.assertTrue( b.startswith( 'end write' ) )
             
             self.assertEqual( a[-2:], b[-2:] )
Example #11
def ShowTextClient(text):

    job_key = ClientThreading.JobKey()

    job_key.SetVariable('popup_text_1', str(text))

    text = job_key.ToString()

    HydrusData.Print(text)

    HG.client_controller.pub('message', job_key)
Example #12
    def test_competing_write(self):

        rwlock = ClientThreading.FileRWLock()

        result_list = []

        HG.test_controller.CallLater(0.0, do_write_job, rwlock, result_list,
                                     '1')
        HG.test_controller.CallLater(0.1, do_write_job, rwlock, result_list,
                                     '2')

        time.sleep(0.2)

        with rwlock.read:

            pass

        expected_result = []

        expected_result.extend(['begin write 1', 'end write 1'])
        expected_result.extend(['begin write 2', 'end write 2'])

        self.assertEqual(result_list, expected_result)

        #

        result_list = []

        for i in range(10):

            HG.test_controller.CallLater(0.0, do_write_job, rwlock,
                                         result_list, str(i))

        time.sleep(0.2)

        with rwlock.read:

            pass

        expected_results = set()

        for i in range(10):

            expected_pair = ('begin write {}'.format(i),
                             'end write {}'.format(i))

            expected_results.add(expected_pair)

        for i in range(10):

            result_pair = (result_list[i * 2], result_list[(i * 2) + 1])

            self.assertIn(result_pair, expected_results)
Example #13
    def CallLaterQtSafe(self, window, initial_delay, label, func, *args,
                        **kwargs):

        call = HydrusData.Call(func, *args, **kwargs)

        call.SetLabel(label)

        job = ClientThreading.QtAwareJob(self, self._job_scheduler, window,
                                         initial_delay, call)

        self._job_scheduler.AddJob(job)

        return job
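
An illustrative call matching this signature, where the label is the human-readable description attached to the scheduled job; window, popup_manager and job_key are placeholders:

job = HG.client_controller.CallLaterQtSafe(window, 0.5, 'add popup message',
                                           popup_manager.AddMessage, job_key)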
Example #14
    def test_shared_read(self):

        rwlock = ClientThreading.FileRWLock()

        result_list = []

        HG.test_controller.CallLater(0.0, do_read_job, rwlock, result_list,
                                     '1')
        HG.test_controller.CallLater(0.1, do_read_job, rwlock, result_list,
                                     '2')

        time.sleep(0.2)

        with rwlock.write:

            pass

        results = set(result_list)

        expected_results = set()

        expected_results.update(['begin read 1', 'end read 1'])
        expected_results.update(['begin read 2', 'end read 2'])

        self.assertEqual(results, expected_results)

        #

        result_list = []

        for i in range(10):

            HG.test_controller.CallLater(0.0, do_read_job, rwlock, result_list,
                                         str(i))

        time.sleep(0.2)

        with rwlock.write:

            pass

        expected_results = set()

        for i in range(10):

            expected_results.update(
                ['begin read {}'.format(i), 'end read {}'.format(i)])

        results = set(result_list)

        self.assertEqual(results, expected_results)
Example #15
 def Run( self ):
     
     job_key = ClientThreading.JobKey( pausable = True, cancellable = True )
     
     job_key.SetVariable( 'popup_title', self._title )
     
     self._controller.pub( 'message', job_key )
     
     job_key.SetVariable( 'popup_text_1', 'preparing source' )
     
     self._source.Prepare()
     
     job_key.SetVariable( 'popup_text_1', 'preparing destination' )
     
     self._destination.Prepare()
     
     job_key.SetVariable( 'popup_text_1', 'beginning work' )
     
     try:
         
         while self._source.StillWorkToDo():
             
             progress_statement = self._destination.DoSomeWork( self._source )
             
             job_key.SetVariable( 'popup_text_1', progress_statement )
             
             job_key.WaitIfNeeded()
             
             if job_key.IsCancelled():
                 
                 break
                 
             
         
     finally:
         
         job_key.SetVariable( 'popup_text_1', 'done, cleaning up source' )
         
         self._source.CleanUp()
         
         job_key.SetVariable( 'popup_text_1', 'done, cleaning up destination' )
         
         self._destination.CleanUp()
         
         job_key.SetVariable( 'popup_text_1', 'done!' )
         
         job_key.Finish()
         
         job_key.Delete( 3 )
Example #16
def PublishPresentationHashes( publishing_label, hashes, publish_to_popup_button, publish_files_to_page ):
    
    if publish_to_popup_button:
        
        files_job_key = ClientThreading.JobKey()
        
        files_job_key.SetVariable( 'popup_files_mergable', True )
        files_job_key.SetVariable( 'popup_files', ( list( hashes ), publishing_label ) )
        
        HG.client_controller.pub( 'message', files_job_key )
        
    
    if publish_files_to_page:
        
        HG.client_controller.pub( 'imported_files_to_page', list( hashes ), publishing_label )
Example #17
 def do_it( urls ):
     
     job_key = None
     
     num_urls = len( urls )
     
     if num_urls > 5:
         
         job_key = ClientThreading.JobKey( pausable = True, cancellable = True )
         
         job_key.SetVariable( 'popup_title', 'Opening URLs' )
         
         HG.client_controller.pub( 'message', job_key )
         
     
     try:
         
         for ( i, url ) in enumerate( urls ):
             
             if job_key is not None:
                 
                 ( i_paused, should_quit ) = job_key.WaitIfNeeded()
                 
                 if should_quit:
                     
                     return
                     
                 
                 job_key.SetVariable( 'popup_text_1', HydrusData.ConvertValueRangeToPrettyString( i + 1, num_urls ) )
                 job_key.SetVariable( 'popup_gauge_1', ( i + 1, num_urls ) )
                 
             
             ClientPaths.LaunchURLInWebBrowser( url )
             
             time.sleep( 1 )
             
         
     finally:
         
         if job_key is not None:
             
             job_key.Finish()
             
             job_key.Delete( 1 )
Example #18
    def DoWork(self):

        if HG.view_shutdown:

            return

        if HC.options['pause_import_folders_sync'] or self._paused:

            return

        checked_folder = False

        did_import_file_work = False

        error_occured = False

        stop_time = HydrusData.GetNow() + 3600

        job_key = ClientThreading.JobKey(pausable=False,
                                         cancellable=True,
                                         stop_time=stop_time)

        try:

            if not os.path.exists(self._path) or not os.path.isdir(self._path):

                raise Exception(
                    'Path "' + self._path +
                    '" does not seem to exist, or is not a directory.')

            pubbed_job_key = False

            job_key.SetVariable('popup_title', 'import folder - ' + self._name)

            due_by_check_now = self._check_now
            due_by_period = self._check_regularly and HydrusData.TimeHasPassed(
                self._last_checked + self._period)

            if due_by_check_now or due_by_period:

                if not pubbed_job_key and self._show_working_popup:

                    HG.client_controller.pub('message', job_key)

                    pubbed_job_key = True

                self._CheckFolder(job_key)

                checked_folder = True

            file_seed = self._file_seed_cache.GetNextFileSeed(
                CC.STATUS_UNKNOWN)

            if file_seed is not None:

                if not pubbed_job_key and self._show_working_popup:

                    HG.client_controller.pub('message', job_key)

                    pubbed_job_key = True

                did_import_file_work = self._ImportFiles(job_key)

        except Exception as e:

            error_occured = True
            self._paused = True

            HydrusData.ShowText(
                'The import folder "' + self._name +
                '" encountered an exception! It has been paused!')
            HydrusData.ShowException(e)

        if checked_folder or did_import_file_work or error_occured:

            HG.client_controller.WriteSynchronous('serialisable', self)

        job_key.Delete()
Example #19
    def AnalyzeDueTables(self,
                         maintenance_mode=HC.MAINTENANCE_FORCED,
                         stop_time=None,
                         force_reanalyze=False):

        names_to_analyze = self.GetTableNamesDueAnalysis(
            force_reanalyze=force_reanalyze)

        if len(names_to_analyze) > 0:

            job_key = ClientThreading.JobKey(maintenance_mode=maintenance_mode,
                                             cancellable=True)

            try:

                job_key.SetStatusTitle('database maintenance - analyzing')

                HG.client_controller.pub('modal_message', job_key)

                random.shuffle(names_to_analyze)

                for name in names_to_analyze:

                    HG.client_controller.frame_splash_status.SetText(
                        'analyzing ' + name)
                    job_key.SetVariable('popup_text_1', 'analyzing ' + name)

                    time.sleep(0.02)

                    started = HydrusData.GetNowPrecise()

                    self.AnalyzeTable(name)

                    time_took = HydrusData.GetNowPrecise() - started

                    if time_took > 1:

                        HydrusData.Print(
                            'Analyzed ' + name + ' in ' +
                            HydrusData.TimeDeltaToPrettyTimeDelta(time_took))

                    p1 = HG.client_controller.ShouldStopThisWork(
                        maintenance_mode, stop_time=stop_time)
                    p2 = job_key.IsCancelled()

                    if p1 or p2:

                        break

                self._Execute(
                    'ANALYZE sqlite_master;'
                )  # this reloads the current stats into the query planner

                job_key.SetVariable('popup_text_1', 'done!')

                HydrusData.Print(job_key.ToString())

            finally:

                job_key.Finish()

                job_key.Delete(10)
Example #20
 def work_callable():
     
     job_key = ClientThreading.JobKey( cancellable = True )
     
     title = 'moving files' if action == HC.CONTENT_UPDATE_MOVE else 'adding files'
     
     job_key.SetStatusTitle( title )
     
     BLOCK_SIZE = 64
     
     if len( applicable_media ) > BLOCK_SIZE:
         
         HG.client_controller.pub( 'message', job_key )
         
     
     pauser = HydrusData.BigJobPauser()
     
     num_to_do = len( applicable_media )
     
     now = HydrusData.GetNow()
     
     for ( i, block_of_media ) in enumerate( HydrusData.SplitListIntoChunks( applicable_media, BLOCK_SIZE ) ):
         
         if job_key.IsCancelled():
             
             break
             
         
         job_key.SetVariable( 'popup_text_1', HydrusData.ConvertValueRangeToPrettyString( i * BLOCK_SIZE, num_to_do ) )
         job_key.SetVariable( 'popup_gauge_1', ( i * BLOCK_SIZE, num_to_do ) )
         
         content_updates = []
         undelete_hashes = set()
         
         for m in block_of_media:
             
             if dest_service_key in m.GetLocationsManager().GetDeleted():
                 
                 undelete_hashes.add( m.GetHash() )
                 
             else:
                 
                 content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADD, ( m.GetMediaResult().GetFileInfoManager(), now ) ) )
                 
             
         
         if len( undelete_hashes ) > 0:
             
             content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, undelete_hashes ) )
             
         
         HG.client_controller.WriteSynchronous( 'content_updates', { dest_service_key : content_updates } )
         
         if action == HC.CONTENT_UPDATE_MOVE:
             
             block_of_hashes = [ m.GetHash() for m in block_of_media ]
             
             content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, block_of_hashes, reason = 'Moved to {}'.format( dest_service_name ) ) ]
             
             HG.client_controller.WriteSynchronous( 'content_updates', { source_service_key : content_updates } )
             
         
         pauser.Pause()
         
     
     job_key.Delete()
Example #21
    def MainLoop(self):

        hashes_still_to_download_in_this_run = set()
        total_hashes_in_this_run = 0
        total_successful_hashes_in_this_run = 0

        while not (HydrusThreading.IsThreadShuttingDown()
                   or self._shutting_down or HG.view_shutdown):

            with self._lock:

                if len(self._pending_hashes) > 0:

                    if total_hashes_in_this_run == 0:

                        job_key = ClientThreading.JobKey(cancellable=True)

                        job_key.SetStatusTitle('downloading')

                        job_key.SetVariable('popup_text_1',
                                            'initialising downloader')

                        job_key_pub_job = self._controller.CallLater(
                            2.0, self._controller.pub, 'message', job_key)

                    num_before = len(hashes_still_to_download_in_this_run)

                    hashes_still_to_download_in_this_run.update(
                        self._pending_hashes)

                    num_after = len(hashes_still_to_download_in_this_run)

                    total_hashes_in_this_run += num_after - num_before

                    self._pending_hashes = set()

            if len(hashes_still_to_download_in_this_run) == 0:

                total_hashes_in_this_run = 0
                total_successful_hashes_in_this_run = 0

                self._new_files_event.wait(5)

                self._new_files_event.clear()

                continue

            if job_key.IsCancelled():

                hashes_still_to_download_in_this_run = set()

                continue

            # random.sample needs a sequence, not a set
            hash = random.sample(
                list(hashes_still_to_download_in_this_run), 1)[0]

            hashes_still_to_download_in_this_run.discard(hash)

            total_done = total_hashes_in_this_run - len(
                hashes_still_to_download_in_this_run)

            job_key.SetVariable(
                'popup_text_1',
                'downloading files from remote services: {}'.format(
                    HydrusData.ConvertValueRangeToPrettyString(
                        total_done, total_hashes_in_this_run)))
            job_key.SetVariable('popup_gauge_1',
                                (total_done, total_hashes_in_this_run))

            try:

                errors_occured = []
                file_successful = False

                media_result = self._controller.Read('media_result', hash)

                service_keys = list(
                    media_result.GetLocationsManager().GetCurrent())

                random.shuffle(service_keys)

                if CC.COMBINED_LOCAL_FILE_SERVICE_KEY in service_keys:

                    total_successful_hashes_in_this_run += 1

                    continue

                for service_key in service_keys:

                    try:

                        service = self._controller.services_manager.GetService(
                            service_key)

                    except:

                        continue

                    try:

                        if service.GetServiceType() == HC.FILE_REPOSITORY:

                            file_repository = service

                            if file_repository.IsFunctional():

                                (os_file_handle,
                                 temp_path) = HydrusTemp.GetTempPath()

                                try:

                                    file_repository.Request(
                                        HC.GET,
                                        'file', {'hash': hash},
                                        temp_path=temp_path)

                                    exclude_deleted = False  # this is the important part here
                                    do_not_check_known_urls_before_importing = False
                                    do_not_check_hashes_before_importing = False
                                    allow_decompression_bombs = True
                                    min_size = None
                                    max_size = None
                                    max_gif_size = None
                                    min_resolution = None
                                    max_resolution = None
                                    automatic_archive = False
                                    associate_primary_urls = True
                                    associate_source_urls = True

                                    file_import_options = FileImportOptions.FileImportOptions(
                                    )

                                    file_import_options.SetPreImportOptions(
                                        exclude_deleted,
                                        do_not_check_known_urls_before_importing,
                                        do_not_check_hashes_before_importing,
                                        allow_decompression_bombs, min_size,
                                        max_size, max_gif_size, min_resolution,
                                        max_resolution)
                                    file_import_options.SetPostImportOptions(
                                        automatic_archive,
                                        associate_primary_urls,
                                        associate_source_urls)

                                    file_import_job = ClientImportFiles.FileImportJob(
                                        temp_path, file_import_options)

                                    file_import_job.DoWork()

                                    file_successful = True

                                    break

                                finally:

                                    HydrusTemp.CleanUpTempPath(
                                        os_file_handle, temp_path)

                        elif service.GetServiceType() == HC.IPFS:

                            multihashes = HG.client_controller.Read(
                                'service_filenames', service_key, {hash})

                            if len(multihashes) > 0:

                                multihash = multihashes[0]

                                service.ImportFile(multihash, silent=True)

                                file_successful = True

                                break

                    except Exception as e:

                        errors_occured.append(e)

                if file_successful:

                    total_successful_hashes_in_this_run += 1

                if len(errors_occured) > 0:

                    if not file_successful:

                        raise errors_occured[0]

            except Exception as e:

                HydrusData.ShowException(e)

                hashes_still_to_download_in_this_run = set()

            finally:

                if len(hashes_still_to_download_in_this_run) == 0:

                    job_key.DeleteVariable('popup_text_1')
                    job_key.DeleteVariable('popup_gauge_1')

                    if total_successful_hashes_in_this_run > 0:

                        job_key.SetVariable(
                            'popup_text_1',
                            HydrusData.ToHumanInt(
                                total_successful_hashes_in_this_run) +
                            ' files downloaded')

                    job_key_pub_job.Cancel()

                    job_key.Finish()

                    job_key.Delete(1)
Example #22
    def THREADSearchPotentials(self):

        try:

            search_distance = HG.client_controller.new_options.GetInteger(
                'similar_files_duplicate_pairs_search_distance')

            with self._lock:

                if self._similar_files_maintenance_status is None:

                    return

                searched_distances_to_count = self._similar_files_maintenance_status

                total_num_files = sum(searched_distances_to_count.values())

                num_searched = sum(
                    (count
                     for (value, count) in searched_distances_to_count.items()
                     if value is not None and value >= search_distance))

                all_files_searched = num_searched >= total_num_files

                if all_files_searched:

                    return  # no work to do

            num_searched_estimate = num_searched

            HG.client_controller.pub('new_similar_files_maintenance_numbers')

            job_key = ClientThreading.JobKey(cancellable=True)

            job_key.SetStatusTitle('searching for potential duplicates')

            HG.client_controller.pub('message', job_key)

            still_work_to_do = True

            while still_work_to_do:

                search_distance = HG.client_controller.new_options.GetInteger(
                    'similar_files_duplicate_pairs_search_distance')

                start_time = HydrusData.GetNowPrecise()

                (still_work_to_do,
                 num_done) = HG.client_controller.WriteSynchronous(
                     'maintain_similar_files_search_for_potential_duplicates',
                     search_distance,
                     maintenance_mode=HC.MAINTENANCE_FORCED,
                     job_key=job_key,
                     work_time_float=0.5)

                time_it_took = HydrusData.GetNowPrecise() - start_time

                num_searched_estimate += num_done

                if num_searched_estimate > total_num_files:

                    similar_files_maintenance_status = HG.client_controller.Read(
                        'similar_files_maintenance_status')

                    if similar_files_maintenance_status is None:

                        break

                    with self._lock:

                        self._similar_files_maintenance_status = similar_files_maintenance_status

                        searched_distances_to_count = self._similar_files_maintenance_status

                        total_num_files = max(
                            num_searched_estimate,
                            sum(searched_distances_to_count.values()))

                text = 'searching: {}'.format(
                    HydrusData.ConvertValueRangeToPrettyString(
                        num_searched_estimate, total_num_files))
                job_key.SetVariable('popup_text_1', text)
                job_key.SetVariable('popup_gauge_1',
                                    (num_searched_estimate, total_num_files))

                if job_key.IsCancelled() or HG.model_shutdown:

                    break

                time.sleep(min(
                    5, time_it_took))  # ideally 0.5s, but potentially longer

            job_key.Delete()

        finally:

            with self._lock:

                self._currently_doing_potentials_search = False

            self.RefreshMaintenanceNumbers()
            self.NotifyNewPotentialsSearchNumbers()
Example #23
    def MaintainTree(self,
                     maintenance_mode=HC.MAINTENANCE_FORCED,
                     job_key=None,
                     stop_time=None):

        time_started = HydrusData.GetNow()
        pub_job_key = False
        job_key_pubbed = False

        if job_key is None:

            job_key = ClientThreading.JobKey(cancellable=True)

            pub_job_key = True

        try:

            job_key.SetVariable('popup_title',
                                'similar files metadata maintenance')

            rebalance_phash_ids = self._STL(
                self._c.execute(
                    'SELECT phash_id FROM shape_maintenance_branch_regen;'))

            num_to_do = len(rebalance_phash_ids)

            while len(rebalance_phash_ids) > 0:

                if pub_job_key and not job_key_pubbed and HydrusData.TimeHasPassed(
                        time_started + 5):

                    HG.client_controller.pub('modal_message', job_key)

                    job_key_pubbed = True

                (i_paused, should_quit) = job_key.WaitIfNeeded()

                should_stop = HG.client_controller.ShouldStopThisWork(
                    maintenance_mode, stop_time=stop_time)

                if should_quit or should_stop:

                    return

                num_done = num_to_do - len(rebalance_phash_ids)

                text = 'rebalancing similar file metadata - ' + HydrusData.ConvertValueRangeToPrettyString(
                    num_done, num_to_do)

                HG.client_controller.frame_splash_status.SetSubtext(text)
                job_key.SetVariable('popup_text_1', text)
                job_key.SetVariable('popup_gauge_1', (num_done, num_to_do))

                with HydrusDB.TemporaryIntegerTable(
                        self._c, rebalance_phash_ids,
                        'phash_id') as temp_table_name:

                    # temp phashes to tree
                    (biggest_phash_id, ) = self._c.execute(
                        'SELECT phash_id FROM {} CROSS JOIN shape_vptree USING ( phash_id ) ORDER BY inner_population + outer_population DESC;'
                        .format(temp_table_name)).fetchone()

                self._RegenerateBranch(job_key, biggest_phash_id)

                rebalance_phash_ids = self._STL(
                    self._c.execute(
                        'SELECT phash_id FROM shape_maintenance_branch_regen;')
                )

        finally:

            job_key.SetVariable('popup_text_1', 'done!')
            job_key.DeleteVariable('popup_gauge_1')
            job_key.DeleteVariable(
                'popup_text_2')  # used in the regenbranch call

            job_key.Finish()

            job_key.Delete(5)
Example #24
        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):

            job_key = ClientThreading.JobKey(cancellable=True)

            job_key.SetStatusTitle('file export')

            HG.client_controller.pub('message', job_key)

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media, path)) in enumerate(to_do):

                if job_key.IsCancelled():

                    break

                try:

                    x_of_y = HydrusData.ConvertValueRangeToPrettyString(
                        index + 1, num_to_do)

                    job_key.SetVariable('popup_text_1',
                                        'Done {}'.format(x_of_y))
                    job_key.SetVariable('popup_gauge_1',
                                        (index + 1, num_to_do))

                    QP.CallAfter(qt_update_label, x_of_y)

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = os.path.normpath(path)

                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key, ClientTags.TAG_DISPLAY_ACTUAL)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWriteable(path)

                except:

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if not job_key.IsCancelled() and delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean(
                    'delete_lock_for_archived_files')

                if delete_lock_for_archived_files:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                        if not media.HasArchive()
                    }

                else:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                    }

                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            job_key.DeleteVariable('popup_gauge_1')
            job_key.SetVariable('popup_text_1', 'Done!')

            job_key.Finish()

            job_key.Delete(5)

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)