Example #1
def GetContentUpdatesForAppliedContentApplicationCommandRatingsIncDec( service_key: bytes, one_star_value: float, action: int, media: typing.Collection[ ClientMedia.MediaSingleton ] ):
    
    if action == HC.CONTENT_UPDATE_INCREMENT:
        
        direction = 1
        initialisation_rating = 0.0
        
    elif action == HC.CONTENT_UPDATE_DECREMENT:
        
        direction = -1
        initialisation_rating = 1.0
        
    else:
        
        return []
        
    
    ratings_to_hashes = collections.defaultdict( set )
    
    for m in media:
        
        ratings_manager = m.GetRatingsManager()
        
        current_rating = ratings_manager.GetRating( service_key )
        
        if current_rating is None:
            
            new_rating = initialisation_rating
            
        else:
            
            new_rating = current_rating + ( one_star_value * direction )
            
            new_rating = max( min( new_rating, 1.0 ), 0.0 )
            
        
        if current_rating != new_rating:
            
            ratings_to_hashes[ new_rating ].add( m.GetHash() )
            
        
    
    content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( rating, hashes ) ) for ( rating, hashes ) in ratings_to_hashes.items() ]
    
    return content_updates
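A minimal usage sketch for the helper above, with hypothetical variable names; the write pattern mirrors ApplyContentApplicationCommandToMedia further down this page.

# hypothetical caller: rating_service_key, one_star_value and selected_media are assumed to already be in scope
content_updates = GetContentUpdatesForAppliedContentApplicationCommandRatingsIncDec( rating_service_key, one_star_value, HC.CONTENT_UPDATE_INCREMENT, selected_media )

if len( content_updates ) > 0:
    
    HG.client_controller.Write( 'content_updates', { rating_service_key : content_updates } )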
Example #2
    def _SetSelected(self, status_to_set):

        file_seeds = self._list_ctrl.GetData(only_selected=True)

        if status_to_set == CC.STATUS_UNKNOWN:

            deleted_and_clearable_file_seeds = [
                file_seed for file_seed in file_seeds
                if file_seed.IsDeleted() and file_seed.HasHash()
            ]

            if len(deleted_and_clearable_file_seeds) > 0:

                message = 'One or more of these files did not import due to being previously deleted. They will likely fail again unless you erase those deletion records. Would you like to do this now?'

                result = ClientGUIDialogsQuick.GetYesNo(self, message)

                if result == QW.QDialog.DialogCode.Accepted:

                    deletee_hashes = {
                        file_seed.GetHash()
                        for file_seed in deleted_and_clearable_file_seeds
                    }

                    from hydrus.client.gui import ClientGUIMediaActions

                    ClientGUIMediaActions.UndeleteFiles(deletee_hashes)

                    content_update_erase_record = HydrusData.ContentUpdate(
                        HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADVANCED,
                        ('delete_deleted', deletee_hashes))

                    service_keys_to_content_updates = {
                        CC.COMBINED_LOCAL_FILE_SERVICE_KEY:
                        [content_update_erase_record]
                    }

                    HG.client_controller.WriteSynchronous(
                        'content_updates', service_keys_to_content_updates)

        for file_seed in file_seeds:

            file_seed.SetStatus(status_to_set)

        self._file_seed_cache.NotifyFileSeedsUpdated(file_seeds)
Example #3
def GetContentUpdatesForAppliedContentApplicationCommandRatingsSetFlip( service_key: bytes, action: int, media: typing.Collection[ ClientMedia.MediaSingleton ], rating: typing.Optional[ float ] ):
    
    hashes = set()
    
    for m in media:
        
        hashes.add( m.GetHash() )
        
    
    can_set = False
    can_unset = False
    
    for m in media:
        
        ratings_manager = m.GetRatingsManager()
        
        current_rating = ratings_manager.GetRating( service_key )
        
        if current_rating == rating and action == HC.CONTENT_UPDATE_FLIP:
            
            can_unset = True
            
        else:
            
            can_set = True
            
        
    
    if can_set:
        
        row = ( rating, hashes )
        
    elif can_unset:
        
        row = ( None, hashes )
        
    else:
        
        return []
        
    
    content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, row ) ]
    
    return content_updates
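A quick sketch of the set/flip behaviour, with hypothetical names: when every selected file already carries the rating, a flip builds a ( None, hashes ) row, i.e. the rating is unset; otherwise it is set.

# hypothetical caller: rating_service_key and selected_media assumed in scope
content_updates = GetContentUpdatesForAppliedContentApplicationCommandRatingsSetFlip( rating_service_key, HC.CONTENT_UPDATE_FLIP, selected_media, 1.0 )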
Example #4
 def _FilterServiceKeysToContentUpdates( self, service_keys_to_content_updates ):
     
     filtered_service_keys_to_content_updates = {}
     
     for ( service_key, content_updates ) in service_keys_to_content_updates.items():
         
         filtered_content_updates = []
         
         for content_update in content_updates:
             
             ( data_type, action, row ) = content_update.ToTuple()
             
             if data_type == HC.CONTENT_TYPE_FILES:
                 
                 if action in ( HC.CONTENT_UPDATE_ADD, HC.CONTENT_UPDATE_DELETE, HC.CONTENT_UPDATE_UNDELETE, HC.CONTENT_UPDATE_RESCIND_PETITION, HC.CONTENT_UPDATE_ADVANCED ):
                     
                     continue
                     
                 
             elif data_type == HC.CONTENT_TYPE_MAPPINGS:
                 
                 if action in ( HC.CONTENT_UPDATE_RESCIND_PETITION, HC.CONTENT_UPDATE_ADVANCED ):
                     
                     continue
                     
                 
             else:
                 
                 continue
                 
             
             filtered_content_update = HydrusData.ContentUpdate( data_type, action, row )
             
             filtered_content_updates.append( filtered_content_update )
             
         
         if len( filtered_content_updates ) > 0:
             
             filtered_service_keys_to_content_updates[ service_key ] = filtered_content_updates
             
         
     
     return filtered_service_keys_to_content_updates
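A small illustration of what survives this filter, restating the branches above with hypothetical values: file archive/inbox updates pass through, while file add/delete/undelete rows and unhandled data types are dropped.

# hypothetical input: one keepable update and one that will be dropped
archive_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, some_hashes )
add_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADD, ( file_info_manager, now ) )

filtered = self._FilterServiceKeysToContentUpdates( { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : [ archive_update, add_update ] } )

# filtered now maps the service key to just the archive update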
Example #5
    def _PubSubRow(self, hash, canvas_type, row):

        (view_timestamp, views_delta, viewtime_delta) = row

        pubsub_row = (hash, canvas_type, view_timestamp, views_delta,
                      viewtime_delta)

        content_update = HydrusData.ContentUpdate(
            HC.CONTENT_TYPE_FILE_VIEWING_STATS, HC.CONTENT_UPDATE_ADD,
            pubsub_row)

        service_keys_to_content_updates = {
            CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [content_update]
        }

        HG.client_controller.pub('content_updates_data',
                                 service_keys_to_content_updates)
        HG.client_controller.pub('content_updates_gui',
                                 service_keys_to_content_updates)
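Unlike most of the other examples here, nothing is written to the database; the row is only published on the 'content_updates_data' and 'content_updates_gui' topics. A hypothetical call, assuming a media-viewer canvas constant:

# hypothetical: record one view of roughly twenty seconds in the media viewer
self._PubSubRow( hash, CC.CANVAS_MEDIA_VIEWER, ( HydrusData.GetNow(), 1, 20.0 ) )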
Example #6
    def DoSomeWork(self, source):

        time_started_precise = HydrusData.GetNowPrecise()

        data = source.GetSomeData()

        content_updates = []

        pairs = []

        for (hash, tags) in data:

            pairs.extend(((tag, hash) for tag in tags))

        num_done = len(pairs)

        tags_to_hashes = HydrusData.BuildKeyToListDict(pairs)

        if self._content_action == HC.CONTENT_UPDATE_PETITION:

            reason = 'Mass Migration Job'

        else:

            reason = None

        for (tag, hashes) in tags_to_hashes.items():

            content_updates.append(
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                         self._content_action, (tag, hashes),
                                         reason=reason))

        service_keys_to_content_updates = {
            self._tag_service_key: content_updates
        }

        self._controller.WriteSynchronous('content_updates',
                                          service_keys_to_content_updates)

        return GetBasicSpeedStatement(num_done, time_started_precise)
Example #7
 def _InvertServiceKeysToContentUpdates( self, service_keys_to_content_updates ):
     
     inverted_service_keys_to_content_updates = {}
     
     for ( service_key, content_updates ) in service_keys_to_content_updates.items():
         
         inverted_content_updates = []
         
         for content_update in content_updates:
             
             ( data_type, action, row ) = content_update.ToTuple()
             
             inverted_row = row
             
             if data_type == HC.CONTENT_TYPE_FILES:
                 
                 if action == HC.CONTENT_UPDATE_ARCHIVE: inverted_action = HC.CONTENT_UPDATE_INBOX
                 elif action == HC.CONTENT_UPDATE_INBOX: inverted_action = HC.CONTENT_UPDATE_ARCHIVE
                 elif action == HC.CONTENT_UPDATE_PEND: inverted_action = HC.CONTENT_UPDATE_RESCIND_PEND
                 elif action == HC.CONTENT_UPDATE_RESCIND_PEND: inverted_action = HC.CONTENT_UPDATE_PEND
                 elif action == HC.CONTENT_UPDATE_PETITION: inverted_action = HC.CONTENT_UPDATE_RESCIND_PETITION
                 
             elif data_type == HC.CONTENT_TYPE_MAPPINGS:
                 
                 if action == HC.CONTENT_UPDATE_ADD: inverted_action = HC.CONTENT_UPDATE_DELETE
                 elif action == HC.CONTENT_UPDATE_DELETE: inverted_action = HC.CONTENT_UPDATE_ADD
                 elif action == HC.CONTENT_UPDATE_PEND: inverted_action = HC.CONTENT_UPDATE_RESCIND_PEND
                 elif action == HC.CONTENT_UPDATE_RESCIND_PEND: inverted_action = HC.CONTENT_UPDATE_PEND
                 elif action == HC.CONTENT_UPDATE_PETITION: inverted_action = HC.CONTENT_UPDATE_RESCIND_PETITION
                 
             
             inverted_content_update = HydrusData.ContentUpdate( data_type, inverted_action, inverted_row )
             
             inverted_content_updates.append( inverted_content_update )
             
         
         inverted_service_keys_to_content_updates[ service_key ] = inverted_content_updates
         
     
     return inverted_service_keys_to_content_updates
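This pairs archive with inbox, add with delete, and pend/petition with their rescinds; presumably only updates that survived a filter like _FilterServiceKeysToContentUpdates above reach it, since an unhandled action would leave inverted_action unset. The undo test near the end of this page exercises the archive/inbox pairing; a minimal sketch, with some_hashes hypothetical:

archive_command = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE, some_hashes ) ] }

inverted_command = self._InvertServiceKeysToContentUpdates( archive_command )

# the single resulting update now carries HC.CONTENT_UPDATE_INBOX with the same hashes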
Example #8
 def Flush( self ):
     
     with self._lock:
         
         if len( self._pending_updates ) > 0:
             
             content_updates = []
             
             for ( hash, ( preview_views_delta, preview_viewtime_delta, media_views_delta, media_viewtime_delta ) ) in self._pending_updates.items():
                 
                 row = ( hash, preview_views_delta, preview_viewtime_delta, media_views_delta, media_viewtime_delta )
                 
                 content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILE_VIEWING_STATS, HC.CONTENT_UPDATE_ADD, row )
                 
                 content_updates.append( content_update )
                 
             
             service_keys_to_content_updates = { CC.COMBINED_LOCAL_FILE_SERVICE_KEY : content_updates }
             
             # non-synchronous
             self._controller.Write( 'content_updates', service_keys_to_content_updates, do_pubsubs = False )
             
             self._pending_updates = {}
Example #9
def UndeleteFiles(hashes):

    local_file_service_keys = HG.client_controller.services_manager.GetServiceKeys(
        (HC.LOCAL_FILE_DOMAIN, ))

    for chunk_of_hashes in HydrusData.SplitIteratorIntoChunks(hashes, 64):

        media_results = HG.client_controller.Read('media_results',
                                                  chunk_of_hashes)

        service_keys_to_hashes = collections.defaultdict(list)

        for media_result in media_results:

            locations_manager = media_result.GetLocationsManager()

            if CC.TRASH_SERVICE_KEY not in locations_manager.GetCurrent():

                continue

            hash = media_result.GetHash()

            for service_key in locations_manager.GetDeleted().intersection(
                    local_file_service_keys):

                service_keys_to_hashes[service_key].append(hash)

        for (service_key, service_hashes) in service_keys_to_hashes.items():

            content_update = HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE,
                service_hashes)

            service_keys_to_content_updates = {service_key: [content_update]}

            HG.client_controller.WriteSynchronous(
                'content_updates', service_keys_to_content_updates)
Example #10
def ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
        hashes, service_keys_to_tags):

    service_keys_to_content_updates = {}

    for (service_key, tags) in service_keys_to_tags.items():

        if len(tags) == 0:

            continue

        try:

            service = HG.client_controller.services_manager.GetService(
                service_key)

        except HydrusExceptions.DataMissing:

            continue

        if service.GetServiceType() == HC.LOCAL_TAG:

            action = HC.CONTENT_UPDATE_ADD

        else:

            action = HC.CONTENT_UPDATE_PEND

        content_updates = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS, action,
                                     (tag, hashes)) for tag in tags
        ]

        service_keys_to_content_updates[service_key] = content_updates

    return service_keys_to_content_updates
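The test_dict_to_content_updates example further down this page exercises this helper; a minimal call looks like the following, with a placeholder tag and hash. Local tag services get CONTENT_UPDATE_ADD, everything else gets CONTENT_UPDATE_PEND.

service_keys_to_tags = ClientTags.ServiceKeysToTags( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { 'blue eyes' } } )

service_keys_to_content_updates = ConvertServiceKeysToTagsToServiceKeysToContentUpdates( { hash }, service_keys_to_tags )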
Example #11
        def GetContentUpdates(self):

            service_keys_to_content_updates = {}

            hashes = {
                hash
                for hash in itertools.chain.from_iterable((
                    media.GetHashes() for media in self._media))
            }

            for (service_key,
                 control) in list(self._service_keys_to_controls.items()):

                (original_rating_state, original_rating
                 ) = self._service_keys_to_original_ratings_states[service_key]

                rating_state = control.GetRatingState()

                if rating_state == ClientRatings.NULL:

                    rating = None

                else:

                    rating = control.GetRating()

                if rating != original_rating:

                    content_update = HydrusData.ContentUpdate(
                        HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                        (rating, hashes))

                    service_keys_to_content_updates[service_key] = (
                        content_update, )

            return service_keys_to_content_updates
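A hypothetical caller, such as an OK handler on the ratings dialog this panel lives in, would apply whatever changed:

service_keys_to_content_updates = panel.GetContentUpdates()

if len( service_keys_to_content_updates ) > 0:
    
    HG.client_controller.Write( 'content_updates', service_keys_to_content_updates )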
Example #12
 def _DoExport( self ):
     
     query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
     
     media_results = []
     
     i = 0
     
     base = 256
     
     while i < len( query_hash_ids ):
         
         if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         if i == 0: ( last_i, i ) = ( 0, base )
         else: ( last_i, i ) = ( i, i + base )
         
         sub_query_hash_ids = query_hash_ids[ last_i : i ]
         
         more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
         
         media_results.extend( more_media_results )
         
     
     media_results.sort( key = lambda mr: mr.GetHashId() )
     
     #
     
     terms = ParseExportPhrase( self._phrase )
     
     previous_paths = set()
     
     for ( root, dirnames, filenames ) in os.walk( self._path ):
         
         previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
         
     
     sync_paths = set()
     
     client_files_manager = HG.client_controller.client_files_manager
     
     num_copied = 0
     
     for media_result in media_results:
         
         if HG.client_controller.new_options.GetBoolean( 'pause_export_folders_sync' ) or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         hash = media_result.GetHash()
         mime = media_result.GetMime()
         size = media_result.GetSize()
         
         try:
             
             source_path = client_files_manager.GetFilePath( hash, mime )
             
         except HydrusExceptions.FileMissingException:
             
             raise Exception( 'A file to be exported, hash "{}", was missing! You should run file maintenance (under database->maintenance->files) to check the files for the export folder\'s search, and possibly all your files.'.format( hash.hex() ) )
             
         
         filename = GenerateExportFilename( self._path, media_result, terms )
         
         dest_path = os.path.normpath( os.path.join( self._path, filename ) )
         
         if not dest_path.startswith( self._path ):
             
             raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
             
         
         dest_path_dir = os.path.dirname( dest_path )
         
         HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
         
         if dest_path not in sync_paths:
             
             copied = HydrusPaths.MirrorFile( source_path, dest_path )
             
             if copied:
                 
                 num_copied += 1
                 
                 HydrusPaths.TryToGiveFileNicePermissionBits( dest_path )
                 
             
         
         sync_paths.add( dest_path )
         
     
     if num_copied > 0:
         
         HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
         
     
     if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
         
         deletee_paths = previous_paths.difference( sync_paths )
         
         for deletee_path in deletee_paths:
             
             ClientPaths.DeletePath( deletee_path )
             
         
         deletee_dirs = set()
         
         for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
             
             if root == self._path:
                 
                 continue
                 
             
             no_files = len( filenames ) == 0
             
             useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
             
             no_useful_dirs = len( useful_dirnames ) == 0
             
             if no_useful_dirs and no_files:
                 
                 deletee_dirs.add( root )
                 
             
         
         for deletee_dir in deletee_dirs:
             
             if os.path.exists( deletee_dir ):
                 
                 HydrusPaths.DeletePath( deletee_dir )
                 
             
         
         if len( deletee_paths ) > 0:
             
             HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
             
         
     
     if self._delete_from_client_after_export:
         
         local_file_service_keys = HG.client_controller.services_manager.GetServiceKeys( ( HC.LOCAL_FILE_DOMAIN, ) )
         
         service_keys_to_deletee_hashes = collections.defaultdict( list )
         
         delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean( 'delete_lock_for_archived_files' )
         
         for media_result in media_results:
             
             if delete_lock_for_archived_files and not media_result.GetInbox():
                 
                 continue
                 
             
             hash = media_result.GetHash()
             
             deletee_service_keys = media_result.GetLocationsManager().GetCurrent().intersection( local_file_service_keys )
             
             for deletee_service_key in deletee_service_keys:
                 
                 service_keys_to_deletee_hashes[ deletee_service_key ].append( hash )
                 
             
         
         reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
         
         for ( service_key, deletee_hashes ) in service_keys_to_deletee_hashes.items():
             
             chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
             
             for chunk_of_hashes in chunks_of_hashes:
                 
                 content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason )
                 
                 HG.client_controller.WriteSynchronous( 'content_updates', { service_key : [ content_update ] } )
Example #13
    def ProcessPairIntoContentUpdates(self,
                                      first_media,
                                      second_media,
                                      delete_first=False,
                                      delete_second=False,
                                      delete_both=False,
                                      file_deletion_reason=None):

        if file_deletion_reason is None:

            file_deletion_reason = 'unknown reason'

        service_keys_to_content_updates = collections.defaultdict(list)

        first_hashes = first_media.GetHashes()
        second_hashes = second_media.GetHashes()

        #

        services_manager = HG.client_controller.services_manager

        for (service_key, action, tag_filter) in self._tag_service_actions:

            content_updates = []

            try:

                service = services_manager.GetService(service_key)

            except HydrusExceptions.DataMissing:

                continue

            service_type = service.GetServiceType()

            if service_type == HC.LOCAL_TAG:

                add_content_action = HC.CONTENT_UPDATE_ADD

            elif service_type == HC.TAG_REPOSITORY:

                add_content_action = HC.CONTENT_UPDATE_PEND

            first_tags = first_media.GetTagsManager().GetCurrentAndPending(
                service_key, ClientTags.TAG_DISPLAY_STORAGE)
            second_tags = second_media.GetTagsManager().GetCurrentAndPending(
                service_key, ClientTags.TAG_DISPLAY_STORAGE)

            first_tags = tag_filter.Filter(first_tags)
            second_tags = tag_filter.Filter(second_tags)

            if action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:

                first_needs = second_tags.difference(first_tags)
                second_needs = first_tags.difference(second_tags)

                content_updates.extend(
                    (HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                              add_content_action,
                                              (tag, first_hashes))
                     for tag in first_needs))
                content_updates.extend(
                    (HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                              add_content_action,
                                              (tag, second_hashes))
                     for tag in second_needs))

            elif action == HC.CONTENT_MERGE_ACTION_COPY:

                first_needs = second_tags.difference(first_tags)

                content_updates.extend(
                    (HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                              add_content_action,
                                              (tag, first_hashes))
                     for tag in first_needs))

            elif service_type == HC.LOCAL_TAG and action == HC.CONTENT_MERGE_ACTION_MOVE:

                first_needs = second_tags.difference(first_tags)

                content_updates.extend(
                    (HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                              add_content_action,
                                              (tag, first_hashes))
                     for tag in first_needs))
                content_updates.extend(
                    (HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                              HC.CONTENT_UPDATE_DELETE,
                                              (tag, second_hashes))
                     for tag in second_tags))

            if len(content_updates) > 0:

                service_keys_to_content_updates[service_key].extend(
                    content_updates)

        def worth_updating_rating(source_rating, dest_rating):

            if source_rating is not None:

                if dest_rating is None or source_rating > dest_rating:

                    return True

            return False

        for (service_key, action) in self._rating_service_actions:

            content_updates = []

            try:

                service = services_manager.GetService(service_key)

            except HydrusExceptions.DataMissing:

                continue

            first_current_value = first_media.GetRatingsManager().GetRating(
                service_key)
            second_current_value = second_media.GetRatingsManager().GetRating(
                service_key)

            if action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:

                if worth_updating_rating(first_current_value,
                                         second_current_value):

                    content_updates.append(
                        HydrusData.ContentUpdate(
                            HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                            (first_current_value, second_hashes)))

                elif worth_updating_rating(second_current_value,
                                           first_current_value):

                    content_updates.append(
                        HydrusData.ContentUpdate(
                            HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                            (second_current_value, first_hashes)))

            elif action == HC.CONTENT_MERGE_ACTION_COPY:

                if worth_updating_rating(second_current_value,
                                         first_current_value):

                    content_updates.append(
                        HydrusData.ContentUpdate(
                            HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                            (second_current_value, first_hashes)))

            elif action == HC.CONTENT_MERGE_ACTION_MOVE:

                if second_current_value is not None:

                    if worth_updating_rating(second_current_value,
                                             first_current_value):

                        content_updates.append(
                            HydrusData.ContentUpdate(
                                HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD,
                                (second_current_value, first_hashes)))

                    content_updates.append(
                        HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                                 HC.CONTENT_UPDATE_ADD,
                                                 (None, second_hashes)))

            if len(content_updates) > 0:

                service_keys_to_content_updates[service_key].extend(
                    content_updates)

        #

        if self._sync_archive:

            if first_media.HasInbox() and second_media.HasArchive():

                content_update = HydrusData.ContentUpdate(
                    HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE,
                    first_hashes)

                service_keys_to_content_updates[
                    CC.COMBINED_LOCAL_FILE_SERVICE_KEY].append(content_update)

            elif first_media.HasArchive() and second_media.HasInbox():

                content_update = HydrusData.ContentUpdate(
                    HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ARCHIVE,
                    second_hashes)

                service_keys_to_content_updates[
                    CC.COMBINED_LOCAL_FILE_SERVICE_KEY].append(content_update)

        #

        if self._sync_urls_action is not None:

            first_urls = set(first_media.GetLocationsManager().GetURLs())
            second_urls = set(second_media.GetLocationsManager().GetURLs())

            content_updates = []

            if self._sync_urls_action == HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE:

                first_needs = second_urls.difference(first_urls)
                second_needs = first_urls.difference(second_urls)

                if len(first_needs) > 0:

                    content_updates.append(
                        HydrusData.ContentUpdate(HC.CONTENT_TYPE_URLS,
                                                 HC.CONTENT_UPDATE_ADD,
                                                 (first_needs, first_hashes)))

                if len(second_needs) > 0:

                    content_updates.append(
                        HydrusData.ContentUpdate(
                            HC.CONTENT_TYPE_URLS, HC.CONTENT_UPDATE_ADD,
                            (second_needs, second_hashes)))

            elif self._sync_urls_action == HC.CONTENT_MERGE_ACTION_COPY:

                first_needs = second_urls.difference(first_urls)

                if len(first_needs) > 0:

                    content_updates.append(
                        HydrusData.ContentUpdate(HC.CONTENT_TYPE_URLS,
                                                 HC.CONTENT_UPDATE_ADD,
                                                 (first_needs, first_hashes)))

            if len(content_updates) > 0:

                service_keys_to_content_updates[
                    CC.COMBINED_LOCAL_FILE_SERVICE_KEY].extend(content_updates)

        #

        deletee_media = []

        if delete_first or delete_second or delete_both:

            if delete_first or delete_both:

                deletee_media.append(first_media)

            if delete_second or delete_both:

                deletee_media.append(second_media)

        for media in deletee_media:

            current_locations = media.GetLocationsManager().GetCurrent()

            if CC.LOCAL_FILE_SERVICE_KEY in current_locations:

                deletee_service_key = CC.LOCAL_FILE_SERVICE_KEY

            elif CC.TRASH_SERVICE_KEY in current_locations:

                deletee_service_key = CC.TRASH_SERVICE_KEY

            else:

                deletee_service_key = None

            if deletee_service_key is not None:

                content_update = HydrusData.ContentUpdate(
                    HC.CONTENT_TYPE_FILES,
                    HC.CONTENT_UPDATE_DELETE,
                    media.GetHashes(),
                    reason=file_deletion_reason)

                service_keys_to_content_updates[deletee_service_key].append(
                    content_update)

        #

        return service_keys_to_content_updates
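A hypothetical invocation from a duplicate-filter decision, with every name assumed: merge tags, ratings and urls into the first file according to the configured actions and delete the second.

# hypothetical: duplicate_action_options is an instance of the options class this method belongs to
service_keys_to_content_updates = duplicate_action_options.ProcessPairIntoContentUpdates( first_media, second_media, delete_second = True, file_deletion_reason = 'duplicate filter' )

HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )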
Example #14
 def GetServiceKeysToContentUpdates( self, media_result: ClientMediaResult.MediaResult, names_and_notes: typing.Collection[ typing.Tuple[ str, str ] ] ):
     
     content_updates = []
     
     if self._get_notes:
         
         hash = media_result.GetHash()
         
         notes_manager = media_result.GetNotesManager()
         
         existing_names_to_notes = dict( notes_manager.GetNamesToNotes() )
         
         for ( name, note ) in names_and_notes:
             
             if name in self._names_to_name_overrides:
                 
                 name = self._names_to_name_overrides[ name ]
                 
             elif self._all_name_override is not None:
                 
                 name = self._all_name_override
                 
             
             if name in existing_names_to_notes:
                 
                 name_exists = True
                 
                 existing_note = existing_names_to_notes[ name ]
                 
                 name_and_note_exists = existing_note == note
                 
                 new_note_is_an_extension = existing_note in note
                 
             else:
                 
                 name_exists = False
                 name_and_note_exists = False
                 new_note_is_an_extension = False
                 
             
             do_it = True
             
             if name_and_note_exists:
                 
                 do_it = False
                 
             elif name_exists:
                 
                 if new_note_is_an_extension and self._extend_existing_note_if_possible:
                     
                     pass # yes let's do it with current name and note
                     
                 else:
                     
                     if self._conflict_resolution == NOTE_IMPORT_CONFLICT_IGNORE:
                         
                         do_it = False
                         
                     elif self._conflict_resolution == NOTE_IMPORT_CONFLICT_RENAME:
                         
                         existing_names = set( existing_names_to_notes.keys() )
                         
                         name = HydrusData.GetNonDupeName( name, existing_names )
                         
                     elif self._conflict_resolution == NOTE_IMPORT_CONFLICT_APPEND:
                         
                         existing_note = existing_names_to_notes[ name ]
                         
                         sep = os.linesep * 2
                         
                         note = sep.join( ( existing_note, note ) )
                         
                     
                 
             
             if do_it:
                 
                 existing_names_to_notes[ name ] = note
                 
                 content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_NOTES, HC.CONTENT_UPDATE_SET, ( hash, name, note ) ) )
                 
             
         
     
     service_keys_to_content_updates = {}
     
     if len( content_updates ) > 0:
         
         service_keys_to_content_updates[ CC.LOCAL_NOTES_SERVICE_KEY ] = content_updates
         
     
     return service_keys_to_content_updates
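A minimal sketch of pushing parsed notes through these import options; all names here are assumptions.

# hypothetical: note_import_options is an instance of the options class above,
# media_result was fetched for the file being imported
names_and_notes = [ ( 'translation', 'hello there' ) ]

service_keys_to_content_updates = note_import_options.GetServiceKeysToContentUpdates( media_result, names_and_notes )

if len( service_keys_to_content_updates ) > 0:
    
    HG.client_controller.WriteSynchronous( 'content_updates', service_keys_to_content_updates )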
Example #15
def DAEMONMaintainTrash(controller):

    if HC.options['trash_max_size'] is not None:

        max_size = HC.options['trash_max_size'] * 1048576

        service_info = controller.Read('service_info', CC.TRASH_SERVICE_KEY)

        while service_info[HC.SERVICE_INFO_TOTAL_SIZE] > max_size:

            if HydrusThreading.IsThreadShuttingDown():

                return

            hashes = controller.Read('trash_hashes', limit=10)

            if len(hashes) == 0:

                return

            content_update = HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                                      HC.CONTENT_UPDATE_DELETE,
                                                      hashes)

            service_keys_to_content_updates = {
                CC.TRASH_SERVICE_KEY: [content_update]
            }

            controller.WriteSynchronous('content_updates',
                                        service_keys_to_content_updates)

            service_info = controller.Read('service_info',
                                           CC.TRASH_SERVICE_KEY)

            time.sleep(2)

    if HC.options['trash_max_age'] is not None:

        max_age = HC.options['trash_max_age'] * 3600

        hashes = controller.Read('trash_hashes', limit=10, minimum_age=max_age)

        while len(hashes) > 0:

            if HydrusThreading.IsThreadShuttingDown():

                return

            content_update = HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                                      HC.CONTENT_UPDATE_DELETE,
                                                      hashes)

            service_keys_to_content_updates = {
                CC.TRASH_SERVICE_KEY: [content_update]
            }

            controller.WriteSynchronous('content_updates',
                                        service_keys_to_content_updates)

            hashes = controller.Read('trash_hashes',
                                     limit=10,
                                     minimum_age=max_age)

            time.sleep(2)
Example #16
    def test_dict_to_content_updates(self):

        hash = HydrusData.GenerateKey()

        hashes = {hash}

        local_key = CC.DEFAULT_LOCAL_TAG_SERVICE_KEY
        remote_key = HG.test_controller.example_tag_repo_service_key

        service_keys_to_tags = ClientTags.ServiceKeysToTags({local_key: {'a'}})

        content_updates = {
            local_key: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                         HC.CONTENT_UPDATE_ADD, ('a', hashes))
            ]
        }

        self.assertEqual(
            ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
                {hash}, service_keys_to_tags), content_updates)

        service_keys_to_tags = ClientTags.ServiceKeysToTags(
            {remote_key: {'c'}})

        content_updates = {
            remote_key: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                         HC.CONTENT_UPDATE_PEND, ('c', hashes))
            ]
        }

        self.assertEqual(
            ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
                {hash}, service_keys_to_tags), content_updates)

        service_keys_to_tags = ClientTags.ServiceKeysToTags({
            local_key: ['a', 'character:b'],
            remote_key: ['c', 'series:d']
        })

        content_updates = {}

        content_updates[local_key] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD, ('a', hashes)),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     ('character:b', hashes))
        ]
        content_updates[remote_key] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_PEND, ('c', hashes)),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_PEND,
                                     ('series:d', hashes))
        ]

        self.assertEqual(
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_PEND, 'c'),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_PEND, 'c'))
        self.assertEqual(
            ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
                {hash}, service_keys_to_tags), content_updates)
Example #17
def ApplyContentApplicationCommandToMedia( parent, command, media ):
    
    data = command.GetData()
    
    ( service_key, content_type, action, value ) = data
    
    try:
        
        service = HG.client_controller.services_manager.GetService( service_key )
        
    except HydrusExceptions.DataMissing:
        
        command_processed = False
        
        return command_processed
        
    
    service_type = service.GetServiceType()
    
    hashes = set()
    
    for m in media:
        
        hashes.add( m.GetHash() )
        
    
    if service_type in HC.REAL_TAG_SERVICES:
        
        tag = value
        
        can_add = False
        can_pend = False
        can_delete = False
        can_petition = True
        can_rescind_pend = False
        can_rescind_petition = False
        
        for m in media:
            
            tags_manager = m.GetTagsManager()
            
            current = tags_manager.GetCurrent( service_key, ClientTags.TAG_DISPLAY_STORAGE )
            pending = tags_manager.GetPending( service_key, ClientTags.TAG_DISPLAY_STORAGE )
            petitioned = tags_manager.GetPetitioned( service_key, ClientTags.TAG_DISPLAY_STORAGE )
            
            if tag not in current:
                
                can_add = True
                
            
            if tag not in current and tag not in pending:
                
                can_pend = True
                
            
            if tag in current and action == HC.CONTENT_UPDATE_FLIP:
                
                can_delete = True
                
            
            if tag in current and tag not in petitioned and action == HC.CONTENT_UPDATE_FLIP:
                
                can_petition = True
                
            
            if tag in pending and action == HC.CONTENT_UPDATE_FLIP:
                
                can_rescind_pend = True
                
            
            if tag in petitioned:
                
                can_rescind_petition = True
                
            
        
        reason = None
        
        if service_type == HC.LOCAL_TAG:
            
            tags = [ tag ]
            
            if can_add:
                
                content_update_action = HC.CONTENT_UPDATE_ADD
                
                tag_parents_manager = HG.client_controller.tag_parents_manager
                
                parents = tag_parents_manager.GetParents( service_key, tag )
                
                tags.extend( parents )
                
            elif can_delete:
                
                content_update_action = HC.CONTENT_UPDATE_DELETE
                
            else:
                
                return True
                
            
            rows = [ ( tag, hashes ) for tag in tags ]
            
        else:
            
            if can_rescind_petition:
                
                content_update_action = HC.CONTENT_UPDATE_RESCIND_PETITION
                
                rows = [ ( tag, hashes ) ]
                
            elif can_pend:
                
                tags = [ tag ]
                
                content_update_action = HC.CONTENT_UPDATE_PEND
                
                tag_parents_manager = HG.client_controller.tag_parents_manager
                
                parents = tag_parents_manager.GetParents( service_key, tag )
                
                tags.extend( parents )
                
                rows = [ ( tag, hashes ) for tag in tags ]
                
            elif can_rescind_pend:
                
                content_update_action = HC.CONTENT_UPDATE_RESCIND_PEND
                
                rows = [ ( tag, hashes ) ]
                
            elif can_petition:
                
                message = 'Enter a reason for this tag to be removed. A janitor will review your petition.'
                
                from hydrus.client.gui import ClientGUIDialogs
                
                with ClientGUIDialogs.DialogTextEntry( parent, message ) as dlg:
                    
                    if dlg.exec() == QW.QDialog.Accepted:
                        
                        content_update_action = HC.CONTENT_UPDATE_PETITION
                        
                        reason = dlg.GetValue()
                        
                        rows = [ ( tag, hashes ) ]
                        
                    else:
                        
                        return True
                        
                    
                
            else:
                
                return True
                
            
        
        content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, content_update_action, row, reason = reason ) for row in rows ]
        
    elif service_type in ( HC.LOCAL_RATING_LIKE, HC.LOCAL_RATING_NUMERICAL ):
        
        if action in ( HC.CONTENT_UPDATE_SET, HC.CONTENT_UPDATE_FLIP ):
            
            rating = value
            
            can_set = False
            can_unset = False
            
            for m in media:
                
                ratings_manager = m.GetRatingsManager()
                
                current_rating = ratings_manager.GetRating( service_key )
                
                if current_rating == rating and action == HC.CONTENT_UPDATE_FLIP:
                    
                    can_unset = True
                    
                else:
                    
                    can_set = True
                    
                
            
            if can_set:
                
                row = ( rating, hashes )
                
            elif can_unset:
                
                row = ( None, hashes )
                
            else:
                
                return True
                
            
            content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, row ) ]
            
        elif action in ( HC.CONTENT_UPDATE_INCREMENT, HC.CONTENT_UPDATE_DECREMENT ):
            
            if service_type == HC.LOCAL_RATING_NUMERICAL:
                
                if action == HC.CONTENT_UPDATE_INCREMENT:
                    
                    direction = 1
                    initialisation_rating = 0.0
                    
                elif action == HC.CONTENT_UPDATE_DECREMENT:
                    
                    direction = -1
                    initialisation_rating = 1.0
                    
                
                num_stars = service.GetNumStars()
                
                if service.AllowZero():
                    
                    num_stars += 1
                    
                
                one_star_value = 1.0 / ( num_stars - 1 )
                
                ratings_to_hashes = collections.defaultdict( set )
                
                for m in media:
                    
                    ratings_manager = m.GetRatingsManager()
                    
                    current_rating = ratings_manager.GetRating( service_key )
                    
                    if current_rating is None:
                        
                        new_rating = initialisation_rating
                        
                    else:
                        
                        new_rating = current_rating + ( one_star_value * direction )
                        
                        new_rating = max( min( new_rating, 1.0 ), 0.0 )
                        
                    
                    if current_rating != new_rating:
                        
                        ratings_to_hashes[ new_rating ].add( m.GetHash() )
                        
                    
                
                content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( rating, hashes ) ) for ( rating, hashes ) in ratings_to_hashes.items() ]
                
            else:
                
                return True
                
            
        
    else:
        
        return False
        
    
    if len( content_updates ) > 0:
        
        HG.client_controller.Write( 'content_updates', { service_key : content_updates } )
        
    
    return True
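A hypothetical dispatch from a shortcut handler; the return value reports whether the command was consumed.

# hypothetical caller: command is a content-type application command, media is the current selection
command_processed = ApplyContentApplicationCommandToMedia( self, command, media )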
Example #18
    def DoWork(self, status_hook=None) -> FileImportStatus:

        if HG.file_import_report_mode:

            HydrusData.ShowText('File import job starting work.')

        self.GeneratePreImportHashAndStatus(status_hook=status_hook)

        if self._pre_import_file_status.ShouldImport(
                self._file_import_options):

            self.GenerateInfo(status_hook=status_hook)

            try:

                self.CheckIsGoodToImport()

                ok_to_go = True

            except HydrusExceptions.FileImportRulesException as e:

                ok_to_go = False

                not_ok_file_import_status = self._pre_import_file_status.Duplicate(
                )

                not_ok_file_import_status.status = CC.STATUS_VETOED
                not_ok_file_import_status.note = str(e)

            if ok_to_go:

                hash = self._pre_import_file_status.hash
                mime = self._pre_import_file_status.mime

                if status_hook is not None:

                    status_hook('copying file into file storage')

                HG.client_controller.client_files_manager.AddFile(
                    hash,
                    mime,
                    self._temp_path,
                    thumbnail_bytes=self._thumbnail_bytes)

                if status_hook is not None:

                    status_hook('importing to database')

                self._file_import_options.CheckReadyToImport()

                self._post_import_file_status = HG.client_controller.WriteSynchronous(
                    'import_file', self)

            else:

                self._post_import_file_status = not_ok_file_import_status

        else:

            # if the file is already in the database but not in all the desired file services, let's push content updates to make it happen
            if self._pre_import_file_status.status == CC.STATUS_SUCCESSFUL_BUT_REDUNDANT:

                media_result = HG.client_controller.Read(
                    'media_result', self._pre_import_file_status.hash)

                destination_location_context = self._file_import_options.GetDestinationLocationContext(
                )

                desired_file_service_keys = destination_location_context.current_service_keys
                current_file_service_keys = media_result.GetLocationsManager(
                ).GetCurrent()

                file_service_keys_to_add_to = set(
                    desired_file_service_keys).difference(
                        current_file_service_keys)

                if len(file_service_keys_to_add_to) > 0:

                    file_info_manager = media_result.GetFileInfoManager()
                    now = HydrusData.GetNow()

                    service_keys_to_content_updates = {}

                    for service_key in file_service_keys_to_add_to:

                        service_keys_to_content_updates[service_key] = [
                            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                                     HC.CONTENT_UPDATE_ADD,
                                                     (file_info_manager, now))
                        ]

                    HG.client_controller.WriteSynchronous(
                        'content_updates', service_keys_to_content_updates)

            self._post_import_file_status = self._pre_import_file_status.Duplicate(
            )

        if HG.file_import_report_mode:

            HydrusData.ShowText(
                'File import job is done, now publishing content updates')

        self.PubsubContentUpdates()

        return self._post_import_file_status
Example #19
 def _DoExport( self ):
     
     query_hash_ids = HG.client_controller.Read( 'file_query_ids', self._file_search_context )
     
     media_results = []
     
     i = 0
     
     base = 256
     
     while i < len( query_hash_ids ):
         
         if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         if i == 0: ( last_i, i ) = ( 0, base )
         else: ( last_i, i ) = ( i, i + base )
         
         sub_query_hash_ids = query_hash_ids[ last_i : i ]
         
         more_media_results = HG.client_controller.Read( 'media_results_from_ids', sub_query_hash_ids )
         
         media_results.extend( more_media_results )
         
     
     media_results.sort( key = lambda mr: mr.GetHashId() )
     
     #
     
     terms = ParseExportPhrase( self._phrase )
     
     previous_paths = set()
     
     for ( root, dirnames, filenames ) in os.walk( self._path ):
         
         previous_paths.update( ( os.path.join( root, filename ) for filename in filenames ) )
         
     
     sync_paths = set()
     
     client_files_manager = HG.client_controller.client_files_manager
     
     num_copied = 0
     
     for media_result in media_results:
         
         if HC.options[ 'pause_export_folders_sync' ] or HydrusThreading.IsThreadShuttingDown():
             
             return
             
         
         hash = media_result.GetHash()
         mime = media_result.GetMime()
         size = media_result.GetSize()
         
         source_path = client_files_manager.GetFilePath( hash, mime )
         
         filename = GenerateExportFilename( self._path, media_result, terms )
         
         dest_path = os.path.normpath( os.path.join( self._path, filename ) )
         
         if not dest_path.startswith( self._path ):
             
             raise Exception( 'It seems a destination path for export folder "{}" was above the main export directory! The file was "{}" and its destination path was "{}".'.format( self._path, hash.hex(), dest_path ) )
             
         
         dest_path_dir = os.path.dirname( dest_path )
         
         HydrusPaths.MakeSureDirectoryExists( dest_path_dir )
         
         if dest_path not in sync_paths:
             
             copied = HydrusPaths.MirrorFile( source_path, dest_path )
             
             if copied:
                 
                 num_copied += 1
                 
                 HydrusPaths.MakeFileWritable( dest_path )
                 
             
         
         sync_paths.add( dest_path )
         
     
     if num_copied > 0:
         
         HydrusData.Print( 'Export folder ' + self._name + ' exported ' + HydrusData.ToHumanInt( num_copied ) + ' files.' )
         
     
     if self._export_type == HC.EXPORT_FOLDER_TYPE_SYNCHRONISE:
         
         deletee_paths = previous_paths.difference( sync_paths )
         
         for deletee_path in deletee_paths:
             
             ClientPaths.DeletePath( deletee_path )
             
         
         deletee_dirs = set()
         
         for ( root, dirnames, filenames ) in os.walk( self._path, topdown = False ):
             
             if root == self._path:
                 
                 continue
                 
             
             no_files = len( filenames ) == 0
             
             useful_dirnames = [ dirname for dirname in dirnames if os.path.join( root, dirname ) not in deletee_dirs ]
             
             no_useful_dirs = len( useful_dirnames ) == 0
             
             if no_useful_dirs and no_files:
                 
                 deletee_dirs.add( root )
                 
             
         
         for deletee_dir in deletee_dirs:
             
             if os.path.exists( deletee_dir ):
                 
                 HydrusPaths.DeletePath( deletee_dir )
                 
             
         
         if len( deletee_paths ) > 0:
             
             HydrusData.Print( 'Export folder {} deleted {} files and {} folders.'.format( self._name, HydrusData.ToHumanInt( len( deletee_paths ) ), HydrusData.ToHumanInt( len( deletee_dirs ) ) ) )
             
         
     
     if self._delete_from_client_after_export:
         
         deletee_hashes = { media_result.GetHash() for media_result in media_results }
         
         chunks_of_hashes = HydrusData.SplitListIntoChunks( deletee_hashes, 64 )
         
         reason = 'Deleted after export to Export Folder "{}".'.format( self._path )
         
         content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, chunk_of_hashes, reason = reason ) for chunk_of_hashes in chunks_of_hashes ]
         
         for content_update in content_updates:
             
             HG.client_controller.WriteSynchronous( 'content_updates', { CC.LOCAL_FILE_SERVICE_KEY : [ content_update ] } )
Example #20
    def test_undo(self):

        hash_1 = HydrusData.GenerateKey()
        hash_2 = HydrusData.GenerateKey()
        hash_3 = HydrusData.GenerateKey()

        command_1 = {
            CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                         HC.CONTENT_UPDATE_ARCHIVE, {hash_1})
            ]
        }
        command_2 = {
            CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                         HC.CONTENT_UPDATE_INBOX, {hash_2})
            ]
        }
        command_3 = {
            CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                         HC.CONTENT_UPDATE_ARCHIVE,
                                         {hash_1, hash_3})
            ]
        }

        command_1_inverted = {
            CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                         HC.CONTENT_UPDATE_INBOX, {hash_1})
            ]
        }
        command_2_inverted = {
            CC.COMBINED_LOCAL_FILE_SERVICE_KEY: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                         HC.CONTENT_UPDATE_ARCHIVE, {hash_2})
            ]
        }

        undo_manager = ClientManagers.UndoManager(HG.client_controller)

        #

        HG.test_controller.ClearWrites('content_updates')

        undo_manager.AddCommand('content_updates', command_1)

        self.assertEqual(('undo archive 1 files', None),
                         undo_manager.GetUndoRedoStrings())

        undo_manager.AddCommand('content_updates', command_2)

        self.assertEqual(('undo inbox 1 files', None),
                         undo_manager.GetUndoRedoStrings())

        undo_manager.Undo()

        self.assertEqual(('undo archive 1 files', 'redo inbox 1 files'),
                         undo_manager.GetUndoRedoStrings())

        self.assertEqual(HG.test_controller.GetWrite('content_updates'),
                         [((command_2_inverted, ), {})])

        undo_manager.Redo()

        self.assertEqual(HG.test_controller.GetWrite('content_updates'),
                         [((command_2, ), {})])

        self.assertEqual(('undo inbox 1 files', None),
                         undo_manager.GetUndoRedoStrings())

        undo_manager.Undo()

        self.assertEqual(HG.test_controller.GetWrite('content_updates'),
                         [((command_2_inverted, ), {})])

        undo_manager.Undo()

        self.assertEqual(HG.test_controller.GetWrite('content_updates'),
                         [((command_1_inverted, ), {})])

        self.assertEqual((None, 'redo archive 1 files'),
                         undo_manager.GetUndoRedoStrings())

        undo_manager.AddCommand('content_updates', command_3)

        self.assertEqual(('undo archive 2 files', None),
                         undo_manager.GetUndoRedoStrings())
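The undo manager replays an inverted copy of each command, which is why the test builds command_1_inverted and command_2_inverted by hand: over the same set of hashes, archive and inbox undo each other. A minimal sketch of that inversion, assuming only the constants already used above; invert_archive_inbox_update is a hypothetical name, not the UndoManager's actual internals.

def invert_archive_inbox_update(action, hashes):

    # hypothetical helper: archiving a set of hashes is undone by inboxing
    # the same hashes, and vice versa
    inverted_action = (HC.CONTENT_UPDATE_INBOX
                       if action == HC.CONTENT_UPDATE_ARCHIVE else
                       HC.CONTENT_UPDATE_ARCHIVE)

    return HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES, inverted_action,
                                    hashes)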
    def _add_mappings_to_services(self):

        content_updates = []

        for (hash, tags) in self._hashes_to_current_tags.items():

            for tag in tags:

                content_updates.append(
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                             HC.CONTENT_UPDATE_ADD,
                                             (tag, (hash, ))))

        for (hash, tags) in self._hashes_to_deleted_tags.items():

            for tag in tags:

                content_updates.append(
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                             HC.CONTENT_UPDATE_DELETE,
                                             (tag, (hash, ))))

        service_keys_to_content_updates = {
            CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: content_updates
        }

        self.WriteSynchronous('content_updates',
                              service_keys_to_content_updates)

        content_updates = []

        for (hash, tags) in self._hashes_to_current_tags.items():

            for tag in tags:

                content_updates.append(
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                             HC.CONTENT_UPDATE_ADD,
                                             (tag, (hash, ))))

        for (hash, tags) in self._hashes_to_pending_tags.items():

            for tag in tags:

                content_updates.append(
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                             HC.CONTENT_UPDATE_PEND,
                                             (tag, (hash, ))))

        for (hash, tags) in self._hashes_to_deleted_tags.items():

            for tag in tags:

                content_updates.append(
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                             HC.CONTENT_UPDATE_DELETE,
                                             (tag, (hash, ))))

        service_keys_to_content_updates = {
            service_key: content_updates
            for service_key in self._test_tag_repo_service_keys.values()
        }

        self.WriteSynchronous('content_updates',
                              service_keys_to_content_updates)
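Each mapping row in the helper above has the shape ( tag, hashes ), so the same dict of hash-to-tags can be turned into ADD, PEND or DELETE updates just by swapping the action. A minimal sketch of that construction, assuming only the constants already used above; build_mapping_updates is a hypothetical name.

def build_mapping_updates(hashes_to_tags, content_update_action):

    # hypothetical helper: one (tag, (hash,)) row per tag on each hash
    return [
        HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                 content_update_action, (tag, (hash, )))
        for (hash, tags) in hashes_to_tags.items() for tag in tags
    ]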
Example #22
def GetContentUpdatesForAppliedContentApplicationCommandTags( parent: QW.QWidget, service_key: bytes, service_type: int, action: int, media: typing.Collection[ ClientMedia.MediaSingleton ], tag: str ):
    
    hashes = set()
    
    for m in media:
        
        hashes.add( m.GetHash() )
        
    
    rows = [ ( tag, hashes ) ]
    
    can_add = False
    can_pend = False
    can_delete = False
    can_petition = False
    can_rescind_pend = False
    can_rescind_petition = False
    
    for m in media:
        
        tags_manager = m.GetTagsManager()
        
        current = tags_manager.GetCurrent( service_key, ClientTags.TAG_DISPLAY_STORAGE )
        pending = tags_manager.GetPending( service_key, ClientTags.TAG_DISPLAY_STORAGE )
        petitioned = tags_manager.GetPetitioned( service_key, ClientTags.TAG_DISPLAY_STORAGE )
        
        if tag not in current:
            
            can_add = True
            
        
        if tag not in current and tag not in pending:
            
            can_pend = True
            
        
        if tag in current and action == HC.CONTENT_UPDATE_FLIP:
            
            can_delete = True
            
        
        if tag in current and tag not in petitioned and action == HC.CONTENT_UPDATE_FLIP:
            
            can_petition = True
            
        
        if tag in pending and action == HC.CONTENT_UPDATE_FLIP:
            
            can_rescind_pend = True
            
        
        if tag in petitioned:
            
            can_rescind_petition = True
            
        
    
    reason = None
    
    if service_type == HC.LOCAL_TAG:
        
        if can_add:
            
            content_update_action = HC.CONTENT_UPDATE_ADD
            
        elif can_delete:
            
            content_update_action = HC.CONTENT_UPDATE_DELETE
            
        else:
            
            return []
            
        
    else:
        
        if can_rescind_petition:
            
            content_update_action = HC.CONTENT_UPDATE_RESCIND_PETITION
            
        elif can_pend:
            
            content_update_action = HC.CONTENT_UPDATE_PEND
            
        elif can_rescind_pend:
            
            content_update_action = HC.CONTENT_UPDATE_RESCIND_PEND
            
        elif can_petition:
            
            message = 'Enter a reason for this tag to be removed. A janitor will review your petition.'
            
            from hydrus.client.gui import ClientGUIDialogs
            
            with ClientGUIDialogs.DialogTextEntry( parent, message ) as dlg:
                
                if dlg.exec() == QW.QDialog.Accepted:
                    
                    content_update_action = HC.CONTENT_UPDATE_PETITION
                    
                    reason = dlg.GetValue()
                    
                else:
                    
                    return []
                    
                
            
        else:
            
            return []
            
        
    
    content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, content_update_action, row, reason = reason ) for row in rows ]
    
    return content_updates
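A hedged usage sketch for the function above, assuming a parent widget, a selection of ClientMedia.MediaSingleton objects and a tag are already in hand; parent_widget and selected_media are placeholders, not names from the original example.

# hypothetical call site: flip 'favourite' on the current selection against the default local tag service
content_updates = GetContentUpdatesForAppliedContentApplicationCommandTags( parent_widget, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.LOCAL_TAG, HC.CONTENT_UPDATE_FLIP, selected_media, 'favourite' )

if len( content_updates ) > 0:
    
    HG.client_controller.WriteSynchronous( 'content_updates', { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : content_updates } )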
Example #23
 def work_callable():
     
     job_key = ClientThreading.JobKey( cancellable = True )
     
     title = 'moving files' if action == HC.CONTENT_UPDATE_MOVE else 'adding files'
     
     job_key.SetStatusTitle( title )
     
     BLOCK_SIZE = 64
     
     if len( applicable_media ) > BLOCK_SIZE:
         
         HG.client_controller.pub( 'message', job_key )
         
     
     pauser = HydrusData.BigJobPauser()
     
     num_to_do = len( applicable_media )
     
     now = HydrusData.GetNow()
     
     for ( i, block_of_media ) in enumerate( HydrusData.SplitListIntoChunks( applicable_media, BLOCK_SIZE ) ):
         
         if job_key.IsCancelled():
             
             break
             
         
         job_key.SetVariable( 'popup_text_1', HydrusData.ConvertValueRangeToPrettyString( i * BLOCK_SIZE, num_to_do ) )
         job_key.SetVariable( 'popup_gauge_1', ( i * BLOCK_SIZE, num_to_do ) )
         
         content_updates = []
         undelete_hashes = set()
         
         for m in block_of_media:
             
             if dest_service_key in m.GetLocationsManager().GetDeleted():
                 
                 undelete_hashes.add( m.GetHash() )
                 
             else:
                 
                 content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADD, ( m.GetMediaResult().GetFileInfoManager(), now ) ) )
                 
             
         
         if len( undelete_hashes ) > 0:
             
             content_updates.append( HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, undelete_hashes ) )
             
         
         HG.client_controller.WriteSynchronous( 'content_updates', { dest_service_key : content_updates } )
         
         if action == HC.CONTENT_UPDATE_MOVE:
             
             block_of_hashes = [ m.GetHash() for m in block_of_media ]
             
             content_updates = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, block_of_hashes, reason = 'Moved to {}'.format( dest_service_name ) ) ]
             
             HG.client_controller.WriteSynchronous( 'content_updates', { source_service_key : content_updates } )
             
         
         pauser.Pause()
         
     
     job_key.Delete()
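The loop above splits each block of media into files the destination service has previously deleted, which are undeleted in one batch, and new arrivals, which get an ADD row of ( FileInfoManager, timestamp ). A minimal sketch of that decision for a single media object, assuming the same hydrus objects; build_add_or_undelete_update is a hypothetical name.

def build_add_or_undelete_update( m, dest_service_key, timestamp ):
    
    # hypothetical helper isolating the branch above for one media object
    if dest_service_key in m.GetLocationsManager().GetDeleted():
        
        return HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, { m.GetHash() } )
        
    else:
        
        return HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_ADD, ( m.GetMediaResult().GetFileInfoManager(), timestamp ) )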
    def test_SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS(self):
        def test(obj, dupe_obj):

            self.assertEqual(obj.ToTuple(), dupe_obj.ToTuple())

        duplicate_action_options_delete_and_move = ClientDuplicates.DuplicateActionOptions(
            [(CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE,
              ClientTags.TagFilter())],
            [(TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE),
             (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY,
              HC.CONTENT_MERGE_ACTION_MOVE)])
        duplicate_action_options_copy = ClientDuplicates.DuplicateActionOptions(
            [(CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY,
              ClientTags.TagFilter())],
            [(TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY),
             (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY,
              HC.CONTENT_MERGE_ACTION_COPY)])
        duplicate_action_options_merge = ClientDuplicates.DuplicateActionOptions(
            [(CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
              HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE, ClientTags.TagFilter())],
            [(TC.LOCAL_RATING_LIKE_SERVICE_KEY,
              HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE),
             (TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY,
              HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE)])

        inbox = True
        size = 40960
        mime = HC.IMAGE_JPEG
        width = 640
        height = 480
        duration = None
        num_frames = None
        has_audio = False
        num_words = None

        local_locations_manager = ClientMediaManagers.LocationsManager(
            {CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY},
            set(), set(), set(), inbox)
        trash_locations_manager = ClientMediaManagers.LocationsManager(
            {CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY}, set(),
            set(), set(), inbox)
        deleted_locations_manager = ClientMediaManagers.LocationsManager(
            set(), {CC.COMBINED_LOCAL_FILE_SERVICE_KEY}, set(), set(), inbox)

        # duplicate to generate proper dicts

        one_tags_manager = ClientMediaManagers.TagsManager(
            {
                CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {
                    HC.CONTENT_STATUS_CURRENT: {'one'}
                }
            }, {
                CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {
                    HC.CONTENT_STATUS_CURRENT: {'one'}
                }
            }).Duplicate()
        two_tags_manager = ClientMediaManagers.TagsManager(
            {
                CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {
                    HC.CONTENT_STATUS_CURRENT: {'two'}
                }
            }, {
                CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {
                    HC.CONTENT_STATUS_CURRENT: {'two'}
                }
            }).Duplicate()
        substantial_tags_manager = ClientMediaManagers.TagsManager(
            {
                CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {
                    HC.CONTENT_STATUS_CURRENT:
                    {'test tag', 'series:namespaced test tag'}
                }
            }, {
                CC.DEFAULT_LOCAL_TAG_SERVICE_KEY: {
                    HC.CONTENT_STATUS_CURRENT:
                    {'test tag', 'series:namespaced test tag'}
                }
            }).Duplicate()
        empty_tags_manager = ClientMediaManagers.TagsManager({},
                                                             {}).Duplicate()

        one_ratings_manager = ClientMediaManagers.RatingsManager({
            TC.LOCAL_RATING_LIKE_SERVICE_KEY:
            1.0,
            TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY:
            0.8
        })
        two_ratings_manager = ClientMediaManagers.RatingsManager({
            TC.LOCAL_RATING_LIKE_SERVICE_KEY:
            0.0,
            TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY:
            0.6
        })
        substantial_ratings_manager = ClientMediaManagers.RatingsManager({
            TC.LOCAL_RATING_LIKE_SERVICE_KEY:
            1.0,
            TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY:
            0.8
        })
        empty_ratings_manager = ClientMediaManagers.RatingsManager({})

        notes_manager = ClientMediaManagers.NotesManager({})

        file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager(
        )

        #

        local_hash_has_values = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager(
            1, local_hash_has_values, size, mime, width, height, duration,
            num_frames, has_audio, num_words)

        media_result = ClientMediaResult.MediaResult(
            file_info_manager, substantial_tags_manager,
            local_locations_manager, substantial_ratings_manager,
            notes_manager, file_viewing_stats_manager)

        local_media_has_values = ClientMedia.MediaSingleton(media_result)

        #

        other_local_hash_has_values = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager(
            2, other_local_hash_has_values, size, mime, width, height,
            duration, num_frames, has_audio, num_words)

        media_result = ClientMediaResult.MediaResult(
            file_info_manager, substantial_tags_manager,
            local_locations_manager, substantial_ratings_manager,
            notes_manager, file_viewing_stats_manager)

        other_local_media_has_values = ClientMedia.MediaSingleton(media_result)

        #

        local_hash_empty = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager(
            3, local_hash_empty, size, mime, width, height, duration,
            num_frames, has_audio, num_words)

        media_result = ClientMediaResult.MediaResult(
            file_info_manager, empty_tags_manager, local_locations_manager,
            empty_ratings_manager, notes_manager, file_viewing_stats_manager)

        local_media_empty = ClientMedia.MediaSingleton(media_result)

        #

        trashed_hash_empty = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager(
            4, trashed_hash_empty, size, mime, width, height, duration,
            num_frames, has_audio, num_words)

        media_result = ClientMediaResult.MediaResult(
            file_info_manager, empty_tags_manager, trash_locations_manager,
            empty_ratings_manager, notes_manager, file_viewing_stats_manager)

        trashed_media_empty = ClientMedia.MediaSingleton(media_result)

        #

        deleted_hash_empty = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager(
            5, deleted_hash_empty, size, mime, width, height, duration,
            num_frames, has_audio, num_words)

        media_result = ClientMediaResult.MediaResult(
            file_info_manager, empty_tags_manager, deleted_locations_manager,
            empty_ratings_manager, notes_manager, file_viewing_stats_manager)

        deleted_media_empty = ClientMedia.MediaSingleton(media_result)

        #

        one_hash = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager(
            6, one_hash, size, mime, width, height, duration, num_frames,
            has_audio, num_words)

        media_result = ClientMediaResult.MediaResult(
            file_info_manager, one_tags_manager, local_locations_manager,
            one_ratings_manager, notes_manager, file_viewing_stats_manager)

        one_media = ClientMedia.MediaSingleton(media_result)

        #

        two_hash = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager(
            7, two_hash, size, mime, width, height, duration, num_frames,
            has_audio, num_words)

        media_result = ClientMediaResult.MediaResult(
            file_info_manager, two_tags_manager, local_locations_manager,
            two_ratings_manager, notes_manager, file_viewing_stats_manager)

        two_media = ClientMedia.MediaSingleton(media_result)

        #

        self._dump_and_load_and_test(duplicate_action_options_delete_and_move,
                                     test)
        self._dump_and_load_and_test(duplicate_action_options_copy, test)
        self._dump_and_load_and_test(duplicate_action_options_merge, test)

        #

        def assertSCUEqual(one, two):

            self.assertEqual(
                TC.ConvertServiceKeysToContentUpdatesToComparable(one),
                TC.ConvertServiceKeysToContentUpdatesToComparable(two))

        file_deletion_reason = 'test delete'

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
            local_media_has_values,
            local_media_empty,
            delete_second=True,
            file_deletion_reason=file_deletion_reason)

        scu = {}

        scu[CC.LOCAL_FILE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                     HC.CONTENT_UPDATE_DELETE,
                                     {local_hash_empty},
                                     reason=file_deletion_reason)
        ]

        assertSCUEqual(result, scu)

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
            local_media_has_values,
            trashed_media_empty,
            delete_second=True,
            file_deletion_reason=file_deletion_reason)

        scu = {}

        scu[CC.TRASH_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                     HC.CONTENT_UPDATE_DELETE,
                                     {trashed_hash_empty},
                                     reason=file_deletion_reason)
        ]

        assertSCUEqual(result, scu)

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
            local_media_has_values,
            deleted_media_empty,
            delete_second=True,
            file_deletion_reason=file_deletion_reason)

        self.assertEqual(result, {})

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
            local_media_has_values,
            other_local_media_has_values,
            delete_second=True,
            file_deletion_reason=file_deletion_reason)

        scu = {}

        scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
            HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE,
                ('test tag', {other_local_hash_has_values})),
            HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE,
                ('series:namespaced test tag', {other_local_hash_has_values}))
        ]
        scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (None, {other_local_hash_has_values}))
        ]
        scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (None, {other_local_hash_has_values}))
        ]
        scu[CC.LOCAL_FILE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                     HC.CONTENT_UPDATE_DELETE,
                                     {other_local_hash_has_values},
                                     reason=file_deletion_reason)
        ]

        assertSCUEqual(result, scu)

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates(
            local_media_empty,
            other_local_media_has_values,
            delete_second=True,
            file_deletion_reason=file_deletion_reason)

        scu = {}

        scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     ('test tag', {local_hash_empty})),
            HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                ('series:namespaced test tag', {local_hash_empty})),
            HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE,
                ('test tag', {other_local_hash_has_values})),
            HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE,
                ('series:namespaced test tag', {other_local_hash_has_values}))
        ]
        scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (1.0, {local_hash_empty})),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (None, {other_local_hash_has_values}))
        ]
        scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (0.8, {local_hash_empty})),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (None, {other_local_hash_has_values}))
        ]
        scu[CC.LOCAL_FILE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                     HC.CONTENT_UPDATE_DELETE,
                                     {other_local_hash_has_values},
                                     reason=file_deletion_reason)
        ]

        assertSCUEqual(result, scu)

        #
        #

        result = duplicate_action_options_copy.ProcessPairIntoContentUpdates(
            local_media_has_values,
            local_media_empty,
            file_deletion_reason=file_deletion_reason)

        self.assertEqual(result, {})

        #

        result = duplicate_action_options_copy.ProcessPairIntoContentUpdates(
            local_media_empty,
            other_local_media_has_values,
            file_deletion_reason=file_deletion_reason)

        scu = {}

        scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     ('test tag', {local_hash_empty})),
            HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                ('series:namespaced test tag', {local_hash_empty}))
        ]
        scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (1.0, {local_hash_empty}))
        ]
        scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (0.8, {local_hash_empty}))
        ]

        assertSCUEqual(result, scu)

        #
        #

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
            local_media_has_values,
            local_media_empty,
            file_deletion_reason=file_deletion_reason)

        scu = {}

        scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     ('test tag', {local_hash_empty})),
            HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                ('series:namespaced test tag', {local_hash_empty}))
        ]
        scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (1.0, {local_hash_empty}))
        ]
        scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (0.8, {local_hash_empty}))
        ]

        assertSCUEqual(result, scu)

        #

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
            local_media_empty,
            other_local_media_has_values,
            file_deletion_reason=file_deletion_reason)

        scu = {}

        scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     ('test tag', {local_hash_empty})),
            HydrusData.ContentUpdate(
                HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD,
                ('series:namespaced test tag', {local_hash_empty}))
        ]
        scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (1.0, {local_hash_empty}))
        ]
        scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     (0.8, {local_hash_empty}))
        ]

        assertSCUEqual(result, scu)

        #

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates(
            one_media, two_media, file_deletion_reason=file_deletion_reason)

        scu = {}

        scu[CC.DEFAULT_LOCAL_TAG_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     ('one', {two_hash})),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     ('two', {one_hash}))
        ]
        scu[TC.LOCAL_RATING_LIKE_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD, (1.0, {two_hash}))
        ]
        scu[TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_RATINGS,
                                     HC.CONTENT_UPDATE_ADD, (0.8, {two_hash}))
        ]

        assertSCUEqual(result, scu)
        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media)) in enumerate(to_do):

                try:

                    QP.CallAfter(
                        qt_update_label,
                        HydrusData.ConvertValueRangeToPrettyString(
                            index + 1, num_to_do))

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = self._GetPath(media)

                    path = os.path.normpath(path)

                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key,
                                ClientTags.TAG_DISPLAY_SIBLINGS_AND_PARENTS)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWritable(path)

                except:

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                deletee_hashes = {
                    media.GetHash()
                    for (ordering_index, media) in to_do
                }

                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)
Example #26
        def do_it(directory, neighbouring_txt_tag_service_keys,
                  delete_afterwards, export_symlinks, quit_afterwards):

            job_key = ClientThreading.JobKey(cancellable=True)

            job_key.SetStatusTitle('file export')

            HG.client_controller.pub('message', job_key)

            pauser = HydrusData.BigJobPauser()

            for (index, (ordering_index, media, path)) in enumerate(to_do):

                if job_key.IsCancelled():

                    break

                try:

                    x_of_y = HydrusData.ConvertValueRangeToPrettyString(
                        index + 1, num_to_do)

                    job_key.SetVariable('popup_text_1',
                                        'Done {}'.format(x_of_y))
                    job_key.SetVariable('popup_gauge_1',
                                        (index + 1, num_to_do))

                    QP.CallAfter(qt_update_label, x_of_y)

                    hash = media.GetHash()
                    mime = media.GetMime()

                    path = os.path.normpath(path)

                    if not path.startswith(directory):

                        raise Exception(
                            'It seems a destination path was above the main export directory! The file was "{}" and its destination path was "{}".'
                            .format(hash.hex(), path))

                    path_dir = os.path.dirname(path)

                    HydrusPaths.MakeSureDirectoryExists(path_dir)

                    if export_tag_txts:

                        tags_manager = media.GetTagsManager()

                        tags = set()

                        for service_key in neighbouring_txt_tag_service_keys:

                            current_tags = tags_manager.GetCurrent(
                                service_key, ClientTags.TAG_DISPLAY_ACTUAL)

                            tags.update(current_tags)

                        tags = sorted(tags)

                        txt_path = path + '.txt'

                        with open(txt_path, 'w', encoding='utf-8') as f:

                            f.write(os.linesep.join(tags))

                    source_path = client_files_manager.GetFilePath(
                        hash, mime, check_file_exists=False)

                    if export_symlinks:

                        os.symlink(source_path, path)

                    else:

                        HydrusPaths.MirrorFile(source_path, path)

                        HydrusPaths.MakeFileWriteable(path)

                except:

                    QP.CallAfter(
                        QW.QMessageBox.information, self, 'Information',
                        'Encountered a problem while attempting to export file with index '
                        + str(ordering_index + 1) + ':' + os.linesep * 2 +
                        traceback.format_exc())

                    break

                pauser.Pause()

            if not job_key.IsCancelled() and delete_afterwards:

                QP.CallAfter(qt_update_label, 'deleting')

                delete_lock_for_archived_files = HG.client_controller.new_options.GetBoolean(
                    'delete_lock_for_archived_files')

                if delete_lock_for_archived_files:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                        if not media.HasArchive()
                    }

                else:

                    deletee_hashes = {
                        media.GetHash()
                        for (ordering_index, media, path) in to_do
                    }

                chunks_of_hashes = HydrusData.SplitListIntoChunks(
                    deletee_hashes, 64)

                reason = 'Deleted after manual export to "{}".'.format(
                    directory)

                content_updates = [
                    HydrusData.ContentUpdate(HC.CONTENT_TYPE_FILES,
                                             HC.CONTENT_UPDATE_DELETE,
                                             chunk_of_hashes,
                                             reason=reason)
                    for chunk_of_hashes in chunks_of_hashes
                ]

                for content_update in content_updates:

                    HG.client_controller.WriteSynchronous(
                        'content_updates',
                        {CC.LOCAL_FILE_SERVICE_KEY: [content_update]})

            job_key.DeleteVariable('popup_gauge_1')
            job_key.SetVariable('popup_text_1', 'Done!')

            job_key.Finish()

            job_key.Delete(5)

            QP.CallAfter(qt_update_label, 'done!')

            time.sleep(1)

            QP.CallAfter(qt_update_label, 'export')

            QP.CallAfter(qt_done, quit_afterwards)
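Relative to the previous version, this one wraps the export loop in a cancellable JobKey popup. A minimal sketch of that progress-reporting pattern on its own, assuming the same ClientThreading and HydrusData helpers; run_with_popup_progress and the per-item work are placeholders.

def run_with_popup_progress(items, title):

    # hypothetical sketch of the JobKey pattern used above
    job_key = ClientThreading.JobKey(cancellable=True)

    job_key.SetStatusTitle(title)

    HG.client_controller.pub('message', job_key)

    num_to_do = len(items)

    for (i, item) in enumerate(items):

        if job_key.IsCancelled():

            break

        job_key.SetVariable(
            'popup_text_1',
            HydrusData.ConvertValueRangeToPrettyString(i + 1, num_to_do))
        job_key.SetVariable('popup_gauge_1', (i + 1, num_to_do))

        # ... do the per-item work here ...

    job_key.DeleteVariable('popup_gauge_1')
    job_key.SetVariable('popup_text_1', 'Done!')

    job_key.Finish()

    job_key.Delete(5)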
Example #27
def UndeleteMedia( win, media ):
    
    media_deleted_service_keys = HydrusData.MassUnion( ( m.GetLocationsManager().GetDeleted() for m in media ) )
    
    local_file_services = HG.client_controller.services_manager.GetServices( ( HC.LOCAL_FILE_DOMAIN, ) )
    
    undeletable_services = [ local_file_service for local_file_service in local_file_services if local_file_service.GetServiceKey() in media_deleted_service_keys ]
    
    if len( undeletable_services ) > 0:
        
        do_it = False
        
        if len( undeletable_services ) > 1:
            
            choice_tuples = []
            
            for ( i, service ) in enumerate( undeletable_services ):
                
                choice_tuples.append( ( service.GetName(), service, 'Undelete back to {}.'.format( service.GetName() ) ) )
                
            
            if len( choice_tuples ) > 1:
                
                service = HG.client_controller.services_manager.GetService( CC.COMBINED_LOCAL_MEDIA_SERVICE_KEY )
                
                choice_tuples.append( ( 'all the above', service, 'Undelete back to all services the files have been deleted from.' ) )
                
            
            try:
                
                undelete_service = ClientGUIDialogsQuick.SelectFromListButtons( win, 'Undelete for?', choice_tuples )
                
                do_it = True
                
            except HydrusExceptions.CancelledException:
                
                return
                
            
        else:
    
            ( undelete_service, ) = undeletable_services
            
            if HC.options[ 'confirm_trash' ]:
                
                result = ClientGUIDialogsQuick.GetYesNo( win, 'Undelete this file back to {}?'.format( undelete_service.GetName() ) )
                
                if result == QW.QDialog.Accepted:
                    
                    do_it = True
                    
                
            else:
                
                do_it = True
                
            
        
        if do_it:
            
            for chunk_of_media in HydrusData.SplitIteratorIntoChunks( media, 64 ):
                
                service_keys_to_content_updates = collections.defaultdict( list )
                
                service_key = undelete_service.GetServiceKey()
                
                undeletee_hashes = [ m.GetHash() for m in chunk_of_media if service_key in m.GetLocationsManager().GetDeleted() ]
                
                service_keys_to_content_updates[ service_key ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, undeletee_hashes ) ]
                
                HG.client_controller.Write( 'content_updates', service_keys_to_content_updates )
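For completeness, the write that UndeleteMedia eventually performs reduces to one CONTENT_UPDATE_UNDELETE row per chosen service. A minimal sketch of that core write with the dialog flow stripped away, assuming a service key and a set of hashes are already known; undelete_service_key and undeletee_hashes are placeholders.

# hypothetical direct undelete, skipping the selection and confirmation dialogs above
content_update = HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_UNDELETE, undeletee_hashes )

HG.client_controller.Write( 'content_updates', { undelete_service_key : [ content_update ] } )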