def _test_pairs_service_to_list( self, content_type ):
     """Migrate tag pairs (siblings or parents) from a tag service into a list destination.
     
     Runs every combination of left/right tag filter against every
     ( service, pair pools, content statuses ) source configuration for the
     given pair content_type and checks the destination receives exactly the
     expected filtered pairs.
     """
     
     def run_test( source, expected_data ):
         
         # run a full migration job and compare received pairs order-insensitively
         destination = ClientMigration.MigrationDestinationListPairs( self )
         
         job = ClientMigration.MigrationJob( self, 'test', source, destination )
         
         job.Run()
         
         self.assertEqual( set( destination.GetDataReceived() ), set( expected_data ) )
         
     
     # pools of example pairs for this content type, prepared by the module's test fixtures
     ( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
     
     # test filters and content statuses
     
     tag_repo_service_key = self._test_tag_repo_service_keys[10]
     
     content_source_tests = []
     
     # each entry: ( source service, pair pools expected, content statuses to request )
     content_source_tests.append( ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, ( current, ), ( HC.CONTENT_STATUS_CURRENT, ) ) )
     content_source_tests.append( ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, ( deleted, ), ( HC.CONTENT_STATUS_DELETED, ) ) )
     content_source_tests.append( ( tag_repo_service_key, ( current, ), ( HC.CONTENT_STATUS_CURRENT, ) ) )
     content_source_tests.append( ( tag_repo_service_key, ( current, pending ), ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING ) ) )
     content_source_tests.append( ( tag_repo_service_key, ( deleted, ), ( HC.CONTENT_STATUS_DELETED, ) ) )
     
     free_filter = ClientTags.TagFilter()
     
     # whitelist namespaced tags, blacklist unnamespaced ones
     namespace_filter = ClientTags.TagFilter()
     
     namespace_filter.SetRule( ':', CC.FILTER_WHITELIST )
     namespace_filter.SetRule( '', CC.FILTER_BLACKLIST )
     
     test_filters = []
     
     # every left/right combination of the two filters
     test_filters.append( ( free_filter, free_filter ) )
     test_filters.append( ( namespace_filter, free_filter ) )
     test_filters.append( ( free_filter, namespace_filter ) )
     test_filters.append( ( namespace_filter, namespace_filter ) )
     
     for ( left_tag_filter, right_tag_filter ) in test_filters:
         
         for ( service_key, content_lists, content_statuses ) in content_source_tests:
             
             source = ClientMigration.MigrationSourceTagServicePairs( self, service_key, content_type, left_tag_filter, right_tag_filter, content_statuses )
             
             expected_data = set()
             
             for content_list in content_lists:
                 
                 # a pair is expected only if both sides pass their respective filter
                 expected_data.update( ( ( left_tag, right_tag ) for ( left_tag, right_tag ) in content_list if left_tag_filter.TagOK( left_tag ) and right_tag_filter.TagOK( right_tag ) ) )
                 
             
             run_test( source, expected_data )
Example #2
0
    def __init__(self, name='new api permissions', access_key=None, basic_permissions=None, search_tag_filter=None):
        """Initialise a named API permissions object, generating fresh defaults for omitted arguments."""

        # fill in any missing arguments with new defaults
        access_key = HydrusData.GenerateKey() if access_key is None else access_key
        basic_permissions = set() if basic_permissions is None else basic_permissions
        search_tag_filter = ClientTags.TagFilter() if search_tag_filter is None else search_tag_filter

        HydrusSerialisable.SerialisableBaseNamed.__init__(self, name)

        self._access_key = access_key

        # defensive copy so callers can't mutate our permission set
        self._basic_permissions = set(basic_permissions)
        self._search_tag_filter = search_tag_filter

        # cached search state
        self._last_search_results = None
        self._search_results_timeout = 0

        self._lock = threading.Lock()
Example #3
0
    def __init__(self, url=None, can_generate_more_pages=True):
        """Initialise a gallery seed for the given URL.

        The URL is normalised via the domain manager when its class is
        recognised; with no URL given, a hardcoded example URL is used.
        """

        if url is None:
            url = 'https://nostrils-central.cx/index.php?post=s&tag=hyper_nostrils&page=3'
        else:
            # normalise if we recognise the URL; otherwise keep it as provided
            try:
                url = HG.client_controller.network_engine.domain_manager.NormaliseURL(url)
            except HydrusExceptions.URLClassException:
                pass

        HydrusSerialisable.SerialisableBase.__init__(self)

        self.url = url
        self._can_generate_more_pages = can_generate_more_pages

        self._fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()

        # timestamps and import status
        self.created = HydrusData.GetNow()
        self.modified = self.created
        self.status = CC.STATUS_UNKNOWN
        self.note = ''

        self._referral_url = None

        self._force_next_page_url_generation = False
    def PendURLs(self, urls, service_keys_to_tags=None):
        """Queue the given URLs for import, sorting each into the file queue or
        the gallery queue according to its URL class, and wake the relevant job."""

        if service_keys_to_tags is None:
            service_keys_to_tags = ClientTags.ServiceKeysToTags()

        with self._lock:

            # > _1_ to take out the occasional whitespace
            urls = [url for url in urls if len(url) > 1]

            new_file_seeds = []
            new_gallery_seeds = []

            for url in urls:

                # skip anything the domain manager cannot classify at all
                try:
                    url_class = HG.client_controller.network_engine.domain_manager.GetURLClass(url)
                except HydrusExceptions.URLClassException:
                    continue

                # unknown urls and file/post urls go to the file queue; the rest are gallery pages
                treat_as_file = url_class is None or url_class.GetURLType() in (HC.URL_TYPE_FILE, HC.URL_TYPE_POST)

                if treat_as_file:

                    file_seed = ClientImportFileSeeds.FileSeed(ClientImportFileSeeds.FILE_SEED_TYPE_URL, url)
                    file_seed.SetFixedServiceKeysToTags(service_keys_to_tags)

                    new_file_seeds.append(file_seed)

                else:

                    # manually pended gallery pages never spawn further pages
                    gallery_seed = ClientImportGallerySeeds.GallerySeed(url, can_generate_more_pages=False)
                    gallery_seed.SetFixedServiceKeysToTags(service_keys_to_tags)

                    new_gallery_seeds.append(gallery_seed)

            if len(new_gallery_seeds) > 0:

                self._gallery_seed_log.AddGallerySeeds(new_gallery_seeds)

                ClientImporting.WakeRepeatingJob(self._gallery_repeating_job)

            if len(new_file_seeds) > 0:

                self._file_seed_cache.AddFileSeeds(new_file_seeds)

                ClientImporting.WakeRepeatingJob(self._files_repeating_job)
Example #5
0
    def _UpdateTagDisplay(self):
        """Refresh the favourite-tags widget for the current service and media."""

        favourite_tags = list(HG.client_controller.new_options.GetSuggestedTagsFavourites(self._service_key))

        # present favourites in the user's preferred sort order
        ClientTags.SortTags(HC.options['default_tag_sort'], favourite_tags)

        # drop suggestions that do not apply to the media in view
        displayable_tags = FilterSuggestedTagsForMedia(favourite_tags, self._media, self._service_key)

        self._favourite_tags.SetTags(displayable_tags)
Example #6
0
    def __init__(self):
        """Initialise a watcher with the user's default options and empty import state."""

        HydrusSerialisable.SerialisableBase.__init__(self)

        # page publication state
        self._page_key = 'initialising page key'
        self._publish_to_page = False

        self._url = ''

        # gallery pages seen and files found so far
        self._gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()
        self._file_seed_cache = ClientImportFileSeeds.FileSeedCache()

        self._fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()

        # pull the user's current defaults for checker/import behaviour
        new_options = HG.client_controller.new_options

        self._checker_options = new_options.GetDefaultWatcherCheckerOptions()
        self._file_import_options = new_options.GetDefaultFileImportOptions('loud')
        self._tag_import_options = ClientImportOptions.TagImportOptions(is_default=True)

        # checking state
        self._last_check_time = 0
        self._checking_status = ClientImporting.CHECKER_STATUS_OK
        self._subject = 'unknown subject'

        self._next_check_time = None

        # live network jobs, if any
        self._file_network_job = None
        self._checker_network_job = None

        # pause and back-off state
        self._check_now = False
        self._files_paused = False
        self._checking_paused = False

        self._no_work_until = 0
        self._no_work_until_reason = ''

        self._creation_time = HydrusData.GetNow()

        # status strings surfaced in the UI
        self._file_velocity_status = ''
        self._file_status = ''
        self._watcher_status = ''

        self._watcher_key = HydrusData.GenerateKey()

        self._lock = threading.Lock()

        self._last_pubbed_page_name = ''

        self._files_repeating_job = None
        self._checker_repeating_job = None

        HG.client_controller.sub(self, 'NotifyFileSeedsUpdated',
                                 'file_seed_cache_file_seeds_updated')
Example #7
0
    def _UpdateSerialisableInfo(self, version, old_serialisable_info):
        """Migrate this object's serialised tuple from version 1 to version 2.

        Version 2 inserts an (empty) serialised fixed service-keys-to-tags
        mapping after can_generate_more_pages.
        NOTE(review): any version other than 1 falls through and returns None —
        presumably the serialisation framework only calls this with versions it
        knows need updating; confirm against the framework.
        """

        if version == 1:

            (url, can_generate_more_pages, created, modified, status, note,
             referral_url) = old_serialisable_info

            # new in v2: an empty fixed tag mapping
            fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()

            serialisable_fixed_service_keys_to_tags = fixed_service_keys_to_tags.GetSerialisableTuple(
            )

            new_serialisable_info = (url, can_generate_more_pages,
                                     serialisable_fixed_service_keys_to_tags,
                                     created, modified, status, note,
                                     referral_url)

            return (2, new_serialisable_info)
Example #8
0
    def SetFixedServiceKeysToTags(self, service_keys_to_tags):
        """Store a defensive copy of the fixed service-keys-to-tags mapping for this seed."""

        self._fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags(service_keys_to_tags)
 def _test_pairs_htpa_to_list( self, content_type ):
     """Migrate tag pairs from a Hydrus Tag Pair Archive (HTPA) into a list destination.
     
     Builds a temporary HTPA holding the 'current' pair pool, migrates it under
     every left/right tag filter combination, and deletes the archive at the end.
     """
     
     def run_test( source, expected_data ):
         
         # run a full migration job and compare received pairs order-insensitively
         destination = ClientMigration.MigrationDestinationListPairs( self )
         
         job = ClientMigration.MigrationJob( self, 'test', source, destination )
         
         job.Run()
         
         self.assertEqual( set( destination.GetDataReceived() ), set( expected_data ) )
         
     
     ( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
     
     htpa_path = os.path.join( TestController.DB_DIR, 'htpa.db' )
     
     htpa = HydrusTagArchive.HydrusTagPairArchive( htpa_path )
     
     # the archive stores either parents or siblings, matching the requested content type
     if content_type == HC.CONTENT_TYPE_TAG_PARENTS:
         
         htpa.SetPairType( HydrusTagArchive.TAG_PAIR_TYPE_PARENTS )
         
     elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
         
         htpa.SetPairType( HydrusTagArchive.TAG_PAIR_TYPE_SIBLINGS )
         
     
     # populate and close the archive so the migration source can open the file fresh
     htpa.BeginBigJob()
     
     htpa.AddPairs( current )
     
     htpa.CommitBigJob()
     
     htpa.Optimise()
     
     htpa.Close()
     
     del htpa
     
     #
     
     # test tag filter, left, right, both
     
     free_filter = ClientTags.TagFilter()
     
     # whitelist namespaced tags, blacklist unnamespaced ones
     namespace_filter = ClientTags.TagFilter()
     
     namespace_filter.SetRule( ':', CC.FILTER_WHITELIST )
     namespace_filter.SetRule( '', CC.FILTER_BLACKLIST )
     
     test_filters = []
     
     test_filters.append( ( free_filter, free_filter ) )
     test_filters.append( ( namespace_filter, free_filter ) )
     test_filters.append( ( free_filter, namespace_filter ) )
     test_filters.append( ( namespace_filter, namespace_filter ) )
     
     for ( left_tag_filter, right_tag_filter ) in test_filters:
         
         source = ClientMigration.MigrationSourceHTPA( self, htpa_path, left_tag_filter, right_tag_filter )
         
         # a pair survives only if both sides pass their respective filter
         expected_data = [ ( left_tag, right_tag ) for ( left_tag, right_tag ) in current if left_tag_filter.TagOK( left_tag ) and right_tag_filter.TagOK( right_tag ) ]
         
         run_test( source, expected_data )
         
     
     #
     
     os.remove( htpa_path )
 def _test_mappings_service_to_list( self ):
     """Migrate hash->tags mappings from tag services into a list destination.
     
     Covers file-service filtering, specific-hash filtering, desired hash type
     conversion, tag filtering and content statuses, for both the default local
     tag service and a test tag repository.
     """
     
     def run_test( source, expected_data ):
         
         # run a full migration job and compare the received mappings as dicts
         destination = ClientMigration.MigrationDestinationListMappings( self )
         
         job = ClientMigration.MigrationJob( self, 'test', source, destination )
         
         job.Run()
         
         self.assertEqual( dict( destination.GetDataReceived() ), dict( expected_data ) )
         
     
     # test file filter
     
     tag_repo_service_key = self._test_tag_repo_service_keys[0]
     
     tag_filter = ClientTags.TagFilter()
     
     # the combined file domain should yield every known mapping
     source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     expected_data = list( self._hashes_to_current_tags.items() )
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     expected_data = list( self._hashes_to_current_tags.items() )
     
     run_test( source, expected_data )
     
     # the local file domain should restrict results to files in 'my files'
     source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.LOCAL_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.LOCAL_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
     
     run_test( source, expected_data )
     
     # not all hashes, since hash type lookup only available for imported files
     # NOTE(review): random.sample on a set raises TypeError on Python 3.11+ --
     # confirm _my_files_sha256 is a sequence here
     hashes = random.sample( self._my_files_sha256, 25 )
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', hashes, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', hashes, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
     
     run_test( source, expected_data )
     
     # test desired hash type
     
     # not all hashes, since hash type lookup only available for imported files
     expected_data = [ ( self._sha256_to_sha1[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     run_test( source, expected_data )
     
     # tag filter
     
     # unnamespaced tags only
     tag_filter = ClientTags.TagFilter()
     
     tag_filter.SetRule( '', CC.FILTER_WHITELIST )
     tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     # mappings whose tags all get filtered away should not appear at all
     expected_data = [ ( hash, tag_filter.Filter( tags ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
     expected_data = [ ( hash, tags ) for ( hash, tags ) in expected_data if len( tags ) > 0 ]
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, ) )
     
     expected_data = [ ( hash, tag_filter.Filter( tags ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
     expected_data = [ ( hash, tags ) for ( hash, tags ) in expected_data if len( tags ) > 0 ]
     
     run_test( source, expected_data )
     
     # test statuses
     
     tag_filter = ClientTags.TagFilter()
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_DELETED, ) )
     
     expected_data = list( self._hashes_to_deleted_tags.items() )
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_DELETED, ) )
     
     expected_data = list( self._hashes_to_deleted_tags.items() )
     
     run_test( source, expected_data )
     
     # asking for current + pending together should union the tags per hash
     source = ClientMigration.MigrationSourceTagServiceMappings( self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter, ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING ) )
     
     expected_data = collections.defaultdict( set )
     
     for ( hash, tags ) in self._hashes_to_current_tags.items():
         
         expected_data[ hash ].update( tags )
         
     
     for ( hash, tags ) in self._hashes_to_pending_tags.items():
         
         expected_data[ hash ].update( tags )
         
     
     expected_data = list( expected_data.items() )
     
     run_test( source, expected_data )
 def _test_mappings_hta_to_list( self ):
     """Migrate hash->tags mappings from Hydrus Tag Archives (HTAs) into a list destination.
     
     Builds temporary md5- and sha256-keyed archives of the current mappings,
     then exercises file-service filtering, specific hashes, desired hash type
     conversion and tag filtering against both archives, deleting them at the end.
     """
     
     def run_test( source, expected_data ):
         
         # run a full migration job and compare the received mappings as dicts
         destination = ClientMigration.MigrationDestinationListMappings( self )
         
         job = ClientMigration.MigrationJob( self, 'test', source, destination )
         
         job.Run()
         
         self.assertEqual( dict( destination.GetDataReceived() ), dict( expected_data ) )
         
     
     md5_hta_path = os.path.join( TestController.DB_DIR, 'md5hta.db' )
     sha256_hta_path = os.path.join( TestController.DB_DIR, 'sha256hta.db' )
     
     md5_hta = HydrusTagArchive.HydrusTagArchive( md5_hta_path )
     sha256_hta = HydrusTagArchive.HydrusTagArchive( sha256_hta_path )
     
     md5_hta.SetHashType( HydrusTagArchive.HASH_TYPE_MD5 )
     sha256_hta.SetHashType( HydrusTagArchive.HASH_TYPE_SHA256 )
     
     # populate both archives with the same current mappings, keyed by their hash type
     md5_hta.BeginBigJob()
     sha256_hta.BeginBigJob()
     
     for ( hash, tags ) in self._hashes_to_current_tags.items():
         
         md5 = self._sha256_to_md5[ hash ]
         
         md5_hta.AddMappings( md5, tags )
         sha256_hta.AddMappings( hash, tags )
         
     
     md5_hta.CommitBigJob()
     sha256_hta.CommitBigJob()
     
     md5_hta.Optimise()
     sha256_hta.Optimise()
     
     # close so the migration sources can open the archive files fresh
     md5_hta.Close()
     sha256_hta.Close()
     
     del md5_hta
     del sha256_hta
     
     #
     
     # test file filter
     
     tag_filter = ClientTags.TagFilter()
     
     source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', None, tag_filter )
     
     expected_data = [ ( self._sha256_to_md5[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter )
     
     expected_data = list( self._hashes_to_current_tags.items() )
     
     run_test( source, expected_data )
     
     # the local file domain should restrict results to files in 'my files'
     source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.LOCAL_FILE_SERVICE_KEY, 'md5', None, tag_filter )
     
     expected_data = [ ( self._sha256_to_md5[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.LOCAL_FILE_SERVICE_KEY, 'sha256', None, tag_filter )
     
     expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
     
     run_test( source, expected_data )
     
     # not all hashes, since hash type lookup only available for imported files
     # NOTE(review): random.sample on a set raises TypeError on Python 3.11+ --
     # confirm _my_files_sha256 is a sequence here
     hashes = random.sample( self._my_files_sha256, 25 )
     
     source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', hashes, tag_filter )
     
     expected_data = [ ( self._sha256_to_md5[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', hashes, tag_filter )
     
     expected_data = [ ( hash, tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
     
     run_test( source, expected_data )
     
     # test desired hash type
     
     # not all hashes, since hash type lookup only available for imported files
     expected_data = [ ( self._sha256_to_sha1[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256 ]
     
     source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter )
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter )
     
     run_test( source, expected_data )
     
     # do a test with specific hashes, so md5->sha1 does interim sha256 conversion
     # not all hashes, since hash type lookup only available for imported files
     hashes = random.sample( self._my_files_sha256, 25 )
     
     expected_data = [ ( self._sha256_to_sha1[ hash ], tags ) for ( hash, tags ) in self._hashes_to_current_tags.items() if hash in hashes ]
     
     source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', hashes, tag_filter )
     
     run_test( source, expected_data )
     
     # tag filter
     
     # unnamespaced tags only
     tag_filter = ClientTags.TagFilter()
     
     tag_filter.SetRule( '', CC.FILTER_WHITELIST )
     tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
     
     source = ClientMigration.MigrationSourceHTA( self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', None, tag_filter )
     
     # mappings whose tags all get filtered away should not appear at all
     expected_data = [ ( self._sha256_to_md5[ hash ], tag_filter.Filter( tags ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
     expected_data = [ ( hash, tags ) for ( hash, tags ) in expected_data if len( tags ) > 0 ]
     
     run_test( source, expected_data )
     
     source = ClientMigration.MigrationSourceHTA( self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter )
     
     expected_data = [ ( hash, tag_filter.Filter( tags ) ) for ( hash, tags ) in self._hashes_to_current_tags.items() ]
     expected_data = [ ( hash, tags ) for ( hash, tags ) in expected_data if len( tags ) > 0 ]
     
     run_test( source, expected_data )
     
     #
     
     os.remove( md5_hta_path )
     os.remove( sha256_hta_path )
Example #12
0
    def test_dict_to_content_updates(self):
        """ConvertServiceKeysToTagsToServiceKeysToContentUpdates should emit ADD
        updates for local tag services and PEND updates for remote ones."""

        hash = HydrusData.GenerateKey()

        hashes = {hash}

        local_key = CC.DEFAULT_LOCAL_TAG_SERVICE_KEY
        remote_key = HG.test_controller.example_tag_repo_service_key

        # a single tag on the local service -> one ADD content update
        service_keys_to_tags = ClientTags.ServiceKeysToTags({local_key: {'a'}})

        content_updates = {
            local_key: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                         HC.CONTENT_UPDATE_ADD, ('a', hashes))
            ]
        }

        self.assertEqual(
            ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
                {hash}, service_keys_to_tags), content_updates)

        # a single tag on the remote service -> one PEND content update
        service_keys_to_tags = ClientTags.ServiceKeysToTags(
            {remote_key: {'c'}})

        content_updates = {
            remote_key: [
                HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                         HC.CONTENT_UPDATE_PEND, ('c', hashes))
            ]
        }

        self.assertEqual(
            ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
                {hash}, service_keys_to_tags), content_updates)

        # multiple tags across both services -> one update per tag per service
        service_keys_to_tags = ClientTags.ServiceKeysToTags({
            local_key: ['a', 'character:b'],
            remote_key: ['c', 'series:d']
        })

        content_updates = {}

        content_updates[local_key] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD, ('a', hashes)),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_ADD,
                                     ('character:b', hashes))
        ]
        content_updates[remote_key] = [
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_PEND, ('c', hashes)),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_PEND,
                                     ('series:d', hashes))
        ]

        # sanity check that ContentUpdate equality itself behaves as expected
        self.assertEqual(
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_PEND, 'c'),
            HydrusData.ContentUpdate(HC.CONTENT_TYPE_MAPPINGS,
                                     HC.CONTENT_UPDATE_PEND, 'c'))
        self.assertEqual(
            ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
                {hash}, service_keys_to_tags), content_updates)
Example #13
0
    def _UpdateSerialisableInfo(self, version, old_serialisable_info):
        """Step this object's serialised tuple up one version at a time.

        Each branch converts version N's tuple layout into version N+1's and
        returns (N+1, new_tuple); the framework calls repeatedly until current.
        NOTE(review): versions >= 7 fall through and return None — presumably
        the framework never passes an already-current version here.
        """

        # v1 -> v2: times_to_check/check_period are replaced by checker options,
        # and the single 'paused' flag splits into files_paused/checking_paused
        if version == 1:

            (url, serialisable_file_seed_cache, urls_to_filenames,
             urls_to_md5_base64, serialisable_file_import_options,
             serialisable_tag_import_options, times_to_check, check_period,
             last_check_time, paused) = old_serialisable_info

            checker_options = ClientImportOptions.CheckerOptions(
                intended_files_per_check=8,
                never_faster_than=300,
                never_slower_than=86400,
                death_file_velocity=(1, 86400))

            serialisable_checker_options = checker_options.GetSerialisableTuple(
            )

            files_paused = paused
            checking_paused = paused

            new_serialisable_info = (url, serialisable_file_seed_cache,
                                     urls_to_filenames, urls_to_md5_base64,
                                     serialisable_checker_options,
                                     serialisable_file_import_options,
                                     serialisable_tag_import_options,
                                     last_check_time, files_paused,
                                     checking_paused)

            return (2, new_serialisable_info)

        # v2 -> v3: adds checking_status and subject
        if version == 2:

            (url, serialisable_file_seed_cache, urls_to_filenames,
             urls_to_md5_base64, serialisable_checker_options,
             serialisable_file_import_options, serialisable_tag_import_options,
             last_check_time, files_paused,
             checking_paused) = old_serialisable_info

            checking_status = ClientImporting.CHECKER_STATUS_OK
            subject = 'unknown subject'

            new_serialisable_info = (url, serialisable_file_seed_cache,
                                     urls_to_filenames, urls_to_md5_base64,
                                     serialisable_checker_options,
                                     serialisable_file_import_options,
                                     serialisable_tag_import_options,
                                     last_check_time, files_paused,
                                     checking_paused, checking_status, subject)

            return (3, new_serialisable_info)

        # v3 -> v4: adds the no_work_until back-off fields
        if version == 3:

            (url, serialisable_file_seed_cache, urls_to_filenames,
             urls_to_md5_base64, serialisable_checker_options,
             serialisable_file_import_options, serialisable_tag_import_options,
             last_check_time, files_paused, checking_paused, checking_status,
             subject) = old_serialisable_info

            no_work_until = 0
            no_work_until_reason = ''

            new_serialisable_info = (url, serialisable_file_seed_cache,
                                     urls_to_filenames, urls_to_md5_base64,
                                     serialisable_checker_options,
                                     serialisable_file_import_options,
                                     serialisable_tag_import_options,
                                     last_check_time, files_paused,
                                     checking_paused, checking_status, subject,
                                     no_work_until, no_work_until_reason)

            return (4, new_serialisable_info)

        # v4 -> v5: adds creation_time (stamped at migration time)
        if version == 4:

            (url, serialisable_file_seed_cache, urls_to_filenames,
             urls_to_md5_base64, serialisable_checker_options,
             serialisable_file_import_options, serialisable_tag_import_options,
             last_check_time, files_paused, checking_paused, checking_status,
             subject, no_work_until,
             no_work_until_reason) = old_serialisable_info

            creation_time = HydrusData.GetNow()

            new_serialisable_info = (url, serialisable_file_seed_cache,
                                     urls_to_filenames, urls_to_md5_base64,
                                     serialisable_checker_options,
                                     serialisable_file_import_options,
                                     serialisable_tag_import_options,
                                     last_check_time, files_paused,
                                     checking_paused, checking_status, subject,
                                     no_work_until, no_work_until_reason,
                                     creation_time)

            return (5, new_serialisable_info)

        # v5 -> v6: adds an (empty) gallery seed log
        if version == 5:

            (url, serialisable_file_seed_cache, urls_to_filenames,
             urls_to_md5_base64, serialisable_checker_options,
             serialisable_file_import_options, serialisable_tag_import_options,
             last_check_time, files_paused, checking_paused, checking_status,
             subject, no_work_until, no_work_until_reason,
             creation_time) = old_serialisable_info

            gallery_seed_log = ClientImportGallerySeeds.GallerySeedLog()

            serialisable_gallery_seed_log = gallery_seed_log.GetSerialisableTuple(
            )

            new_serialisable_info = (url, serialisable_gallery_seed_log,
                                     serialisable_file_seed_cache,
                                     urls_to_filenames, urls_to_md5_base64,
                                     serialisable_checker_options,
                                     serialisable_file_import_options,
                                     serialisable_tag_import_options,
                                     last_check_time, files_paused,
                                     checking_paused, checking_status, subject,
                                     no_work_until, no_work_until_reason,
                                     creation_time)

            return (6, new_serialisable_info)

        # v6 -> v7: drops urls_to_filenames/urls_to_md5_base64 in favour of an
        # (empty) fixed service-keys-to-tags mapping
        if version == 6:

            (url, serialisable_gallery_seed_log, serialisable_file_seed_cache,
             urls_to_filenames, urls_to_md5_base64,
             serialisable_checker_options, serialisable_file_import_options,
             serialisable_tag_import_options, last_check_time, files_paused,
             checking_paused, checking_status, subject, no_work_until,
             no_work_until_reason, creation_time) = old_serialisable_info

            fixed_service_keys_to_tags = ClientTags.ServiceKeysToTags()

            serialisable_fixed_service_keys_to_tags = fixed_service_keys_to_tags.GetSerialisableTuple(
            )

            new_serialisable_info = (url, serialisable_gallery_seed_log,
                                     serialisable_file_seed_cache,
                                     serialisable_fixed_service_keys_to_tags,
                                     serialisable_checker_options,
                                     serialisable_file_import_options,
                                     serialisable_tag_import_options,
                                     last_check_time, files_paused,
                                     checking_paused, checking_status, subject,
                                     no_work_until, no_work_until_reason,
                                     creation_time)

            return (7, new_serialisable_info)
 def test_SERIALISABLE_TYPE_TAG_FILTER( self ):
     """Round-trip TagFilter through serialisation and verify Filter() for a spread of blacklist/whitelist rule combinations."""
     
     def test( obj, dupe_obj ):
         
         self.assertEqual( obj._tag_slices_to_rules, dupe_obj._tag_slices_to_rules )
         
     
     tags = {
         'title:test title',
         'series:neon genesis evangelion',
         'series:kill la kill',
         'smile',
         'blue eyes'
     }
     
     # each case: ( rules to apply in order, expected Filter( tags ) result )
     cases = []
     
     cases.append( ( [], { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } ) )
     cases.append( ( [ ( '', CC.FILTER_BLACKLIST ), ( ':', CC.FILTER_BLACKLIST ) ], set() ) )
     cases.append( ( [ ( '', CC.FILTER_BLACKLIST ), ( ':', CC.FILTER_BLACKLIST ), ( 'series:', CC.FILTER_WHITELIST ) ], { 'series:neon genesis evangelion', 'series:kill la kill' } ) )
     cases.append( ( [ ( '', CC.FILTER_BLACKLIST ), ( ':', CC.FILTER_BLACKLIST ), ( 'series:kill la kill', CC.FILTER_WHITELIST ) ], { 'series:kill la kill' } ) )
     cases.append( ( [ ( '', CC.FILTER_BLACKLIST ), ( ':', CC.FILTER_BLACKLIST ), ( 'smile', CC.FILTER_WHITELIST ) ], { 'smile' } ) )
     cases.append( ( [ ( ':', CC.FILTER_BLACKLIST ) ], { 'smile', 'blue eyes' } ) )
     cases.append( ( [ ( ':', CC.FILTER_BLACKLIST ), ( 'series:', CC.FILTER_WHITELIST ) ], { 'smile', 'blue eyes', 'series:neon genesis evangelion', 'series:kill la kill' } ) )
     cases.append( ( [ ( ':', CC.FILTER_BLACKLIST ), ( 'series:kill la kill', CC.FILTER_WHITELIST ) ], { 'smile', 'blue eyes', 'series:kill la kill' } ) )
     cases.append( ( [ ( 'series:', CC.FILTER_BLACKLIST ) ], { 'smile', 'blue eyes', 'title:test title' } ) )
     cases.append( ( [ ( 'series:', CC.FILTER_BLACKLIST ), ( 'series:neon genesis evangelion', CC.FILTER_WHITELIST ) ], { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion' } ) )
     cases.append( ( [ ( '', CC.FILTER_BLACKLIST ) ], { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } ) )
     cases.append( ( [ ( '', CC.FILTER_BLACKLIST ), ( 'blue eyes', CC.FILTER_WHITELIST ) ], { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill', 'blue eyes' } ) )
     
     for ( rules, expected ) in cases:
         
         tag_filter = ClientTags.TagFilter()
         
         for ( tag_slice, rule ) in rules:
             
             tag_filter.SetRule( tag_slice, rule )
             
         
         self._dump_and_load_and_test( tag_filter, test )
         
         self.assertEqual( tag_filter.Filter( tags ), expected )
         
     
     # blacklist namespace test: unnamespaced rules optionally apply to namespaced tags too
     
     blacklist_tags = { 'nintendo', 'studio:nintendo' }
     
     blacklist_cases = []
     
     blacklist_cases.append( ( [ ( 'nintendo', CC.FILTER_BLACKLIST ) ], False, { 'studio:nintendo' } ) )
     blacklist_cases.append( ( [ ( 'nintendo', CC.FILTER_BLACKLIST ) ], True, set() ) )
     blacklist_cases.append( ( [ ( 'nintendo', CC.FILTER_BLACKLIST ), ( 'studio:nintendo', CC.FILTER_WHITELIST ) ], True, { 'studio:nintendo' } ) )
     
     for ( rules, apply_unnamespaced, expected ) in blacklist_cases:
         
         tag_filter = ClientTags.TagFilter()
         
         for ( tag_slice, rule ) in rules:
             
             tag_filter.SetRule( tag_slice, rule )
             
         
         self._dump_and_load_and_test( tag_filter, test )
         
         if apply_unnamespaced:
             
             self.assertEqual( tag_filter.Filter( blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags = True ), expected )
             
         else:
             
             self.assertEqual( tag_filter.Filter( blacklist_tags ), expected )
             
 def test_SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS( self ):
     """Serialise/deserialise DuplicateActionOptions and check the content updates ProcessPairIntoContentUpdates emits for move, copy, and two-way-merge configurations across various media pairs."""
     
     def test( obj, dupe_obj ):
         
         self.assertEqual( obj.ToTuple(), dupe_obj.ToTuple() )
         
     
     # three option sets, same tag/rating services, differing only in merge action
     duplicate_action_options_delete_and_move = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ) ] )
     duplicate_action_options_copy = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ) ] )
     duplicate_action_options_merge = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ) ] )
     
     # shared file metadata for every fixture media object
     inbox = True
     size = 40960
     mime = HC.IMAGE_JPEG
     width = 640
     height = 480
     duration = None
     num_frames = None
     has_audio = False
     num_words = None
     
     # location managers: a local file, a trashed file, and a fully deleted file
     local_locations_manager = ClientMediaManagers.LocationsManager( { CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
     trash_locations_manager = ClientMediaManagers.LocationsManager( { CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
     deleted_locations_manager = ClientMediaManagers.LocationsManager( set(), { CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), inbox )
     
     # duplicate to generate proper dicts
     
     one_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'one' } } } ).Duplicate()
     two_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'two' } } } ).Duplicate()
     substantial_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'test tag', 'series:namespaced test tag' } } } ).Duplicate()
     empty_tags_manager = ClientMediaManagers.TagsManager( {} ).Duplicate()
     
     one_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
     two_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 0.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.6 } )
     substantial_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
     empty_ratings_manager = ClientMediaManagers.RatingsManager( {} )
     
     notes_manager = ClientMediaManagers.NotesManager( {} )
     
     file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager()
     
     # fixture 1: local file with tags and ratings
     
     local_hash_has_values = HydrusData.GenerateKey()
     
     file_info_manager = ClientMediaManagers.FileInfoManager( 1, local_hash_has_values, size, mime, width, height, duration, num_frames, has_audio, num_words )
     
     media_result = ClientMedia.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager, notes_manager, file_viewing_stats_manager )
     
     local_media_has_values = ClientMedia.MediaSingleton( media_result )
     
     # fixture 2: a second local file with the same tags and ratings
     
     other_local_hash_has_values = HydrusData.GenerateKey()
     
     file_info_manager = ClientMediaManagers.FileInfoManager( 2, other_local_hash_has_values, size, mime, width, height, duration, num_frames, has_audio, num_words )
     
     media_result = ClientMedia.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager, notes_manager, file_viewing_stats_manager )
     
     other_local_media_has_values = ClientMedia.MediaSingleton( media_result )
     
     # fixture 3: local file with no tags or ratings
     
     local_hash_empty = HydrusData.GenerateKey()
     
     file_info_manager = ClientMediaManagers.FileInfoManager( 3, local_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
     
     media_result = ClientMedia.MediaResult( file_info_manager, empty_tags_manager, local_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
     
     local_media_empty = ClientMedia.MediaSingleton( media_result )
     
     # fixture 4: trashed file with no tags or ratings
     
     trashed_hash_empty = HydrusData.GenerateKey()
     
     file_info_manager = ClientMediaManagers.FileInfoManager( 4, trashed_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
     
     media_result = ClientMedia.MediaResult( file_info_manager, empty_tags_manager, trash_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
     
     trashed_media_empty = ClientMedia.MediaSingleton( media_result )
     
     # fixture 5: already-deleted file with no tags or ratings
     
     deleted_hash_empty = HydrusData.GenerateKey()
     
     file_info_manager = ClientMediaManagers.FileInfoManager( 5, deleted_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
     
     media_result = ClientMedia.MediaResult( file_info_manager, empty_tags_manager, deleted_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
     
     deleted_media_empty = ClientMedia.MediaSingleton( media_result )
     
     # fixtures 6 and 7: files with disjoint tags/ratings, for the merge case
     
     one_hash = HydrusData.GenerateKey()
     
     file_info_manager = ClientMediaManagers.FileInfoManager( 6, one_hash, size, mime, width, height, duration, num_frames, has_audio, num_words )
     
     media_result = ClientMedia.MediaResult( file_info_manager, one_tags_manager, local_locations_manager, one_ratings_manager, notes_manager, file_viewing_stats_manager )
     
     one_media = ClientMedia.MediaSingleton( media_result )
     
     #
     
     two_hash = HydrusData.GenerateKey()
     
     file_info_manager = ClientMediaManagers.FileInfoManager( 7, two_hash, size, mime, width, height, duration, num_frames, has_audio, num_words )
     
     media_result = ClientMedia.MediaResult( file_info_manager, two_tags_manager, local_locations_manager, two_ratings_manager, notes_manager, file_viewing_stats_manager )
     
     two_media = ClientMedia.MediaSingleton( media_result )
     
     # serialisation round-trips for all three option sets
     
     self._dump_and_load_and_test( duplicate_action_options_delete_and_move, test )
     self._dump_and_load_and_test( duplicate_action_options_copy, test )
     self._dump_and_load_and_test( duplicate_action_options_merge, test )
     
     #
     
     def assertSCUEqual( one, two ):
         
         # compare service_keys_to_content_updates dicts order-insensitively
         self.assertEqual( TC.ConvertServiceKeysToContentUpdatesToComparable( one ), TC.ConvertServiceKeysToContentUpdatesToComparable( two ) )
         
     
     file_deletion_reason = 'test delete'
     
     # delete-and-move: empty loser produces only a file delete
     
     result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )
     
     scu = {}
     
     scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { local_hash_empty }, reason = file_deletion_reason ) ]
     
     assertSCUEqual( result, scu )
     
     # delete-and-move: trashed loser is deleted from the trash service
     
     result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, trashed_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )
     
     scu = {}
     
     scu[ CC.TRASH_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { trashed_hash_empty }, reason = file_deletion_reason ) ]
     
     assertSCUEqual( result, scu )
     
     # delete-and-move: an already-deleted loser yields no updates at all
     
     result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, deleted_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )
     
     self.assertEqual( result, {} )
     
     # delete-and-move: loser with values gets tags/ratings stripped, then deleted
     
     result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, other_local_media_has_values, delete_second = True, file_deletion_reason = file_deletion_reason )
     
     scu = {}
     
     scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
     scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
     scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
     scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values }, reason = file_deletion_reason ) ]
     
     assertSCUEqual( result, scu )
     
     # delete-and-move: empty winner gains the loser's tags/ratings before the delete
     
     result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, delete_second = True, file_deletion_reason = file_deletion_reason )
     
     scu = {}
     
     scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
     scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
     scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
     scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values }, reason = file_deletion_reason ) ]
     
     assertSCUEqual( result, scu )
     
     # copy: nothing to copy from an empty loser
     #
     
     result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, file_deletion_reason = file_deletion_reason )
     
     self.assertEqual( result, {} )
     
     # copy: loser's values are added to the winner, loser untouched
     
     result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, file_deletion_reason = file_deletion_reason )
     
     scu = {}
     
     scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
     scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
     scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
     
     assertSCUEqual( result, scu )
     
     # merge: empty second file receives the first file's values
     #
     
     result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, file_deletion_reason = file_deletion_reason )
     
     scu = {}
     
     scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
     scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
     scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
     
     assertSCUEqual( result, scu )
     
     # merge: empty first file receives the second file's values
     
     result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, file_deletion_reason = file_deletion_reason )
     
     scu = {}
     
     scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
     scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
     scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]
     
     assertSCUEqual( result, scu )
     
     # merge: disjoint values flow in both directions
     
     result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( one_media, two_media, file_deletion_reason = file_deletion_reason )
     
     scu = {}
     
     scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'one', { two_hash } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'two', { one_hash } ) ) ]
     scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { two_hash } ) ) ]
     scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { two_hash } ) ) ]
     
     assertSCUEqual( result, scu )
    def __init__(self, win, only_run):
        """Build a fake controller for tests.

        Creates a throwaway db dir, installs itself as every global
        controller role, stubs out canned Read() results, registers a
        default set of services, and boots the managers/network engine
        a client expects to exist.
        """

        self.win = win
        self.only_run = only_run

        # all test data lives in a temp dir
        self.db_dir = tempfile.mkdtemp()

        global DB_DIR

        DB_DIR = self.db_dir

        self._server_files_dir = os.path.join(self.db_dir, 'server_files')
        self._updates_dir = os.path.join(self.db_dir, 'test_updates')

        client_files_default = os.path.join(self.db_dir, 'client_files')

        HydrusPaths.MakeSureDirectoryExists(self._server_files_dir)
        HydrusPaths.MakeSureDirectoryExists(self._updates_dir)
        HydrusPaths.MakeSureDirectoryExists(client_files_default)

        # this one object stands in for every global controller role at once
        HG.controller = self
        HG.client_controller = self
        HG.server_controller = self
        HG.test_controller = self

        # it also plays db and gui
        self.db = self
        self.gui = self

        self._call_to_threads = []

        self._pubsub = HydrusPubSub.HydrusPubSub(self, lambda o: True)

        self.new_options = ClientOptions.ClientOptions()

        HC.options = ClientDefaults.GetClientDefaultOptions()

        self.options = HC.options

        # silence popup text during tests
        def show_text(text):
            pass

        HydrusData.ShowText = show_text

        # canned results served back by this controller's read calls
        self._reads = {}

        self._reads['local_booru_share_keys'] = []
        self._reads['messaging_sessions'] = []
        self._reads['options'] = ClientDefaults.GetClientDefaultOptions()
        self._reads['file_system_predicates'] = []
        self._reads['media_results'] = []

        self.example_tag_repo_service_key = HydrusData.GenerateKey()

        # default service list covering booru, api, files, tags and ratings
        services = []

        services.append(
            ClientServices.GenerateService(CC.LOCAL_BOORU_SERVICE_KEY,
                                           HC.LOCAL_BOORU, 'local booru'))
        services.append(
            ClientServices.GenerateService(CC.CLIENT_API_SERVICE_KEY,
                                           HC.CLIENT_API_SERVICE,
                                           'client api'))
        services.append(
            ClientServices.GenerateService(CC.COMBINED_LOCAL_FILE_SERVICE_KEY,
                                           HC.COMBINED_LOCAL_FILE,
                                           'all local files'))
        services.append(
            ClientServices.GenerateService(CC.LOCAL_FILE_SERVICE_KEY,
                                           HC.LOCAL_FILE_DOMAIN, 'my files'))
        services.append(
            ClientServices.GenerateService(CC.TRASH_SERVICE_KEY,
                                           HC.LOCAL_FILE_TRASH_DOMAIN,
                                           'trash'))
        services.append(
            ClientServices.GenerateService(CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
                                           HC.LOCAL_TAG, 'my tags'))
        services.append(
            ClientServices.GenerateService(self.example_tag_repo_service_key,
                                           HC.TAG_REPOSITORY,
                                           'example tag repo'))
        services.append(
            ClientServices.GenerateService(CC.COMBINED_TAG_SERVICE_KEY,
                                           HC.COMBINED_TAG, 'all known tags'))
        # NOTE(review): these two rating keys have no TC./CC. prefix —
        # presumably module-level constants in this file; confirm they resolve
        services.append(
            ClientServices.GenerateService(
                LOCAL_RATING_LIKE_SERVICE_KEY, HC.LOCAL_RATING_LIKE,
                'example local rating like service'))
        services.append(
            ClientServices.GenerateService(
                LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.LOCAL_RATING_NUMERICAL,
                'example local rating numerical service'))

        self._reads['services'] = services

        # every 'f'/'t' hex-prefixed storage bucket points at the same test dir
        client_files_locations = {}

        for prefix in HydrusData.IterateHexPrefixes():

            for c in ('f', 't'):

                client_files_locations[c + prefix] = client_files_default

        self._reads['client_files_locations'] = client_files_locations

        self._reads['sessions'] = []
        self._reads['tag_parents'] = {}
        self._reads['tag_siblings'] = {}
        self._reads['in_inbox'] = False

        # every write call is recorded here for later inspection by tests
        self._writes = collections.defaultdict(list)

        self._managers = {}

        self.services_manager = ClientManagers.ServicesManager(self)
        self.client_files_manager = ClientFiles.ClientFilesManager(self)

        self.parsing_cache = ClientCaches.ParsingCache()

        # network stack wired with fresh default managers
        bandwidth_manager = ClientNetworkingBandwidth.NetworkBandwidthManager()
        session_manager = ClientNetworkingSessions.NetworkSessionManager()
        domain_manager = ClientNetworkingDomain.NetworkDomainManager()

        ClientDefaults.SetDefaultDomainManagerData(domain_manager)

        login_manager = ClientNetworkingLogin.NetworkLoginManager()

        self.network_engine = ClientNetworking.NetworkEngine(
            self, bandwidth_manager, session_manager, domain_manager,
            login_manager)

        self.CallToThreadLongRunning(self.network_engine.MainLoop)

        self.tag_display_manager = ClientTags.TagDisplayManager()
        self.tag_siblings_manager = ClientManagers.TagSiblingsManager(self)
        self.tag_parents_manager = ClientManagers.TagParentsManager(self)
        self._managers['undo'] = ClientManagers.UndoManager(self)
        self.server_session_manager = HydrusSessions.HydrusSessionManagerServer(
        )

        self.bitmap_manager = ClientManagers.BitmapManager(self)

        self.local_booru_manager = ClientCaches.LocalBooruCache(self)
        self.client_api_manager = ClientAPI.APIManager()

        self._cookies = {}

        self._job_scheduler = HydrusThreading.JobScheduler(self)

        self._job_scheduler.start()
Exemple #17
0
    def _ImportFiles(self, job_key):
        """Work through unknown seeds in the file seed cache, importing each path.

        Stops when the cache is exhausted, imports are paused, the thread is
        shutting down, or job_key is cancelled. Applies filename/additional
        tags to successful imports, publishes presentation hashes, and
        periodically checkpoints this object and actions processed paths.

        :param job_key: job handle used for progress reporting and cancellation
        :return: True if any seed was processed, else False
        """

        did_work = False

        time_to_save = HydrusData.GetNow() + 600

        num_files_imported = 0
        presentation_hashes = []
        presentation_hashes_fast = set()

        i = 0

        num_total = len(self._file_seed_cache)
        num_total_unknown = self._file_seed_cache.GetFileSeedCount(
            CC.STATUS_UNKNOWN)
        num_total_done = num_total - num_total_unknown

        while True:

            file_seed = self._file_seed_cache.GetNextFileSeed(
                CC.STATUS_UNKNOWN)

            p1 = HC.options['pause_import_folders_sync'] or self._paused
            p2 = HydrusThreading.IsThreadShuttingDown()
            p3 = job_key.IsCancelled()

            if file_seed is None or p1 or p2 or p3:

                break

            did_work = True

            # checkpoint every ten minutes so a crash does not lose progress
            if HydrusData.TimeHasPassed(time_to_save):

                HG.client_controller.WriteSynchronous('serialisable', self)

                time_to_save = HydrusData.GetNow() + 600

            gauge_num_done = num_total_done + num_files_imported + 1

            job_key.SetVariable(
                'popup_text_1',
                'importing file ' + HydrusData.ConvertValueRangeToPrettyString(
                    gauge_num_done, num_total))
            job_key.SetVariable('popup_gauge_1', (gauge_num_done, num_total))

            path = file_seed.file_seed_data

            file_seed.ImportPath(self._file_seed_cache,
                                 self._file_import_options,
                                 limited_mimes=self._mimes)

            if file_seed.status in CC.SUCCESSFUL_IMPORT_STATES:

                if file_seed.HasHash():

                    # renamed from 'hash' to avoid shadowing the builtin
                    file_hash = file_seed.GetHash()

                    if self._tag_import_options.HasAdditionalTags():

                        media_result = HG.client_controller.Read(
                            'media_result', file_hash)

                        downloaded_tags = []

                        service_keys_to_content_updates = self._tag_import_options.GetServiceKeysToContentUpdates(
                            file_seed.status, media_result,
                            downloaded_tags)  # additional tags

                        if len(service_keys_to_content_updates) > 0:

                            HG.client_controller.WriteSynchronous(
                                'content_updates',
                                service_keys_to_content_updates)

                    service_keys_to_tags = ClientTags.ServiceKeysToTags()

                    for (tag_service_key, filename_tagging_options) in list(
                            self._tag_service_keys_to_filename_tagging_options.
                            items()):

                        if not HG.client_controller.services_manager.ServiceExists(
                                tag_service_key):

                            continue

                        try:

                            tags = filename_tagging_options.GetTags(
                                tag_service_key, path)

                            if len(tags) > 0:

                                service_keys_to_tags[tag_service_key] = tags

                        except Exception as e:

                            HydrusData.ShowText(
                                'Trying to parse filename tags in the import folder "'
                                + self._name + '" threw an error!')

                            HydrusData.ShowException(e)

                    if len(service_keys_to_tags) > 0:

                        service_keys_to_content_updates = ClientData.ConvertServiceKeysToTagsToServiceKeysToContentUpdates(
                            {file_hash}, service_keys_to_tags)

                        HG.client_controller.WriteSynchronous(
                            'content_updates', service_keys_to_content_updates)

                    # BUGFIX: presentation must only run when we have a hash.
                    # previously this sat outside the HasHash() branch, so
                    # 'hash' could be the builtin function or a stale value
                    # from an earlier iteration.
                    if file_hash not in presentation_hashes_fast:

                        if file_seed.ShouldPresent(self._file_import_options):

                            presentation_hashes.append(file_hash)

                            presentation_hashes_fast.add(file_hash)

                num_files_imported += 1

            elif file_seed.status == CC.STATUS_ERROR:

                HydrusData.Print(
                    'A file failed to import from import folder ' +
                    self._name + ':' + path)

            i += 1

            # action (move/delete) source paths every ten seeds
            if i % 10 == 0:

                self._ActionPaths()

        if num_files_imported > 0:

            HydrusData.Print('Import folder ' + self._name + ' imported ' +
                             HydrusData.ToHumanInt(num_files_imported) +
                             ' files.')

            if len(presentation_hashes) > 0:

                ClientImporting.PublishPresentationHashes(
                    self._name, presentation_hashes,
                    self._publish_files_to_popup_button,
                    self._publish_files_to_page)

        self._ActionPaths()

        return did_work
Exemple #18
0
    def _UpdateSerialisableInfo(self, version, old_serialisable_info):
        """Migrate a stored serialisable tuple one version step forward.

        Each ``if version == n`` branch converts the version-``n`` tuple shape
        into the version-``n + 1`` shape and returns
        ``(n + 1, new_serialisable_info)``.  The serialisation framework calls
        this repeatedly until the data reaches the current version.
        """

        if version == 1:

            # v1 stored a single flat list of (hex service key, action) pairs
            # plus one boolean.
            (serialisable_service_actions,
             delete_second_file) = old_serialisable_info

            tag_service_actions = []
            rating_service_actions = []

            # As the client isn't booted when this is loaded in options, there isn't a good way to figure out tag from rating
            # So, let's just dupe and purge later on, in serialisation
            for (service_key_encoded, action) in serialisable_service_actions:

                # service keys are stored hex-encoded; decode back to bytes
                service_key = bytes.fromhex(service_key_encoded)

                # v2 tag actions gain a tag filter; start with a fresh
                # (presumably permit-everything) default filter
                tag_filter = ClientTags.TagFilter()

                tag_service_actions.append((service_key, action, tag_filter))

                rating_service_actions.append((service_key, action))

            serialisable_tag_service_actions = [
                (service_key.hex(), action, tag_filter.GetSerialisableTuple())
                for (service_key, action, tag_filter) in tag_service_actions
            ]
            serialisable_rating_service_actions = [
                (service_key.hex(), action)
                for (service_key, action) in rating_service_actions
            ]

            # carry the old delete flag over as the new sync_archive flag;
            # the two new flags below are introduced with conservative defaults
            sync_archive = delete_second_file
            delete_both_files = False

            new_serialisable_info = (serialisable_tag_service_actions,
                                     serialisable_rating_service_actions,
                                     delete_second_file, sync_archive,
                                     delete_both_files)

            return (2, new_serialisable_info)

        if version == 2:

            (serialisable_tag_service_actions,
             serialisable_rating_service_actions, delete_second_file,
             sync_archive, delete_both_files) = old_serialisable_info

            # v3 adds a URL-sync action; None means "no action configured yet"
            sync_urls_action = None

            new_serialisable_info = (serialisable_tag_service_actions,
                                     serialisable_rating_service_actions,
                                     delete_second_file, sync_archive,
                                     delete_both_files, sync_urls_action)

            return (3, new_serialisable_info)

        if version == 3:

            (serialisable_tag_service_actions,
             serialisable_rating_service_actions, delete_second_file,
             sync_archive, delete_both_files,
             sync_urls_action) = old_serialisable_info

            # v4 drops the delete_second_file and delete_both_files flags
            # entirely; only the sync flags survive
            new_serialisable_info = (serialisable_tag_service_actions,
                                     serialisable_rating_service_actions,
                                     sync_archive, sync_urls_action)

            return (4, new_serialisable_info)