def run_test( source, expected_data ):
    """Run a migration from source into a list-pair destination and compare results as sets."""
    
    pair_destination = ClientMigration.MigrationDestinationListPairs( self )
    
    migration_job = ClientMigration.MigrationJob( self, 'test', source, pair_destination )
    
    migration_job.Run()
    
    # pair order is not significant, so compare as sets
    received_pairs = set( pair_destination.GetDataReceived() )
    
    self.assertEqual( received_pairs, set( expected_data ) )
    def _test_mappings_list_to_list(self):
        """A mappings list source piped into a list destination should arrive unchanged."""

        expected_rows = list(self._hashes_to_current_tags.items())

        # an empty fixture would make this test vacuous
        self.assertTrue(len(expected_rows) > 0)

        list_source = ClientMigration.MigrationSourceList(self, expected_rows)
        list_destination = ClientMigration.MigrationDestinationListMappings(self)

        migration_job = ClientMigration.MigrationJob(self, 'test', list_source,
                                                     list_destination)

        migration_job.Run()

        self.assertEqual(list_destination.GetDataReceived(), expected_rows)
 def run_test( source, destination_path, content_type, expected_data ):
     """Migrate pairs from source into an HTPA at destination_path and verify its contents.
     
     The archive is closed in a finally block so the file handle is never
     leaked, even when the assertion fails.
     """
     
     destination = ClientMigration.MigrationDestinationHTPA( self, destination_path, content_type )
     
     job = ClientMigration.MigrationJob( self, 'test', source, destination )
     
     job.Run()
     
     hta = HydrusTagArchive.HydrusTagPairArchive( destination_path )
     
     try:
         
         result = list( hta.IteratePairs() )
         
         # pair order is not guaranteed, so compare as sets
         self.assertEqual( set( result ), set( expected_data ) )
         
     finally:
         
         hta.Close()
 def run_test( source, destination_path, desired_hash_type, expected_data ):
     """Migrate mappings from source into an HTA at destination_path and verify its contents.
     
     The archive is closed in a finally block so the file handle is never
     leaked, even when the assertion fails.
     """
     
     destination = ClientMigration.MigrationDestinationHTA( self, destination_path, desired_hash_type )
     
     job = ClientMigration.MigrationJob( self, 'test', source, destination )
     
     job.Run()
     
     hta = HydrusTagArchive.HydrusTagArchive( destination_path )
     
     try:
         
         result = list( hta.IterateMappings() )
         
         # mapping order is not guaranteed, so compare as dicts
         self.assertEqual( dict( result ), dict( expected_data ) )
         
     finally:
         
         hta.Close()
        def run_test(source, tag_service_key, content_action, expected_data):
            """Run a pair migration into a tag service and check the resulting db state.

            NOTE(review): content_type comes from the enclosing scope. If it is
            neither parents nor siblings, statuses_to_pairs is never bound and
            the code below raises NameError; the same applies to should_be_in
            for an unrecognised content_action — presumably callers only pass
            the four actions handled here.
            """

            destination = ClientMigration.MigrationDestinationTagServicePairs(
                self, tag_service_key, content_action, content_type)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            # read back the service's pair statuses from the db
            if content_type == HC.CONTENT_TYPE_TAG_PARENTS:

                statuses_to_pairs = self.Read('tag_parents', tag_service_key)

            elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:

                statuses_to_pairs = self.Read('tag_siblings', tag_service_key)

            # map the action performed to where the pairs should (and should
            # not) appear afterwards
            if content_action == HC.CONTENT_UPDATE_ADD:

                should_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_CURRENT])
                should_not_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_DELETED])

            elif content_action == HC.CONTENT_UPDATE_DELETE:

                should_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_DELETED])
                should_not_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_CURRENT])

            elif content_action == HC.CONTENT_UPDATE_PEND:

                should_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_PENDING])
                should_not_be_in = set()

            elif content_action == HC.CONTENT_UPDATE_PETITION:

                should_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_PETITIONED])
                should_not_be_in = set()

            for pair in expected_data:

                self.assertIn(pair, should_be_in)
                self.assertNotIn(pair, should_not_be_in)
    def _test_pairs_list_to_htpa(self, content_type):
        """Test migrating a list of tag pairs into a Hydrus Tag Pair Archive on disk."""
        def run_test(source, destination_path, content_type, expected_data):
            """Run the migration and verify the archive holds exactly expected_data."""

            destination = ClientMigration.MigrationDestinationHTPA(
                self, destination_path, content_type)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            hta = HydrusTagArchive.HydrusTagPairArchive(destination_path)

            try:

                result = list(hta.IteratePairs())

                # pair order is not guaranteed, so compare as sets
                self.assertEqual(set(result), set(expected_data))

            finally:

                # always close, so the db file can be removed below even if
                # the assertion fails
                hta.Close()

        (current, pending, to_be_pended,
         deleted) = pair_types_to_pools[content_type]

        htpa_path = os.path.join(TestController.DB_DIR, 'htpa.db')

        #

        source = ClientMigration.MigrationSourceList(self, current)

        run_test(source, htpa_path, content_type, list(current))

        #

        os.remove(htpa_path)
 def _test_pairs_list_to_list( self, content_type ):
     """A pair list source piped into a pair list destination should arrive unchanged."""
     
     ( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
     
     expected_pairs = list( current )
     
     # an empty pool would make this test vacuous
     self.assertTrue( len( expected_pairs ) > 0 )
     
     pair_source = ClientMigration.MigrationSourceList( self, expected_pairs )
     pair_destination = ClientMigration.MigrationDestinationListPairs( self )
     
     migration_job = ClientMigration.MigrationJob( self, 'test', pair_source, pair_destination )
     
     migration_job.Run()
     
     self.assertEqual( pair_destination.GetDataReceived(), expected_pairs )
 def _test_pairs_service_to_list( self, content_type ):
     """Migrate pairs out of tag services into a list, exercising tag filters and content statuses."""
     
     def run_test( source, expected_data ):
         """Run the migration and compare the received pairs to expected_data as sets."""
         
         destination = ClientMigration.MigrationDestinationListPairs( self )
         
         job = ClientMigration.MigrationJob( self, 'test', source, destination )
         
         job.Run()
         
         # pair order is not guaranteed, so compare as sets
         self.assertEqual( set( destination.GetDataReceived() ), set( expected_data ) )
         
     
     # pair_types_to_pools is fixture data defined elsewhere in this module
     ( current, pending, to_be_pended, deleted ) = pair_types_to_pools[ content_type ]
     
     # test filters and content statuses
     
     tag_repo_service_key = self._test_tag_repo_service_keys[10]
     
     # rows of ( service to read from, pools whose pairs we expect back, statuses to request )
     content_source_tests = []
     
     content_source_tests.append( ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, ( current, ), ( HC.CONTENT_STATUS_CURRENT, ) ) )
     content_source_tests.append( ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, ( deleted, ), ( HC.CONTENT_STATUS_DELETED, ) ) )
     content_source_tests.append( ( tag_repo_service_key, ( current, ), ( HC.CONTENT_STATUS_CURRENT, ) ) )
     content_source_tests.append( ( tag_repo_service_key, ( current, pending ), ( HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING ) ) )
     content_source_tests.append( ( tag_repo_service_key, ( deleted, ), ( HC.CONTENT_STATUS_DELETED, ) ) )
     
     # free_filter passes everything; namespace_filter passes only namespaced tags
     free_filter = ClientTags.TagFilter()
     
     namespace_filter = ClientTags.TagFilter()
     
     namespace_filter.SetRule( ':', CC.FILTER_WHITELIST )
     namespace_filter.SetRule( '', CC.FILTER_BLACKLIST )
     
     # every combination of filtering on the left and/or right side of a pair
     test_filters = []
     
     test_filters.append( ( free_filter, free_filter ) )
     test_filters.append( ( namespace_filter, free_filter ) )
     test_filters.append( ( free_filter, namespace_filter ) )
     test_filters.append( ( namespace_filter, namespace_filter ) )
     
     for ( left_tag_filter, right_tag_filter ) in test_filters:
         
         for ( service_key, content_lists, content_statuses ) in content_source_tests:
             
             source = ClientMigration.MigrationSourceTagServicePairs( self, service_key, content_type, left_tag_filter, right_tag_filter, content_statuses )
             
             # a pair survives only if both sides pass their respective filter
             expected_data = set()
             
             for content_list in content_lists:
                 
                 expected_data.update( ( ( left_tag, right_tag ) for ( left_tag, right_tag ) in content_list if left_tag_filter.TagOK( left_tag ) and right_tag_filter.TagOK( right_tag ) ) )
                 
             
             run_test( source, expected_data )
    def _test_mappings_list_to_hta(self):
        """Test migrating hash->tags mappings from a list into Hydrus Tag Archives on disk."""
        def run_test(source, destination_path, desired_hash_type,
                     expected_data):
            """Run the migration and verify the archive holds exactly expected_data."""

            destination = ClientMigration.MigrationDestinationHTA(
                self, destination_path, desired_hash_type)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            hta = HydrusTagArchive.HydrusTagArchive(destination_path)

            try:

                result = list(hta.IterateMappings())

                # mapping order is not guaranteed, so compare as dicts
                self.assertEqual(dict(result), dict(expected_data))

            finally:

                # always close, so the db files can be removed below even if
                # the assertion fails
                hta.Close()

        md5_hta_path = os.path.join(TestController.DB_DIR, 'md5hta.db')
        sha256_hta_path = os.path.join(TestController.DB_DIR, 'sha256hta.db')

        #

        # one archive keyed by md5, one by sha256
        md5_data = [(self._sha256_to_md5[hash], tags)
                    for (hash, tags) in self._hashes_to_current_tags.items()]
        sha256_data = list(self._hashes_to_current_tags.items())

        md5_source = ClientMigration.MigrationSourceList(self, md5_data)
        sha256_source = ClientMigration.MigrationSourceList(self, sha256_data)

        run_test(md5_source, md5_hta_path, 'md5', md5_data)
        run_test(sha256_source, sha256_hta_path, 'sha256', sha256_data)

        #

        os.remove(md5_hta_path)
        os.remove(sha256_hta_path)
# Beispiel #10
    def _test_pairs_htpa_to_list(self, content_type):
        """Build an HTPA on disk, then test migrating its pairs into a list with tag filters."""
        def run_test(source, expected_data):
            """Run the migration and compare the received pairs to expected_data as sets."""

            destination = ClientMigration.MigrationDestinationListPairs(self)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            self.assertEqual(set(destination.GetDataReceived()),
                             set(expected_data))

        (current, pending, to_be_pended,
         deleted) = pair_types_to_pools[content_type]

        htpa_path = os.path.join(TestController.DB_DIR, 'htpa.db')

        # build the archive fixture with the 'current' pairs
        htpa = HydrusTagArchive.HydrusTagPairArchive(htpa_path)

        if content_type == HC.CONTENT_TYPE_TAG_PARENTS:

            htpa.SetPairType(HydrusTagArchive.TAG_PAIR_TYPE_PARENTS)

        elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:

            htpa.SetPairType(HydrusTagArchive.TAG_PAIR_TYPE_SIBLINGS)

        htpa.BeginBigJob()

        htpa.AddPairs(current)

        htpa.CommitBigJob()

        htpa.Optimise()

        htpa.Close()

        del htpa

        #

        # test tag filter, left, right, both

        # NOTE(review): this block uses HydrusTags.TagFilter / HC.FILTER_*
        # where sibling tests use ClientTags.TagFilter / CC.FILTER_* — confirm
        # which API this file's imports actually provide.
        free_filter = HydrusTags.TagFilter()

        namespace_filter = HydrusTags.TagFilter()

        namespace_filter.SetRule(':', HC.FILTER_WHITELIST)
        namespace_filter.SetRule('', HC.FILTER_BLACKLIST)

        test_filters = []

        test_filters.append((free_filter, free_filter))
        test_filters.append((namespace_filter, free_filter))
        test_filters.append((free_filter, namespace_filter))
        test_filters.append((namespace_filter, namespace_filter))

        for (left_tag_filter, right_tag_filter) in test_filters:

            source = ClientMigration.MigrationSourceHTPA(
                self, htpa_path, left_tag_filter, right_tag_filter)

            # a pair survives only if both sides pass their respective filter
            expected_data = [(left_tag, right_tag)
                             for (left_tag, right_tag) in current
                             if left_tag_filter.TagOK(left_tag)
                             and right_tag_filter.TagOK(right_tag)]

            run_test(source, expected_data)

        #

        os.remove(htpa_path)
# Beispiel #11
        def run_test(source, tag_service_key, content_action, expected_data):
            """Run a mappings migration into a tag service and check per-file tag state.

            For every (hash, tags) row in expected_data, read the file's media
            result back from the db and assert the tags landed in the status
            implied by content_action.
            """

            destination = ClientMigration.MigrationDestinationTagServiceMappings(
                self, tag_service_key, content_action)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            # swap in a fresh media result cache so the reads below are not
            # served stale pre-migration results
            self._db._weakref_media_result_cache = ClientMediaResultCache.MediaResultCache(
            )

            hashes_to_media_results = {
                media_result.GetHash(): media_result
                for media_result in self.Read(
                    'media_results', list(self._hashes_to_current_tags.keys()))
            }

            for (hash, tags) in expected_data:

                media_result = hashes_to_media_results[hash]

                t_m = media_result.GetTagsManager()

                if content_action == HC.CONTENT_UPDATE_ADD:

                    current_tags = t_m.GetCurrent(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertIn(tag, current_tags)

                elif content_action == HC.CONTENT_UPDATE_DELETE:

                    # a delete should both remove the tag and leave a deletion record
                    current_tags = t_m.GetCurrent(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)
                    deleted_tags = t_m.GetDeleted(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertNotIn(tag, current_tags)
                        self.assertIn(tag, deleted_tags)

                elif content_action == HC.CONTENT_UPDATE_CLEAR_DELETE_RECORD:

                    deleted_tags = t_m.GetDeleted(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertNotIn(tag, deleted_tags)

                elif content_action == HC.CONTENT_UPDATE_PEND:

                    pending_tags = t_m.GetPending(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertIn(tag, pending_tags)

                elif content_action == HC.CONTENT_UPDATE_PETITION:

                    petitioned_tags = t_m.GetPetitioned(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertIn(tag, petitioned_tags)
# Beispiel #12
    def _test_mappings_list_to_service(self):
        """Test migrating hash->tags mappings from a list into local and repo tag services."""
        def run_test(source, tag_service_key, content_action, expected_data):
            """Run the migration and assert every expected tag landed in the status implied by content_action."""

            destination = ClientMigration.MigrationDestinationTagServiceMappings(
                self, tag_service_key, content_action)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            # swap in a fresh media result cache so the reads below are not
            # served stale pre-migration results
            self._db._weakref_media_result_cache = ClientMediaResultCache.MediaResultCache(
            )

            hashes_to_media_results = {
                media_result.GetHash(): media_result
                for media_result in self.Read(
                    'media_results', list(self._hashes_to_current_tags.keys()))
            }

            for (hash, tags) in expected_data:

                media_result = hashes_to_media_results[hash]

                t_m = media_result.GetTagsManager()

                if content_action == HC.CONTENT_UPDATE_ADD:

                    current_tags = t_m.GetCurrent(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertIn(tag, current_tags)

                elif content_action == HC.CONTENT_UPDATE_DELETE:

                    # a delete should both remove the tag and leave a deletion record
                    current_tags = t_m.GetCurrent(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)
                    deleted_tags = t_m.GetDeleted(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertNotIn(tag, current_tags)
                        self.assertIn(tag, deleted_tags)

                elif content_action == HC.CONTENT_UPDATE_CLEAR_DELETE_RECORD:

                    deleted_tags = t_m.GetDeleted(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertNotIn(tag, deleted_tags)

                elif content_action == HC.CONTENT_UPDATE_PEND:

                    pending_tags = t_m.GetPending(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertIn(tag, pending_tags)

                elif content_action == HC.CONTENT_UPDATE_PETITION:

                    petitioned_tags = t_m.GetPetitioned(
                        tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)

                    for tag in tags:

                        self.assertIn(tag, petitioned_tags)

        #

        # local add

        # to_be_pended_tag_pool is module-level fixture data defined elsewhere
        data = [(hash, set(random.sample(to_be_pended_tag_pool, 2)))
                for hash in self._hashes_to_current_tags.keys()]

        source = ClientMigration.MigrationSourceList(self, data)

        run_test(source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
                 HC.CONTENT_UPDATE_ADD, data)

        # local delete

        data = [(hash, set(random.sample(tags, 2)))
                for (hash, tags) in self._hashes_to_current_tags.items()]

        source = ClientMigration.MigrationSourceList(self, data)

        run_test(source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
                 HC.CONTENT_UPDATE_DELETE, data)

        # local clear deletion record

        data = [(hash, set(random.sample(tags, 2)))
                for (hash, tags) in self._hashes_to_deleted_tags.items()]

        source = ClientMigration.MigrationSourceList(self, data)

        run_test(source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
                 HC.CONTENT_UPDATE_CLEAR_DELETE_RECORD, data)

        # tag repo pend

        data = [(hash, set(random.sample(to_be_pended_tag_pool, 2)))
                for hash in self._hashes_to_current_tags.keys()]

        source = ClientMigration.MigrationSourceList(self, data)

        run_test(source, self._test_tag_repo_service_keys[1],
                 HC.CONTENT_UPDATE_PEND, data)

        # tag repo petition

        data = [(hash, set(random.sample(tags, 2)))
                for (hash, tags) in self._hashes_to_current_tags.items()]

        source = ClientMigration.MigrationSourceList(self, data)

        run_test(source, self._test_tag_repo_service_keys[1],
                 HC.CONTENT_UPDATE_PETITION, data)
# Beispiel #13
    def _test_mappings_service_to_list(self):
        """Migrate mappings out of tag services into a list, exercising file filters, hash types, tag filters, and statuses."""
        def run_test(source, expected_data):
            """Run the migration and compare the received mappings to expected_data as dicts."""

            destination = ClientMigration.MigrationDestinationListMappings(
                self)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            self.assertEqual(dict(destination.GetDataReceived()),
                             dict(expected_data))

        # test file filter

        tag_repo_service_key = self._test_tag_repo_service_keys[0]

        # a default TagFilter passes everything
        tag_filter = HydrusTags.TagFilter()

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
            CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter,
            (HC.CONTENT_STATUS_CURRENT, ))

        expected_data = list(self._hashes_to_current_tags.items())

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256',
            None, tag_filter, (HC.CONTENT_STATUS_CURRENT, ))

        expected_data = list(self._hashes_to_current_tags.items())

        run_test(source, expected_data)

        # restricting to the local file service should drop non-local files
        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, CC.LOCAL_FILE_SERVICE_KEY,
            'sha256', None, tag_filter, (HC.CONTENT_STATUS_CURRENT, ))

        expected_data = [(hash, tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in self._my_files_sha256]

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, tag_repo_service_key, CC.LOCAL_FILE_SERVICE_KEY, 'sha256',
            None, tag_filter, (HC.CONTENT_STATUS_CURRENT, ))

        expected_data = [(hash, tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in self._my_files_sha256]

        run_test(source, expected_data)

        # not all hashes, since hash type lookup only available for imported files
        # NOTE(review): random.sample on a set was removed in py3.11 — confirm
        # _my_files_sha256 is a sequence
        hashes = random.sample(self._my_files_sha256, 25)

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
            CC.COMBINED_FILE_SERVICE_KEY, 'sha256', hashes, tag_filter,
            (HC.CONTENT_STATUS_CURRENT, ))

        expected_data = [(hash, tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in hashes]

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256',
            hashes, tag_filter, (HC.CONTENT_STATUS_CURRENT, ))

        expected_data = [(hash, tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in hashes]

        run_test(source, expected_data)

        # test desired hash type

        # not all hashes, since hash type lookup only available for imported files
        expected_data = [(self._sha256_to_sha1[hash], tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in self._my_files_sha256]

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
            CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter,
            (HC.CONTENT_STATUS_CURRENT, ))

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha1',
            None, tag_filter, (HC.CONTENT_STATUS_CURRENT, ))

        run_test(source, expected_data)

        # tag filter

        # whitelist unnamespaced, blacklist namespaced tags
        tag_filter = HydrusTags.TagFilter()

        tag_filter.SetRule('', HC.FILTER_WHITELIST)
        tag_filter.SetRule(':', HC.FILTER_BLACKLIST)

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
            CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter,
            (HC.CONTENT_STATUS_CURRENT, ))

        # apply the same filter to the fixture, dropping rows left with no tags
        expected_data = [(hash, tag_filter.Filter(tags))
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()]
        expected_data = [(hash, tags) for (hash, tags) in expected_data
                         if len(tags) > 0]

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256',
            None, tag_filter, (HC.CONTENT_STATUS_CURRENT, ))

        expected_data = [(hash, tag_filter.Filter(tags))
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()]
        expected_data = [(hash, tags) for (hash, tags) in expected_data
                         if len(tags) > 0]

        run_test(source, expected_data)

        # test statuses

        tag_filter = HydrusTags.TagFilter()

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
            CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter,
            (HC.CONTENT_STATUS_DELETED, ))

        expected_data = list(self._hashes_to_deleted_tags.items())

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256',
            None, tag_filter, (HC.CONTENT_STATUS_DELETED, ))

        expected_data = list(self._hashes_to_deleted_tags.items())

        run_test(source, expected_data)

        # asking for current+pending should merge the two tag sets per hash
        source = ClientMigration.MigrationSourceTagServiceMappings(
            self, tag_repo_service_key, CC.COMBINED_FILE_SERVICE_KEY, 'sha256',
            None, tag_filter,
            (HC.CONTENT_STATUS_CURRENT, HC.CONTENT_STATUS_PENDING))

        expected_data = collections.defaultdict(set)

        for (hash, tags) in self._hashes_to_current_tags.items():

            expected_data[hash].update(tags)

        for (hash, tags) in self._hashes_to_pending_tags.items():

            expected_data[hash].update(tags)

        expected_data = list(expected_data.items())

        run_test(source, expected_data)
# Beispiel #14
    def _test_mappings_hta_to_list(self):
        """Build md5- and sha256-keyed HTAs on disk, then test migrating their mappings into a list."""
        def run_test(source, expected_data):
            """Run the migration and compare the received mappings to expected_data as dicts."""

            destination = ClientMigration.MigrationDestinationListMappings(
                self)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            self.assertEqual(dict(destination.GetDataReceived()),
                             dict(expected_data))

        md5_hta_path = os.path.join(TestController.DB_DIR, 'md5hta.db')
        sha256_hta_path = os.path.join(TestController.DB_DIR, 'sha256hta.db')

        # build the two archive fixtures from the current-tags mapping
        md5_hta = HydrusTagArchive.HydrusTagArchive(md5_hta_path)
        sha256_hta = HydrusTagArchive.HydrusTagArchive(sha256_hta_path)

        md5_hta.SetHashType(HydrusTagArchive.HASH_TYPE_MD5)
        sha256_hta.SetHashType(HydrusTagArchive.HASH_TYPE_SHA256)

        md5_hta.BeginBigJob()
        sha256_hta.BeginBigJob()

        for (hash, tags) in self._hashes_to_current_tags.items():

            md5 = self._sha256_to_md5[hash]

            md5_hta.AddMappings(md5, tags)
            sha256_hta.AddMappings(hash, tags)

        md5_hta.CommitBigJob()
        sha256_hta.CommitBigJob()

        md5_hta.Optimise()
        sha256_hta.Optimise()

        md5_hta.Close()
        sha256_hta.Close()

        del md5_hta
        del sha256_hta

        #

        # test file filter

        # a default TagFilter passes everything
        tag_filter = HydrusTags.TagFilter()

        source = ClientMigration.MigrationSourceHTA(
            self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', None,
            tag_filter)

        expected_data = [(self._sha256_to_md5[hash], tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()]

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceHTA(
            self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256',
            None, tag_filter)

        expected_data = list(self._hashes_to_current_tags.items())

        run_test(source, expected_data)

        # restricting to the local file service should drop non-local files
        source = ClientMigration.MigrationSourceHTA(self, md5_hta_path,
                                                    CC.LOCAL_FILE_SERVICE_KEY,
                                                    'md5', None, tag_filter)

        expected_data = [(self._sha256_to_md5[hash], tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in self._my_files_sha256]

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceHTA(self, sha256_hta_path,
                                                    CC.LOCAL_FILE_SERVICE_KEY,
                                                    'sha256', None, tag_filter)

        expected_data = [(hash, tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in self._my_files_sha256]

        run_test(source, expected_data)

        # not all hashes, since hash type lookup only available for imported files
        # NOTE(review): random.sample on a set was removed in py3.11 — confirm
        # _my_files_sha256 is a sequence
        hashes = random.sample(self._my_files_sha256, 25)

        source = ClientMigration.MigrationSourceHTA(
            self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', hashes,
            tag_filter)

        expected_data = [(self._sha256_to_md5[hash], tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in hashes]

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceHTA(
            self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256',
            hashes, tag_filter)

        expected_data = [(hash, tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in hashes]

        run_test(source, expected_data)

        # test desired hash type

        # not all hashes, since hash type lookup only available for imported files
        expected_data = [(self._sha256_to_sha1[hash], tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in self._my_files_sha256]

        source = ClientMigration.MigrationSourceHTA(
            self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None,
            tag_filter)

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceHTA(
            self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None,
            tag_filter)

        run_test(source, expected_data)

        # do a test with specific hashes, so md5->sha1 does interim sha256 conversion
        # not all hashes, since hash type lookup only available for imported files
        hashes = random.sample(self._my_files_sha256, 25)

        expected_data = [(self._sha256_to_sha1[hash], tags)
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()
                         if hash in hashes]

        source = ClientMigration.MigrationSourceHTA(
            self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', hashes,
            tag_filter)

        run_test(source, expected_data)

        # tag filter

        # whitelist unnamespaced, blacklist namespaced tags
        tag_filter = HydrusTags.TagFilter()

        tag_filter.SetRule('', HC.FILTER_WHITELIST)
        tag_filter.SetRule(':', HC.FILTER_BLACKLIST)

        source = ClientMigration.MigrationSourceHTA(
            self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', None,
            tag_filter)

        # apply the same filter to the fixture, dropping rows left with no tags
        expected_data = [(self._sha256_to_md5[hash], tag_filter.Filter(tags))
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()]
        expected_data = [(hash, tags) for (hash, tags) in expected_data
                         if len(tags) > 0]

        run_test(source, expected_data)

        source = ClientMigration.MigrationSourceHTA(
            self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256',
            None, tag_filter)

        expected_data = [(hash, tag_filter.Filter(tags))
                         for (hash,
                              tags) in self._hashes_to_current_tags.items()]
        expected_data = [(hash, tags) for (hash, tags) in expected_data
                         if len(tags) > 0]

        run_test(source, expected_data)

        #

        os.remove(md5_hta_path)
        os.remove(sha256_hta_path)
# Beispiel #15
    def _test_pairs_list_to_service(self, content_type):
        """Test migrating a list of tag pairs into local and repo tag services for each content action."""
        def run_test(source, tag_service_key, content_action, expected_data):
            """Run the migration and check the pairs landed in the status implied by content_action.

            NOTE(review): if content_type is neither parents nor siblings,
            statuses_to_pairs is never bound and the code below raises
            NameError; same for should_be_in with an unhandled content_action.
            """

            destination = ClientMigration.MigrationDestinationTagServicePairs(
                self, tag_service_key, content_action, content_type)

            job = ClientMigration.MigrationJob(self, 'test', source,
                                               destination)

            job.Run()

            # read back the service's pair statuses from the db
            if content_type == HC.CONTENT_TYPE_TAG_PARENTS:

                statuses_to_pairs = self.Read('tag_parents', tag_service_key)

            elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:

                statuses_to_pairs = self.Read('tag_siblings', tag_service_key)

            # map the action performed to where pairs should (not) appear
            if content_action == HC.CONTENT_UPDATE_ADD:

                should_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_CURRENT])
                should_not_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_DELETED])

            elif content_action == HC.CONTENT_UPDATE_DELETE:

                should_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_DELETED])
                should_not_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_CURRENT])

            elif content_action == HC.CONTENT_UPDATE_PEND:

                should_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_PENDING])
                should_not_be_in = set()

            elif content_action == HC.CONTENT_UPDATE_PETITION:

                should_be_in = set(
                    statuses_to_pairs[HC.CONTENT_STATUS_PETITIONED])
                should_not_be_in = set()

            for pair in expected_data:

                self.assertIn(pair, should_be_in)
                self.assertNotIn(pair, should_not_be_in)

        #

        tag_repo_service_key = self._test_tag_repo_service_keys[11]

        (current, pending, to_be_pended,
         deleted) = pair_types_to_pools[content_type]

        # rows of ( target service, pairs to migrate, action to apply )
        test_rows = []

        test_rows.append((CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, to_be_pended,
                          HC.CONTENT_UPDATE_ADD))
        test_rows.append((CC.DEFAULT_LOCAL_TAG_SERVICE_KEY,
                          random.sample(current, 3), HC.CONTENT_UPDATE_DELETE))
        test_rows.append(
            (tag_repo_service_key, to_be_pended, HC.CONTENT_UPDATE_PEND))
        test_rows.append((tag_repo_service_key, random.sample(current, 3),
                          HC.CONTENT_UPDATE_PETITION))

        for (service_key, data, action) in test_rows:

            source = ClientMigration.MigrationSourceList(self, data)

            run_test(source, service_key, action, data)