def Prepare(self):
    """Open the tag pair archive at self._path and start a pair iteration.
    
    Sets self._htpa and self._iterator for the subsequent migration work.
    """
    
    archive = HydrusTagArchive.HydrusTagPairArchive(self._path)
    archive.BeginBigJob()
    
    self._htpa = archive
    self._iterator = self._htpa.IteratePairs()
def Prepare( self ):
    """Open the tag archive at self._path and start a mappings iteration.
    
    Sets self._hta, self._source_hash_type (string form of the archive's
    hash type), and self._iterator.
    """
    
    archive = HydrusTagArchive.HydrusTagArchive( self._path )
    archive.BeginBigJob()
    
    self._hta = archive
    
    # remember which hash type this archive stores, as a string
    self._source_hash_type = HydrusTagArchive.hash_type_to_str_lookup[ self._hta.GetHashType() ]
    
    self._iterator = self._hta.IterateMappings()
def Prepare(self):
    """Create/open the destination tag pair archive and begin a big job.
    
    Records the start time and configures the archive's pair type from
    self._content_type via the module-level content_types_to_pair_types map.
    """
    
    self._time_started = HydrusData.GetNow()
    
    archive = HydrusTagArchive.HydrusTagPairArchive(self._path)
    archive.SetPairType(content_types_to_pair_types[self._content_type])
    archive.BeginBigJob()
    
    self._htpa = archive
def Prepare( self ):
    """Create/open the destination tag archive and begin a big job.
    
    Records the start time and configures the archive's hash type from
    self._desired_hash_type (string form) via hash_str_to_type_lookup.
    """
    
    self._time_started = HydrusData.GetNow()
    
    archive = HydrusTagArchive.HydrusTagArchive( self._path )
    archive.SetHashType( HydrusTagArchive.hash_str_to_type_lookup[ self._desired_hash_type ] )
    archive.BeginBigJob()
    
    self._hta = archive
def run_test( source, destination_path, content_type, expected_data ):
    """Migrate pairs from source into a fresh HTPA at destination_path,
    then reopen the archive and check its pairs equal expected_data (as sets).
    """
    
    dest = ClientMigration.MigrationDestinationHTPA( self, destination_path, content_type )
    
    migration_job = ClientMigration.MigrationJob( self, 'test', source, dest )
    migration_job.Run()
    
    # reopen the written archive and verify its full contents
    archive = HydrusTagArchive.HydrusTagPairArchive( destination_path )
    
    written_pairs = list( archive.IteratePairs() )
    
    self.assertEqual( set( written_pairs ), set( expected_data ) )
    
    archive.Close()
def run_test( source, destination_path, desired_hash_type, expected_data ):
    """Migrate mappings from source into a fresh HTA at destination_path,
    then reopen the archive and check its mappings equal expected_data (as dicts).
    """
    
    dest = ClientMigration.MigrationDestinationHTA( self, destination_path, desired_hash_type )
    
    migration_job = ClientMigration.MigrationJob( self, 'test', source, dest )
    migration_job.Run()
    
    # reopen the written archive and verify its full contents
    archive = HydrusTagArchive.HydrusTagArchive( destination_path )
    
    written_mappings = list( archive.IterateMappings() )
    
    self.assertEqual( dict( written_mappings ), dict( expected_data ) )
    
    archive.Close()
def _test_pairs_htpa_to_list(self, content_type):
    """Write the 'current' pool of pairs for content_type (parents or siblings)
    into a temporary HTPA file, then migrate it to a list destination under
    several left/right tag-filter combinations, checking the received pairs.
    """
    
    def run_test(source, expected_data):
        # drain the source into a list destination and compare as sets
        list_destination = ClientMigration.MigrationDestinationListPairs(self)
        
        migration_job = ClientMigration.MigrationJob(self, 'test', source, list_destination)
        migration_job.Run()
        
        self.assertEqual(set(list_destination.GetDataReceived()), set(expected_data))
    
    # only the 'current' pool is written to the archive here
    (current, _pending, _to_be_pended, _deleted) = pair_types_to_pools[content_type]
    
    htpa_path = os.path.join(TestController.DB_DIR, 'htpa.db')
    
    archive = HydrusTagArchive.HydrusTagPairArchive(htpa_path)
    
    if content_type == HC.CONTENT_TYPE_TAG_PARENTS:
        archive.SetPairType(HydrusTagArchive.TAG_PAIR_TYPE_PARENTS)
    elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
        archive.SetPairType(HydrusTagArchive.TAG_PAIR_TYPE_SIBLINGS)
    
    archive.BeginBigJob()
    archive.AddPairs(current)
    archive.CommitBigJob()
    archive.Optimise()
    archive.Close()
    
    del archive
    
    #
    
    # test tag filter applied on the left side, the right side, and both
    
    free_filter = HydrusTags.TagFilter()
    
    namespace_filter = HydrusTags.TagFilter()
    namespace_filter.SetRule(':', HC.FILTER_WHITELIST)
    namespace_filter.SetRule('', HC.FILTER_BLACKLIST)
    
    test_filters = [
        (free_filter, free_filter),
        (namespace_filter, free_filter),
        (free_filter, namespace_filter),
        (namespace_filter, namespace_filter)
    ]
    
    for (left_tag_filter, right_tag_filter) in test_filters:
        
        source = ClientMigration.MigrationSourceHTPA(self, htpa_path, left_tag_filter, right_tag_filter)
        
        # a pair survives only if both sides pass their respective filters
        expected_data = [pair for pair in current if left_tag_filter.TagOK(pair[0]) and right_tag_filter.TagOK(pair[1])]
        
        run_test(source, expected_data)
    
    #
    
    os.remove(htpa_path)
def _test_mappings_hta_to_list(self):
    """Write the current hash->tags mappings into two temporary HTAs (one keyed
    by md5, one by sha256), then migrate each to a list destination under
    varying file filters, explicit hash lists, desired hash types, and tag
    filters, checking the received mappings each time.
    """
    
    def run_test(source, expected_data):
        # drain the source into a list destination and compare as dicts
        destination = ClientMigration.MigrationDestinationListMappings(self)
        job = ClientMigration.MigrationJob(self, 'test', source, destination)
        job.Run()
        self.assertEqual(dict(destination.GetDataReceived()), dict(expected_data))
    
    md5_hta_path = os.path.join(TestController.DB_DIR, 'md5hta.db')
    sha256_hta_path = os.path.join(TestController.DB_DIR, 'sha256hta.db')
    
    md5_hta = HydrusTagArchive.HydrusTagArchive(md5_hta_path)
    sha256_hta = HydrusTagArchive.HydrusTagArchive(sha256_hta_path)
    
    md5_hta.SetHashType(HydrusTagArchive.HASH_TYPE_MD5)
    sha256_hta.SetHashType(HydrusTagArchive.HASH_TYPE_SHA256)
    
    md5_hta.BeginBigJob()
    sha256_hta.BeginBigJob()
    
    # write the same mappings into both archives, keyed by their hash type
    # NOTE: 'hash' shadows the builtin throughout this method
    for (hash, tags) in self._hashes_to_current_tags.items():
        md5 = self._sha256_to_md5[hash]
        md5_hta.AddMappings(md5, tags)
        sha256_hta.AddMappings(hash, tags)
    
    md5_hta.CommitBigJob()
    sha256_hta.CommitBigJob()
    
    md5_hta.Optimise()
    md5_hta.Close()
    sha256_hta.Optimise()
    sha256_hta.Close()
    
    del md5_hta
    del sha256_hta
    
    #
    
    # test file filter
    
    tag_filter = HydrusTags.TagFilter()
    
    # combined file service: everything in the archive comes through
    source = ClientMigration.MigrationSourceHTA(self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', None, tag_filter)
    expected_data = [(self._sha256_to_md5[hash], tags) for (hash, tags) in self._hashes_to_current_tags.items()]
    run_test(source, expected_data)
    
    source = ClientMigration.MigrationSourceHTA(self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter)
    expected_data = list(self._hashes_to_current_tags.items())
    run_test(source, expected_data)
    
    # local file service: only hashes in self._my_files_sha256 come through
    source = ClientMigration.MigrationSourceHTA(self, md5_hta_path, CC.LOCAL_FILE_SERVICE_KEY, 'md5', None, tag_filter)
    expected_data = [(self._sha256_to_md5[hash], tags) for (hash, tags) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256]
    run_test(source, expected_data)
    
    source = ClientMigration.MigrationSourceHTA(self, sha256_hta_path, CC.LOCAL_FILE_SERVICE_KEY, 'sha256', None, tag_filter)
    expected_data = [(hash, tags) for (hash, tags) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256]
    run_test(source, expected_data)
    
    # explicit hash list filter
    # not all hashes, since hash type lookup only available for imported files
    # NOTE(review): random.sample requires a sequence on py3.11+ — confirm
    # self._my_files_sha256 is a list, not a set
    hashes = random.sample(self._my_files_sha256, 25)
    
    source = ClientMigration.MigrationSourceHTA(self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', hashes, tag_filter)
    expected_data = [(self._sha256_to_md5[hash], tags) for (hash, tags) in self._hashes_to_current_tags.items() if hash in hashes]
    run_test(source, expected_data)
    
    source = ClientMigration.MigrationSourceHTA(self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', hashes, tag_filter)
    expected_data = [(hash, tags) for (hash, tags) in self._hashes_to_current_tags.items() if hash in hashes]
    run_test(source, expected_data)
    
    # test desired hash type
    # not all hashes, since hash type lookup only available for imported files
    
    expected_data = [(self._sha256_to_sha1[hash], tags) for (hash, tags) in self._hashes_to_current_tags.items() if hash in self._my_files_sha256]
    
    source = ClientMigration.MigrationSourceHTA(self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter)
    run_test(source, expected_data)
    
    source = ClientMigration.MigrationSourceHTA(self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', None, tag_filter)
    run_test(source, expected_data)
    
    # do a test with specific hashes, so md5->sha1 does interim sha256 conversion
    # not all hashes, since hash type lookup only available for imported files
    hashes = random.sample(self._my_files_sha256, 25)
    
    expected_data = [(self._sha256_to_sha1[hash], tags) for (hash, tags) in self._hashes_to_current_tags.items() if hash in hashes]
    
    source = ClientMigration.MigrationSourceHTA(self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha1', hashes, tag_filter)
    run_test(source, expected_data)
    
    # tag filter
    
    # unnamespaced tags only from here on
    tag_filter = HydrusTags.TagFilter()
    tag_filter.SetRule('', HC.FILTER_WHITELIST)
    tag_filter.SetRule(':', HC.FILTER_BLACKLIST)
    
    source = ClientMigration.MigrationSourceHTA(self, md5_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'md5', None, tag_filter)
    expected_data = [(self._sha256_to_md5[hash], tag_filter.Filter(tags)) for (hash, tags) in self._hashes_to_current_tags.items()]
    # hashes whose tags were all filtered out are not emitted at all
    expected_data = [(hash, tags) for (hash, tags) in expected_data if len(tags) > 0]
    run_test(source, expected_data)
    
    source = ClientMigration.MigrationSourceHTA(self, sha256_hta_path, CC.COMBINED_FILE_SERVICE_KEY, 'sha256', None, tag_filter)
    expected_data = [(hash, tag_filter.Filter(tags)) for (hash, tags) in self._hashes_to_current_tags.items()]
    expected_data = [(hash, tags) for (hash, tags) in expected_data if len(tags) > 0]
    run_test(source, expected_data)
    
    #
    
    os.remove(md5_hta_path)
    os.remove(sha256_hta_path)