def _test_pairs_list_to_htpa(self, content_type):
    """Migrate a list of tag pairs into an HTPA file and verify the archive contents."""
    
    def check_htpa_written(pair_source, htpa_file_path, pair_content_type, expected_pairs):
        
        # run the migration job, writing the pairs into the archive file
        htpa_destination = ClientMigration.MigrationDestinationHTPA(self, htpa_file_path, pair_content_type)
        
        migration_job = ClientMigration.MigrationJob(self, 'test', pair_source, htpa_destination)
        migration_job.Run()
        
        # re-open the archive and compare as sets, since iteration order is not guaranteed
        archive = HydrusTagArchive.HydrusTagPairArchive(htpa_file_path)
        
        written_pairs = list(archive.IteratePairs())
        
        self.assertEqual(set(written_pairs), set(expected_pairs))
        
        archive.Close()
    
    (current, pending, to_be_pended, deleted) = pair_types_to_pools[content_type]
    
    htpa_path = os.path.join(TestController.DB_DIR, 'htpa.db')
    
    #
    
    source = ClientMigration.MigrationSourceList(self, current)
    
    check_htpa_written(source, htpa_path, content_type, list(current))
    
    #
    
    os.remove(htpa_path)
def _test_mappings_list_to_hta(self):
    """Migrate hash->tags mappings into HTA files, once keyed by md5 and once by sha256."""
    
    def check_hta_written(mapping_source, hta_file_path, hash_type, expected_mappings):
        
        # run the migration job, writing the mappings into the archive file
        hta_destination = ClientMigration.MigrationDestinationHTA(self, hta_file_path, hash_type)
        
        migration_job = ClientMigration.MigrationJob(self, 'test', mapping_source, hta_destination)
        migration_job.Run()
        
        # re-open the archive and compare as dicts, since iteration order is not guaranteed
        archive = HydrusTagArchive.HydrusTagArchive(hta_file_path)
        
        written_mappings = list(archive.IterateMappings())
        
        self.assertEqual(dict(written_mappings), dict(expected_mappings))
        
        archive.Close()
    
    md5_hta_path = os.path.join(TestController.DB_DIR, 'md5hta.db')
    sha256_hta_path = os.path.join(TestController.DB_DIR, 'sha256hta.db')
    
    #
    
    # same tag data, but keyed by md5 for the md5 archive
    md5_data = [(self._sha256_to_md5[sha256_hash], tags) for (sha256_hash, tags) in self._hashes_to_current_tags.items()]
    sha256_data = list(self._hashes_to_current_tags.items())
    
    md5_source = ClientMigration.MigrationSourceList(self, md5_data)
    sha256_source = ClientMigration.MigrationSourceList(self, sha256_data)
    
    check_hta_written(md5_source, md5_hta_path, 'md5', md5_data)
    check_hta_written(sha256_source, sha256_hta_path, 'sha256', sha256_data)
    
    #
    
    os.remove(md5_hta_path)
    os.remove(sha256_hta_path)
def _test_mappings_list_to_list(self):
    """Round-trip the current hash->tags mappings through a list source and list destination."""
    
    expected_mappings = list(self._hashes_to_current_tags.items())
    
    # guard against a vacuous test on an empty fixture
    self.assertTrue(len(expected_mappings) > 0)
    
    list_source = ClientMigration.MigrationSourceList(self, expected_mappings)
    list_destination = ClientMigration.MigrationDestinationListMappings(self)
    
    migration_job = ClientMigration.MigrationJob(self, 'test', list_source, list_destination)
    migration_job.Run()
    
    # the destination should have received exactly the source data, in order
    self.assertEqual(list_destination.GetDataReceived(), expected_mappings)
def _test_pairs_list_to_list(self, content_type):
    """Round-trip the 'current' pair pool for content_type through a list source and list destination."""
    
    (current, pending, to_be_pended, deleted) = pair_types_to_pools[content_type]
    
    expected_pairs = list(current)
    
    # guard against a vacuous test on an empty pool
    self.assertTrue(len(expected_pairs) > 0)
    
    list_source = ClientMigration.MigrationSourceList(self, expected_pairs)
    list_destination = ClientMigration.MigrationDestinationListPairs(self)
    
    migration_job = ClientMigration.MigrationJob(self, 'test', list_source, list_destination)
    migration_job.Run()
    
    # the destination should have received exactly the source data, in order
    self.assertEqual(list_destination.GetDataReceived(), expected_pairs)
def _test_mappings_list_to_service(self):
    """Migrate list-sourced mappings into tag services and verify each content action.
    
    Exercises add/delete/clear-delete-record against the local tag service and
    pend/petition against a test tag repository, then checks the resulting tag
    statuses on the affected media results.
    """
    
    def run_test(source, tag_service_key, content_action, expected_data):
        
        destination = ClientMigration.MigrationDestinationTagServiceMappings(self, tag_service_key, content_action)
        
        job = ClientMigration.MigrationJob(self, 'test', source, destination)
        job.Run()
        
        # reset the media result cache so the Read below reflects the migration's writes
        self._db._weakref_media_result_cache = ClientMediaResultCache.MediaResultCache()
        
        hashes_to_media_results = {media_result.GetHash(): media_result for media_result in self.Read('media_results', list(self._hashes_to_current_tags.keys()))}
        
        # for every (hash, tags) we migrated, check the tags landed in the expected status pool
        for (hash, tags) in expected_data:
            
            media_result = hashes_to_media_results[hash]
            
            t_m = media_result.GetTagsManager()
            
            if content_action == HC.CONTENT_UPDATE_ADD:
                
                current_tags = t_m.GetCurrent(tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)
                
                for tag in tags:
                    
                    self.assertIn(tag, current_tags)
                
            elif content_action == HC.CONTENT_UPDATE_DELETE:
                
                # a delete should remove the tag from current and leave a deletion record
                current_tags = t_m.GetCurrent(tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)
                deleted_tags = t_m.GetDeleted(tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)
                
                for tag in tags:
                    
                    self.assertNotIn(tag, current_tags)
                    self.assertIn(tag, deleted_tags)
                
            elif content_action == HC.CONTENT_UPDATE_CLEAR_DELETE_RECORD:
                
                deleted_tags = t_m.GetDeleted(tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)
                
                for tag in tags:
                    
                    self.assertNotIn(tag, deleted_tags)
                
            elif content_action == HC.CONTENT_UPDATE_PEND:
                
                pending_tags = t_m.GetPending(tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)
                
                for tag in tags:
                    
                    self.assertIn(tag, pending_tags)
                
            elif content_action == HC.CONTENT_UPDATE_PETITION:
                
                petitioned_tags = t_m.GetPetitioned(tag_service_key, ClientTags.TAG_DISPLAY_STORAGE)
                
                for tag in tags:
                    
                    self.assertIn(tag, petitioned_tags)
    
    #
    
    # local add
    
    data = [(hash, set(random.sample(to_be_pended_tag_pool, 2))) for hash in self._hashes_to_current_tags.keys()]
    
    source = ClientMigration.MigrationSourceList(self, data)
    
    run_test(source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_UPDATE_ADD, data)
    
    # local delete
    
    data = [(hash, set(random.sample(tags, 2))) for (hash, tags) in self._hashes_to_current_tags.items()]
    
    source = ClientMigration.MigrationSourceList(self, data)
    
    run_test(source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_UPDATE_DELETE, data)
    
    # local clear deletion record
    
    data = [(hash, set(random.sample(tags, 2))) for (hash, tags) in self._hashes_to_deleted_tags.items()]
    
    source = ClientMigration.MigrationSourceList(self, data)
    
    run_test(source, CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_UPDATE_CLEAR_DELETE_RECORD, data)
    
    # tag repo pend
    
    data = [(hash, set(random.sample(to_be_pended_tag_pool, 2))) for hash in self._hashes_to_current_tags.keys()]
    
    source = ClientMigration.MigrationSourceList(self, data)
    
    run_test(source, self._test_tag_repo_service_keys[1], HC.CONTENT_UPDATE_PEND, data)
    
    # tag repo petition
    
    data = [(hash, set(random.sample(tags, 2))) for (hash, tags) in self._hashes_to_current_tags.items()]
    
    source = ClientMigration.MigrationSourceList(self, data)
    
    run_test(source, self._test_tag_repo_service_keys[1], HC.CONTENT_UPDATE_PETITION, data)
def _test_pairs_list_to_service(self, content_type):
    """Migrate list-sourced tag pairs into tag services and verify each content action.
    
    Exercises add/delete against the local tag service and pend/petition against a
    test tag repository, then checks the pairs landed in the expected status pool
    of the parents/siblings store for content_type.
    """
    
    def run_test(source, tag_service_key, content_action, expected_data):
        
        destination = ClientMigration.MigrationDestinationTagServicePairs(self, tag_service_key, content_action, content_type)
        
        job = ClientMigration.MigrationJob(self, 'test', source, destination)
        job.Run()
        
        # fetch the post-migration status->pairs state for the relevant pair store
        if content_type == HC.CONTENT_TYPE_TAG_PARENTS:
            
            statuses_to_pairs = self.Read('tag_parents', tag_service_key)
            
        elif content_type == HC.CONTENT_TYPE_TAG_SIBLINGS:
            
            statuses_to_pairs = self.Read('tag_siblings', tag_service_key)
        
        # map the content action to the status pool the pairs must (not) appear in
        if content_action == HC.CONTENT_UPDATE_ADD:
            
            should_be_in = set(statuses_to_pairs[HC.CONTENT_STATUS_CURRENT])
            should_not_be_in = set(statuses_to_pairs[HC.CONTENT_STATUS_DELETED])
            
        elif content_action == HC.CONTENT_UPDATE_DELETE:
            
            should_be_in = set(statuses_to_pairs[HC.CONTENT_STATUS_DELETED])
            should_not_be_in = set(statuses_to_pairs[HC.CONTENT_STATUS_CURRENT])
            
        elif content_action == HC.CONTENT_UPDATE_PEND:
            
            should_be_in = set(statuses_to_pairs[HC.CONTENT_STATUS_PENDING])
            should_not_be_in = set()
            
        elif content_action == HC.CONTENT_UPDATE_PETITION:
            
            should_be_in = set(statuses_to_pairs[HC.CONTENT_STATUS_PETITIONED])
            should_not_be_in = set()
        
        for pair in expected_data:
            
            self.assertIn(pair, should_be_in)
            self.assertNotIn(pair, should_not_be_in)
    
    #
    
    # NOTE(review): the sibling method _test_mappings_list_to_service uses
    # self._test_tag_repo_service_keys[1] — confirm index 11 is intended here
    tag_repo_service_key = self._test_tag_repo_service_keys[11]
    
    (current, pending, to_be_pended, deleted) = pair_types_to_pools[content_type]
    
    test_rows = []
    
    test_rows.append((CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, to_be_pended, HC.CONTENT_UPDATE_ADD))
    test_rows.append((CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, random.sample(current, 3), HC.CONTENT_UPDATE_DELETE))
    test_rows.append((tag_repo_service_key, to_be_pended, HC.CONTENT_UPDATE_PEND))
    test_rows.append((tag_repo_service_key, random.sample(current, 3), HC.CONTENT_UPDATE_PETITION))
    
    for (service_key, data, action) in test_rows:
        
        source = ClientMigration.MigrationSourceList(self, data)
        
        run_test(source, service_key, action, data)