def test_drop_indexes(self):
    """
    Tests updating indexes on an existing collection with different indexes
    correctly changes them.
    """
    # Setup: build the collection with a compound unit-key index in place.
    original_key = ['compound_1', 'compound_2']
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', original_key, None, [])
    types_db._update_unit_key(type_def)

    # Test: drop the old indexes, then apply a different unit key.
    type_def.unit_key = ['new_1']
    types_db._drop_indexes(type_def)
    types_db._update_unit_key(type_def)

    # Verify: only the default (_id) index plus the new unit-key index remain.
    name = types_db.unit_collection_name(type_def.id)
    indexes = pulp_db.get_collection(name).index_information()
    self.assertEqual(2, len(indexes))
def test_update_missing_no_error(self):
    """
    Tests that updating a previously loaded database with some missing
    definitions does not throw an error.
    """
    # Setup
    defs = [DEF_1, DEF_2, DEF_3]
    types_db.update_database(defs)

    # Test
    new_defs = [DEF_4]
    types_db.update_database(new_defs)

    # Verify
    all_collection_names = types_db.all_type_collection_names()

    # Old collections are not deleted when missing from the new update.
    self.assertEqual(len(defs) + len(new_defs), len(all_collection_names))

    for d in defs:
        # assertIn produces a far more useful failure message than
        # assertTrue(x in y).
        self.assertIn(types_db.unit_collection_name(d.id), all_collection_names)

        # Quick sanity check on the indexes: _id + unit key + all search.
        # NOTE(review): assuming this check was intended per-definition
        # (inside the loop); the collapsed source was ambiguous.
        collection = types_db.type_units_collection(d.id)
        all_indexes = collection.index_information()
        total_index_count = 1 + 1 + len(d.search_indexes)
        self.assertEqual(total_index_count, len(all_indexes))
def test_update_missing_no_error(self):
    """
    Tests that updating a previously loaded database with some missing
    definitions does not throw an error.
    """
    # Setup
    defs = [DEF_1, DEF_2, DEF_3]
    types_db.update_database(defs)

    # Test: a later update that omits the earlier definitions.
    new_defs = [DEF_4]
    types_db.update_database(new_defs)

    # Verify: old collections are not deleted.
    all_collection_names = types_db.all_type_collection_names()
    self.assertEqual(len(defs) + len(new_defs), len(all_collection_names))

    for d in defs:
        # assertIn reports the container contents on failure, unlike
        # assertTrue(x in y).
        self.assertIn(types_db.unit_collection_name(d.id), all_collection_names)

        # Quick sanity check on the indexes: _id + unit key + all search.
        # NOTE(review): assumed to run per-definition inside the loop;
        # the collapsed source was ambiguous.
        collection = types_db.type_units_collection(d.id)
        all_indexes = collection.index_information()
        total_index_count = 1 + 1 + len(d.search_indexes)
        self.assertEqual(total_index_count, len(all_indexes))
def test_create_or_update_existing_type_collection(self):
    """
    Tests calling create_or_update with a change to an existing type
    collection is successful.
    """
    # Setup
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', ['name'], ['name'], [])
    types_db._create_or_update_type(type_def)

    # Test: change every mutable attribute and re-run the call.
    type_def.display_name = 'new-name'
    type_def.description = 'new-description'
    type_def.unit_key = 'new-key'
    type_def.search_indexes = None
    types_db._create_or_update_type(type_def)

    # Verify

    #   Present in types collection (updated, not duplicated)
    all_types = list(ContentType.get_collection().find())
    self.assertEqual(1, len(all_types))

    found = all_types[0]
    self.assertEqual(type_def.id, found['id'])
    self.assertEqual(type_def.display_name, found['display_name'])
    self.assertEqual(type_def.description, found['description'])
    self.assertEqual(type_def.unit_key, found['unit_key'])
    self.assertEqual(type_def.search_indexes, found['search_indexes'])

    #   Type collection exists
    collection_name = types_db.unit_collection_name(type_def.id)
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn(collection_name, pulp_db.database().collection_names())
def test_create_or_update_existing_type_collection(self):
    """
    Tests calling create_or_update with a change to an existing type
    collection is successful.
    """
    # Setup
    type_def = TypeDefinition("rpm", "RPM", "RPM Packages", ["name"], ["name"], [])
    types_db._create_or_update_type(type_def)

    # Test: mutate the definition and apply it again.
    type_def.display_name = "new-name"
    type_def.description = "new-description"
    type_def.unit_key = "new-key"
    type_def.search_indexes = None
    types_db._create_or_update_type(type_def)

    # Verify

    #   Present in types collection (updated in place, not duplicated)
    all_types = list(ContentType.get_collection().find())
    self.assertEqual(1, len(all_types))

    found = all_types[0]
    self.assertEqual(type_def.id, found["id"])
    self.assertEqual(type_def.display_name, found["display_name"])
    self.assertEqual(type_def.description, found["description"])
    self.assertEqual(type_def.unit_key, found["unit_key"])
    self.assertEqual(type_def.search_indexes, found["search_indexes"])

    #   Type collection exists
    collection_name = types_db.unit_collection_name(type_def.id)
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn(collection_name, pulp_db.get_database().collection_names())
def test_update_unit_key_single_field(self):
    """
    Tests a single field unit key is handled correctly.
    """
    # Setup
    # Explicit one-element tuple; the original relied on a bare trailing
    # comma (unit_key = 'individual_1',) which is an easy-to-miss tuple
    # literal and reads like an accidental typo.
    unit_key = ('individual_1',)
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', unit_key, None, [])

    # Test
    types_db._update_unit_key(type_def)

    # Verify
    collection_name = types_db.unit_collection_name(type_def.id)
    collection = pulp_db.get_collection(collection_name)
    index_dict = collection.index_information()

    self.assertEqual(2, len(index_dict))  # default (_id) + unit key

    # The unit-key index must be unique.
    index = index_dict['individual_1_1']
    self.assertTrue(index['unique'])

    # Single ascending key on the unit-key field.
    keys = index['key']
    self.assertEqual(1, len(keys))
    self.assertEqual('individual_1', keys[0][0])
    self.assertEqual(types_db.ASCENDING, keys[0][1])
def test_update_unit_key_multiple_fields(self):
    """
    Tests that a multiple field unit key is built as a single, compound
    index.
    """
    # Setup: a two-field unit key.
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages',
                              ['compound_1', 'compound_2'], None, [])

    # Test
    types_db._update_unit_key(type_def)

    # Verify
    collection = pulp_db.get_collection(types_db.unit_collection_name(type_def.id))
    indexes = collection.index_information()
    self.assertEqual(2, len(indexes))  # default (_id) + unit key

    # One compound, unique index covering both fields.
    compound = indexes['compound_1_1_compound_2_1']
    self.assertTrue(compound['unique'])

    keys = compound['key']
    self.assertEqual(2, len(keys))
    for position, field in enumerate(['compound_1', 'compound_2']):
        self.assertEqual(field, keys[position][0])
        self.assertEqual(types_db.ASCENDING, keys[position][1])
def test_create_or_update_existing_type_collection(self):
    """
    Tests calling create_or_update with a change to an existing type
    collection is successful.
    """
    # Setup
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', ['name'], ['name'], [])
    types_db._create_or_update_type(type_def)

    # Test: change the definition's attributes and re-apply it.
    type_def.display_name = 'new-name'
    type_def.description = 'new-description'
    type_def.unit_key = 'new-key'
    type_def.search_indexes = None
    types_db._create_or_update_type(type_def)

    # Verify

    #   Present in types collection exactly once (updated, not duplicated)
    all_types = list(ContentType.get_collection().find())
    self.assertEqual(1, len(all_types))

    found = all_types[0]
    self.assertEqual(type_def.id, found['id'])
    self.assertEqual(type_def.display_name, found['display_name'])
    self.assertEqual(type_def.description, found['description'])
    self.assertEqual(type_def.unit_key, found['unit_key'])
    self.assertEqual(type_def.search_indexes, found['search_indexes'])

    #   Type collection exists
    collection_name = types_db.unit_collection_name(type_def.id)
    # assertIn reports the container on failure, unlike assertTrue(x in y).
    self.assertIn(collection_name, pulp_db.get_database().collection_names())
def test_create_or_update_type_collection(self):
    """
    Tests the call to create a new type collection works.
    """
    # Setup
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', ['name'], ['name'], [])

    # Test
    types_db._create_or_update_type(type_def)

    # Verify

    #   Present in types collection
    all_types = list(ContentType.get_collection().find())
    self.assertEqual(1, len(all_types))

    found = all_types[0]
    self.assertEqual(type_def.id, found['id'])
    self.assertEqual(type_def.display_name, found['display_name'])
    self.assertEqual(type_def.description, found['description'])
    self.assertEqual(type_def.unit_key, found['unit_key'])
    self.assertEqual(type_def.search_indexes, found['search_indexes'])

    #   Type collection exists
    collection_name = types_db.unit_collection_name(type_def.id)
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn(collection_name, pulp_db.get_database().collection_names())
def test_update_no_changes(self):
    """
    Tests the common use case of loading type definitions that have been
    loaded already and have not changed.
    """
    # Setup
    defs = [DEF_1, DEF_2, DEF_3, DEF_4]
    types_db.update_database(defs)

    # Test
    # Reversed order on purpose; no real reason for this, just felt better
    # than using the previous list.
    same_defs = [DEF_4, DEF_3, DEF_2, DEF_1]
    types_db.update_database(same_defs)

    # Verify
    all_collection_names = types_db.all_type_collection_names()
    self.assertEqual(len(same_defs), len(all_collection_names))

    for d in defs:
        # assertIn produces a more useful failure message than
        # assertTrue(x in y).
        self.assertIn(types_db.unit_collection_name(d.id), all_collection_names)

        # Quick sanity check on the indexes: _id + unit key + all search.
        # NOTE(review): assumed to run per-definition inside the loop;
        # the collapsed source was ambiguous.
        collection = types_db.type_units_collection(d.id)
        all_indexes = collection.index_information()
        total_index_count = 1 + 1 + len(d.search_indexes)
        self.assertEqual(total_index_count, len(all_indexes))
def test_create_or_update_type_collection(self):
    """
    Tests the call to create a new type collection works.
    """
    # Setup
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', ['name'], ['name'], [])

    # Test
    types_db._create_or_update_type(type_def)

    # Verify

    #   Present in types collection
    all_types = list(ContentType.get_collection().find())
    self.assertEqual(1, len(all_types))

    found = all_types[0]
    self.assertEqual(type_def.id, found['id'])
    self.assertEqual(type_def.display_name, found['display_name'])
    self.assertEqual(type_def.description, found['description'])
    self.assertEqual(type_def.unit_key, found['unit_key'])
    self.assertEqual(type_def.search_indexes, found['search_indexes'])

    #   Type collection exists
    collection_name = types_db.unit_collection_name(type_def.id)
    # assertIn reports the container on failure, unlike assertTrue(x in y).
    self.assertIn(collection_name, pulp_db.database().collection_names())
def test_update_no_changes(self):
    """
    Tests the common use case of loading type definitions that have been
    loaded already and have not changed.
    """
    # Setup
    defs = [DEF_1, DEF_2, DEF_3, DEF_4]
    types_db.update_database(defs)

    # Test
    # Same definitions, reversed; no real reason for this, just felt better
    # than using the previous list.
    same_defs = [DEF_4, DEF_3, DEF_2, DEF_1]
    types_db.update_database(same_defs)

    # Verify: nothing added, nothing removed.
    all_collection_names = types_db.all_type_collection_names()
    self.assertEqual(len(same_defs), len(all_collection_names))

    for d in defs:
        # assertIn gives a clearer failure message than assertTrue(x in y).
        self.assertIn(types_db.unit_collection_name(d.id), all_collection_names)

        # Quick sanity check on the indexes: _id + unit key + all search.
        # NOTE(review): assumed to run per-definition inside the loop;
        # the collapsed source was ambiguous.
        collection = types_db.type_units_collection(d.id)
        all_indexes = collection.index_information()
        total_index_count = 1 + 1 + len(d.search_indexes)
        self.assertEqual(total_index_count, len(all_indexes))
def test_import_modified_units(self, mock_get_working, *mocks):
    """
    Publish a repository on the parent, back-date two units so they look
    modified, then sync as a child node and verify both were re-imported.
    """
    # Setup
    self.populate()
    mock_get_working.return_value = self.temp_dir
    max_concurrency = 5
    max_bandwidth = 12345

    # NOTE(review): with-block scopes reconstructed from call ordering in
    # the collapsed source — confirm against upstream if behavior differs.
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        cfg = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, cfg)

        # make the published unit have a newer _last_updated.
        collection = connection.get_collection(
            unit_db.unit_collection_name(self.UNIT_TYPE_ID))

        # N=0 (no file)
        unit = collection.find_one({'N': 0})
        unit['age'] = 84  # this will be updated back to 42.
        unit['_last_updated'] -= 1
        unit['_storage_path'] = None
        collection.update({'N': 0}, unit)

        # N=1
        unit = collection.find_one({'N': 1})
        unit['age'] = 85  # this will be updated back to 42.
        unit['_last_updated'] -= 1
        collection.update({'N': 1}, unit)

        # Test
        importer = NodesHttpImporter()
        publisher = dist.publisher(repo, cfg)
        manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
        config_dict = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
            importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
            importer_constants.KEY_MAX_SPEED: max_bandwidth,
        }
        configuration = PluginCallConfiguration(config_dict, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())

    with mock_config.patch({'server': {'storage_dir': self.childfs}}):
        with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
            importer.sync_repo(repo, conduit, configuration)

    # Verify: both modified units were refreshed back to the published age.
    for n in (0, 1):
        unit = collection.find_one({'N': n})
        self.assertEqual(unit['age'], 42)
def test_all_type_collection_names(self):
    """
    Tests listing all type collections.
    """
    # Setup: register a single type so exactly one collection exists.
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', ['name'], ['name'], [])
    types_db._create_or_update_type(type_def)

    # Test
    all_names = types_db.all_type_collection_names()

    # Verify: the one registered type is the only name returned.
    expected = types_db.unit_collection_name(type_def.id)
    self.assertEqual(1, len(all_names))
    self.assertEqual(expected, all_names[0])
def test_import_modified_units(self, mock_get_working, *mocks):
    """
    Publish on the parent, mark two units as stale (older _last_updated,
    wrong age), then sync as a child and check both come back at age 42.
    """
    # Setup
    self.populate()
    mock_get_working.return_value = self.temp_dir
    max_concurrency = 5
    max_bandwidth = 12345

    # NOTE(review): with-block scopes reconstructed from call ordering in
    # the collapsed source — confirm against upstream if behavior differs.
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        cfg = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, cfg)

        # make the published unit have a newer _last_updated.
        collection = connection.get_collection(
            unit_db.unit_collection_name(self.UNIT_TYPE_ID))

        # N=0 (no file)
        unit = collection.find_one({'N': 0})
        unit['age'] = 84  # this will be updated back to 42.
        unit['_last_updated'] -= 1
        unit['_storage_path'] = None
        collection.update({'N': 0}, unit)

        # N=1
        unit = collection.find_one({'N': 1})
        unit['age'] = 85  # this will be updated back to 42.
        unit['_last_updated'] -= 1
        collection.update({'N': 1}, unit)

        # Test
        importer = NodesHttpImporter()
        publisher = dist.publisher(repo, cfg)
        manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
        sync_settings = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
            importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
            importer_constants.KEY_MAX_SPEED: max_bandwidth,
        }
        configuration = PluginCallConfiguration(sync_settings, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())

    with mock_config.patch({'server': {'storage_dir': self.childfs}}):
        with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
            importer.sync_repo(repo, conduit, configuration)

    # Verify
    for n in (0, 1):
        unit = collection.find_one({'N': n})
        self.assertEqual(unit['age'], 42)
def test_update_search_indexes(self):
    """
    Tests that the unique index creation on a new collection is successful.
    This will test both single key and compound indexes to ensure mongo
    handles them successfully.
    """
    # Setup: one compound search index and one individual one.
    search_indexes = [['compound_1', 'compound_2'], 'individual_1']
    type_def = TypeDefinition('rpm', 'RPM', 'RPM Packages', None, search_indexes, [])

    # Test
    types_db._update_search_indexes(type_def)

    # Verify
    collection = pulp_db.get_collection(types_db.unit_collection_name(type_def.id))
    indexes = collection.index_information()
    self.assertEqual(3, len(indexes))  # default (_id) + definition ones

    # Individual index: a single ascending key.
    single_keys = indexes['individual_1_1']['key']
    self.assertEqual(1, len(single_keys))
    self.assertEqual('individual_1', single_keys[0][0])
    self.assertEqual(types_db.ASCENDING, single_keys[0][1])

    # Compound index: both keys ascending, in declaration order.
    compound_keys = indexes['compound_1_1_compound_2_1']['key']
    self.assertEqual(2, len(compound_keys))
    for position, field in enumerate(['compound_1', 'compound_2']):
        self.assertEqual(field, compound_keys[position][0])
        self.assertEqual(types_db.ASCENDING, compound_keys[position][1])
def test_import_modified_units(self, *mocks):
    """
    Publish on the parent, back-date one unit so it looks modified, then
    sync as a child node and verify the unit was refreshed to age 42.
    """
    # Setup
    self.populate()
    max_concurrency = 5
    max_bandwidth = 12345
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)

    # Make the published unit look stale: change its age and back-date
    # _last_updated so the importer treats it as modified.
    collection = connection.get_collection(
        unit_db.unit_collection_name(self.UNIT_TYPE_ID))
    unit = collection.find_one({'N': 0})
    unit['age'] = 84
    unit['_last_updated'] -= 1
    collection.update({'N': 0}, unit, safe=True)

    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
    sync_settings = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
        importer_constants.KEY_MAX_SPEED: max_bandwidth,
    }
    configuration = PluginCallConfiguration(sync_settings, {})
    conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                              RepoContentUnit.OWNER_TYPE_IMPORTER,
                              constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)

    # Verify
    refreshed = collection.find_one({'N': 0})
    self.assertEqual(refreshed['age'], 42)
def test_import_modified_units(self, *mocks):
    """
    Publish the parent repository, mark a unit as modified, then run a
    child-node sync and confirm the modified unit is re-imported.
    """
    # Setup
    self.populate()
    max_concurrency = 5
    max_bandwidth = 12345
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)

    # Back-date the published unit's _last_updated and change its age so
    # the sync sees it as modified.
    collection = connection.get_collection(
        unit_db.unit_collection_name(self.UNIT_TYPE_ID))
    unit = collection.find_one({'N': 0})
    unit['age'] = 84
    unit['_last_updated'] -= 1
    collection.update({'N': 0}, unit, safe=True)

    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
    settings = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
        importer_constants.KEY_MAX_SPEED: max_bandwidth,
    }
    configuration = PluginCallConfiguration(settings, {})
    conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                              RepoContentUnit.OWNER_TYPE_IMPORTER,
                              constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)

    # Verify: the sync restored the published value.
    synced = collection.find_one({'N': 0})
    self.assertEqual(synced['age'], 42)
def test_update_clean_database(self):
    """
    Tests calling update on a completely clean types database.
    """
    # Test
    defs = [DEF_1, DEF_2, DEF_3, DEF_4]
    types_db.update_database(defs)

    # Verify
    all_collection_names = types_db.all_type_collection_names()
    self.assertEqual(len(defs), len(all_collection_names))

    for d in defs:
        # assertIn produces a more useful failure message than
        # assertTrue(x in y).
        self.assertIn(types_db.unit_collection_name(d.id), all_collection_names)

        # Quick sanity check on the indexes: _id + unit key + all search.
        # NOTE(review): assumed to run per-definition inside the loop;
        # the collapsed source was ambiguous.
        collection = types_db.type_units_collection(d.id)
        all_indexes = collection.index_information()
        total_index_count = 1 + 1 + len(d.search_indexes)
        self.assertEqual(total_index_count, len(all_indexes))