class PluginCallConfigurationTests(unittest.TestCase):
    """Tests for PluginCallConfiguration.flatten() layering.

    Four config layers share the key 'a' so that precedence is observable:
    override > repo-plugin > plugin > default.
    """

    def setUp(self):
        super(PluginCallConfigurationTests, self).setUp()
        # Highest-precedence layer.
        self.override_config = {'a': 'a4', 'b': 'b4', 'e': 'e4'}
        self.repo_plugin_config = {'a': 'a3', 'c': 'c3'}
        self.plugin_config = {'a': 'a2', 'b': 'b2', 'd': 'd2'}
        # Lowest-precedence layer; attached after construction below.
        self.default_config = {'a': 'a1'}
        self.config = PluginCallConfiguration(self.plugin_config,
                                              self.repo_plugin_config,
                                              self.override_config)
        self.config.default_config = self.default_config

    def test_flatten(self):
        # Test
        flattened = self.config.flatten()

        # Verify: one entry per distinct key, highest-precedence value wins.
        # assertIsInstance gives a clearer failure message than
        # assertTrue(isinstance(...)).
        self.assertIsInstance(flattened, dict)
        self.assertEqual(5, len(flattened))
        self.assertEqual(flattened['a'], 'a4')
        self.assertEqual(flattened['b'], 'b4')
        self.assertEqual(flattened['c'], 'c3')
        self.assertEqual(flattened['d'], 'd2')
        self.assertEqual(flattened['e'], 'e4')
# NOTE(review): this class is a byte-for-byte duplicate of the
# PluginCallConfigurationTests definition immediately above it in this file;
# the later definition shadows the earlier one at import time. Confirm and
# remove one of the two.
class PluginCallConfigurationTests(unittest.TestCase):
    """Tests for PluginCallConfiguration.flatten() layering.

    Four config layers share the key 'a' so that precedence is observable:
    override > repo-plugin > plugin > default.
    """

    def setUp(self):
        super(PluginCallConfigurationTests, self).setUp()
        # Highest-precedence layer.
        self.override_config = {'a': 'a4', 'b': 'b4', 'e': 'e4'}
        self.repo_plugin_config = {'a': 'a3', 'c': 'c3'}
        self.plugin_config = {'a': 'a2', 'b': 'b2', 'd': 'd2'}
        # Lowest-precedence layer; attached after construction below.
        self.default_config = {'a': 'a1'}
        self.config = PluginCallConfiguration(self.plugin_config,
                                              self.repo_plugin_config,
                                              self.override_config)
        self.config.default_config = self.default_config

    def test_flatten(self):
        # Test
        flattened = self.config.flatten()

        # Verify: one entry per distinct key, highest-precedence value wins.
        # assertIsInstance gives a clearer failure message than
        # assertTrue(isinstance(...)).
        self.assertIsInstance(flattened, dict)
        self.assertEqual(5, len(flattened))
        self.assertEqual(flattened['a'], 'a4')
        self.assertEqual(flattened['b'], 'b4')
        self.assertEqual(flattened['c'], 'c3')
        self.assertEqual(flattened['d'], 'd2')
        self.assertEqual(flattened['e'], 'e4')
def test_import_unit_files_already_exist_size_mismatch(
        self, mock_get_working, *mocks):
    """Sync must re-import units whose local files exist but differ in size.

    Publishes a parent repo, copies its content tree to the child but
    truncates every file (forcing a size mismatch), then syncs the child
    and verifies all units were imported.

    :param mock_get_working: patched working-directory lookup (from a
        class/method decorator outside this view — TODO confirm).
    :param mocks: remaining patches; mocks[0] is the
        importer_config_to_nectar_config patch.
    """
    # Setup
    self.populate()
    mock_get_working.return_value = self.temp_dir
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        cfg = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, cfg)
        # Reset DB state so the import below starts from a clean child.
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        parent_content = os.path.join(self.parentfs, 'content')
        child_content = os.path.join(self.childfs, 'content')
        shutil.copytree(parent_content, child_content)
        # Truncate each copied file so its size no longer matches the
        # manifest, exercising the size-mismatch re-download path.
        for fn in os.listdir(child_content):
            path = os.path.join(child_content, fn)
            if os.path.isdir(path):
                continue
            with open(path, 'w') as fp:
                fp.truncate()
        # Test
        importer = NodesHttpImporter()
        publisher = dist.publisher(repo, cfg)
        manifest_url = pathlib.url_join(publisher.base_url,
                                        publisher.manifest_path())
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
                importer.sync_repo(repo, conduit, configuration)
        # Verify
        units = conduit.get_units()
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(len(units), self.NUM_UNITS)
        mock_importer_config_to_nectar_config = mocks[0]
        mock_importer_config_to_nectar_config.assert_called_with(
            configuration.flatten())
def test_import_unit_files_already_exist_size_mismatch(self, *mocks):
    """Sync must re-import units whose local files exist but differ in size.

    Publishes a parent repo, copies its content tree to the child but
    truncates every file (forcing a size mismatch), then syncs the child
    and verifies all units were imported.

    :param mocks: patches from decorators outside this view; mocks[0] is
        the importer_config_to_nectar_config patch.
    """
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    # Reset DB state so the import below starts from a clean child.
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    parent_content = os.path.join(self.parentfs, 'content')
    child_content = os.path.join(self.childfs, 'content')
    shutil.copytree(parent_content, child_content)
    # Truncate each copied file so its size no longer matches the
    # manifest, exercising the size-mismatch re-download path.
    for fn in os.listdir(child_content):
        path = os.path.join(child_content, fn)
        if os.path.isdir(path):
            continue
        with open(path, 'w') as fp:
            fp.truncate()
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url,
                                    publisher.manifest_path())
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    # Point storage at the child filesystem for the sync.
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(
        configuration.flatten())
def test_import(self, *mocks):
    """Sync a child repo from a published parent and verify unit import.

    Also passes max-download/max-bandwidth settings through the importer
    configuration and checks they reach the nectar config builder.

    :param mocks: patches from decorators outside this view; mocks[0] is
        the importer_config_to_nectar_config patch.
    """
    # Setup
    self.populate()
    max_concurrency = 5
    max_bandwidth = 12345
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        cfg = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, cfg)
        # Reset DB state so the import below starts from a clean child.
        model.Repository.drop_collection()
        RepoDistributor.get_collection().remove()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        # Test
        importer = NodesHttpImporter()
        publisher = dist.publisher(repo, cfg)
        manifest_url = pathlib.url_join(publisher.base_url,
                                        publisher.manifest_path())
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
            importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
            importer_constants.KEY_MAX_SPEED: max_bandwidth,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
        # Verify
        units = conduit.get_units()
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(len(units), self.NUM_UNITS)
        mock_importer_config_to_nectar_config = mocks[0]
        mock_importer_config_to_nectar_config.assert_called_with(
            configuration.flatten())
def test_import(self, *mocks):
    """Sync a child repo from a published parent and verify unit import.

    Also passes max-download/max-bandwidth settings through the importer
    configuration and checks they reach the nectar config builder.

    :param mocks: patches from decorators outside this view; mocks[0] is
        the importer_config_to_nectar_config patch.
    """
    # Setup
    self.populate()
    max_concurrency = 5
    max_bandwidth = 12345
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    # Reset DB state so the import below starts from a clean child.
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url,
                                    publisher.manifest_path())
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
        importer_constants.KEY_MAX_SPEED: max_bandwidth,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    # Point storage at the child filesystem for the sync.
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(
        configuration.flatten())
def test_import_unit_files_already_exist(self, *mocks):
    """Sync must succeed when the child already has identical unit files.

    Publishes a parent repo, copies its content tree to the child intact,
    then syncs the child and verifies all units were imported.

    :param mocks: patches from decorators outside this view; mocks[0] is
        the importer_config_to_nectar_config patch.
    """
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    # Reset DB state so the import below starts from a clean child.
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    # Pre-seed the child with the parent's content files unchanged.
    parent_content = os.path.join(self.parentfs, 'content')
    child_content = os.path.join(self.childfs, 'content')
    shutil.copytree(parent_content, child_content)
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url,
                                    publisher.manifest_path())
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(self.REPO_ID,
                              constants.HTTP_IMPORTER,
                              RepoContentUnit.OWNER_TYPE_IMPORTER,
                              constants.HTTP_IMPORTER)
    # Point storage at the child filesystem for the sync.
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(
        configuration.flatten())
def test_import_unit_files_already_exist(self, mock_get_working, *mocks):
    """Sync must succeed when the child already has identical unit files.

    Publishes a parent repo, copies its content tree to the child intact,
    then syncs the child and verifies all units were imported.

    :param mock_get_working: patched working-directory lookup (from a
        class/method decorator outside this view — TODO confirm).
    :param mocks: remaining patches; mocks[0] is the
        importer_config_to_nectar_config patch.
    """
    # Setup
    self.populate()
    mock_get_working.return_value = self.temp_dir
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        cfg = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, cfg)
        # Reset DB state so the import below starts from a clean child.
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        # Pre-seed the child with the parent's content files unchanged.
        parent_content = os.path.join(self.parentfs, 'content')
        child_content = os.path.join(self.childfs, 'content')
        shutil.copytree(parent_content, child_content)
        # Test
        importer = NodesHttpImporter()
        publisher = dist.publisher(repo, cfg)
        manifest_url = pathlib.url_join(publisher.base_url,
                                        publisher.manifest_path())
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
                importer.sync_repo(repo, conduit, configuration)
        # Verify
        units = conduit.get_units()
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(len(units), self.NUM_UNITS)
        mock_importer_config_to_nectar_config = mocks[0]
        mock_importer_config_to_nectar_config.assert_called_with(
            configuration.flatten())