def clean(self):
    Bind.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
def init_types(self):
    database.clean()
    fp = open(self.TYPES_PATH)
    td = TypeDescriptor(os.path.basename(self.TYPES_PATH), fp.read())
    fp.close()
    definitions = parser.parse([td])
    database.update_database(definitions)
def test_types(self):
    """
    Tests retrieving all types in the database.
    """
    # Setup
    types_db.clean()

    type_def_1 = TypeDefinition('type-1', 'Type 1', 'Type 1', [], [], [])
    type_def_2 = TypeDefinition('type-2', 'Type 2', 'Type 2', [], [], [])

    types_db._create_or_update_type(type_def_1)
    types_db._create_or_update_type(type_def_2)

    # Test
    found_defs = self.manager.types()

    # Verify
    self.assertEqual(2, len(found_defs))

    for type_def in [type_def_1, type_def_2]:
        found_def = [t for t in found_defs if t['id'] == type_def.id][0]

        self.assertEqual(found_def['id'], type_def.id)
        self.assertEqual(found_def['display_name'], type_def.display_name)
        self.assertEqual(found_def['description'], type_def.description)
        self.assertEqual(found_def['unit_key'], type_def.unit_key)
        self.assertEqual(found_def['search_indexes'], type_def.search_indexes)
        self.assertEqual(found_def['referenced_types'], type_def.referenced_types)
def test_import_cached_manifest_missing_units(self, *unused):
    # Setup
    self.populate()
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        manifest = Manifest(manifest_path)
        manifest.read()
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        # Test
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
    with mock_config.patch({'server': {'storage_dir': self.childfs}}):
        with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
            importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
def test_import(self):
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    repo = Repository(self.REPO_ID)
    cfg = {
        'protocol': 'file',
        'http': {'alias': self.alias},
        'https': {'alias': self.alias},
        'file': {'alias': self.alias},
    }
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = 'file://' + publisher.manifest_path()
    cfg = dict(manifest_url=manifest_url, strategy=constants.MIRROR_STRATEGY)
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    importer.sync_repo(repo, conduit, cfg)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
def setUp(self):
    super(QueryTests, self).setUp()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    plugin_api._create_manager()
    plugin_api._MANAGER.importers.add_plugin(constants.HTTP_IMPORTER, NodesHttpImporter, {})
def clean(self):
    super(RepoSyncConduitTests, self).clean()
    types_database.clean()
    mock_plugins.reset()
    RepoContentUnit.get_collection().remove()
    Repo.get_collection().remove()
def tearDown(self):
    PulpRPMTests.tearDown(self)
    Consumer.get_collection().remove()
    Repo.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    RepoDistributor.get_collection().remove()
    database.clean()
    plugins.finalize()
def tearDown(self):
    super(Migration0004Tests, self).tearDown()

    # Delete any sample data added for the test
    types_db.clean()
    RepoContentUnit.get_collection().remove()
    RepoImporter.get_collection().remove()
    Repo.get_collection().remove()
def tearDown(self):
    super(Migration0004Tests, self).tearDown()

    # Delete any sample data added for the test
    types_db.clean()
    RepoContentUnit.get_collection().remove()
    get_collection('repo_importers').remove()
    model.Repository.drop_collection()
def setUp(self):
    super(PluginControllerTests, self).setUp()
    plugin_api._create_manager()
    types_db.clean()

    # Configure content manager
    plugin_api._MANAGER.importers.add_plugin('MockImporter', MockImporter, {})
    plugin_api._MANAGER.distributors.add_plugin('MockDistributor', MockDistributor, {})
def clean(self):
    super(DependencyManagerTests, self).clean()
    database.clean()
    Repo.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    mock_plugins.MOCK_IMPORTER.resolve_dependencies.return_value = None
def tearDown(self):
    super(BaseProfilerConduitTests, self).tearDown()
    Consumer.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    Bind.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    UnitProfile.get_collection().remove()
    typedb.clean()
    factory.reset()
def setUp(self):
    super(QueryTests, self).setUp()
    Repo.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    for type_id in ALL_TYPES:
        unit_db.type_definition = Mock(return_value=dict(id=type_id, unit_key=UNIT_METADATA))
        unit_db.type_units_unit_key = Mock(return_value=['A', 'B', 'C', 'N'])
    plugin_api._create_manager()
    plugin_api._MANAGER.importers.add_plugin(constants.HTTP_IMPORTER, NodesHttpImporter, {})
def tearDown(self):
    WebTest.tearDown(self)
    shutil.rmtree(self.parentfs)
    shutil.rmtree(self.childfs)
    Consumer.get_collection().remove()
    Bind.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
def tearDown(self):
    ServerTests.tearDown(self)
    shutil.rmtree(self.parentfs)
    shutil.rmtree(self.childfs)
    Consumer.get_collection().remove()
    Bind.get_collection().remove()
    model.Repository.objects.delete()
    model.Distributor.objects.delete()
    model.Importer.objects.delete()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
def clean(self, units_only=False, plugins=False):
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if units_only:
        return
    Bind.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    if plugins:
        plugin_api._MANAGER.distributors.plugins = {}
def tearDown(self):
    super(BaseProfilerConduitTests, self).tearDown()
    Consumer.get_collection().remove()
    model.Repository.objects.delete()
    model.Distributor.objects.delete()
    Bind.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    UnitProfile.get_collection().remove()
    typedb.clean()
    factory.reset()
    mock_plugins.reset()
def clean(self, just_units=False, purge_plugins=False):
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    if just_units:
        return
    Bind.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    if purge_plugins:
        plugin_api._MANAGER.importers.plugins = {}
        plugin_api._MANAGER.distributors.plugins = {}
def test_types_no_types(self):
    """
    Tests an empty list is returned when no types are loaded.
    """
    # Setup
    types_db.clean()

    # Test
    found_defs = self.manager.types()

    # Verify
    self.assertTrue(isinstance(found_defs, list))
    self.assertEqual(0, len(found_defs))
def test_import_unit_files_already_exist_size_mismatch(self, *mocks):
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    parent_content = os.path.join(self.parentfs, 'content')
    child_content = os.path.join(self.childfs, 'content')
    shutil.copytree(parent_content, child_content)
    for fn in os.listdir(child_content):
        path = os.path.join(child_content, fn)
        if os.path.isdir(path):
            continue
        with open(path, 'w') as fp:
            fp.truncate()
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER,
                              RepoContentUnit.OWNER_TYPE_IMPORTER, constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(
        configuration.flatten())
def test_import_unit_files_already_exist_size_mismatch(self, *mocks):
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    parent_content = os.path.join(self.parentfs, 'content')
    child_content = os.path.join(self.childfs, 'content')
    shutil.copytree(parent_content, child_content)
    for fn in os.listdir(child_content):
        path = os.path.join(child_content, fn)
        if os.path.isdir(path):
            continue
        with open(path, 'w') as fp:
            fp.truncate()
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(
        self.REPO_ID, constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER, constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
def test_import_cached_manifest_matched(self, mock_get_working, mock_fetch, *unused):
    # Setup
    self.populate()
    mock_get_working.return_value = self.temp_dir
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        units_path = os.path.join(os.path.dirname(manifest_path), UNITS_FILE_NAME)
        manifest = Manifest(manifest_path)
        manifest.read()
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
        # Test
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
    with mock_config.patch({'server': {'storage_dir': self.childfs}}):
        with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
            importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    self.assertFalse(mock_fetch.called)
def setUp(self):
    WebTest.setUp(self)
    self.upfs = self.tmpdir('upstream-')
    self.downfs = self.tmpdir('downstream-')
    self.alias = (self.upfs, self.upfs)
    Consumer.get_collection().remove()
    Bind.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    plugin_api._create_manager()
    plugin_api._MANAGER.importers.add_plugin(CITRUS_IMPORTER, CitrusHttpImporter, {})
    plugin_api._MANAGER.distributors.add_plugin(CITRUS_DISTRUBUTOR, CitrusHttpDistributor, {})
    unit_db.type_definition = \
        Mock(return_value=dict(id=self.TYPEDEF_ID, unit_key=self.UNIT_METADATA))
    unit_db.type_units_unit_key = \
        Mock(return_value=['A', 'B', 'N'])
def setUp(self):
    WebTest.setUp(self)
    self.parentfs = self.tmpdir('parent-')
    self.childfs = self.tmpdir('child-')
    self.alias = (self.parentfs, self.parentfs)
    Consumer.get_collection().remove()
    Bind.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    plugin_api._create_manager()
    imp_conf = dict(strategy=constants.MIRROR_STRATEGY)
    plugin_api._MANAGER.importers.add_plugin(constants.HTTP_IMPORTER, NodesHttpImporter, imp_conf)
    plugin_api._MANAGER.distributors.add_plugin(constants.HTTP_DISTRIBUTOR, NodesHttpDistributor, {})
    plugin_api._MANAGER.distributors.add_plugin(FAKE_DISTRIBUTOR, FakeDistributor,
                                                FAKE_DISTRIBUTOR_CONFIG)
    plugin_api._MANAGER.profilers.add_plugin(constants.PROFILER_ID, NodeProfiler, {})
def test_import(self, *mocks):
    # Setup
    self.populate()
    max_concurrency = 5
    max_bandwidth = 12345
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        cfg = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, cfg)
        model.Repository.drop_collection()
        RepoDistributor.get_collection().remove()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        # Test
        importer = NodesHttpImporter()
        publisher = dist.publisher(repo, cfg)
        manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
            importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
            importer_constants.KEY_MAX_SPEED: max_bandwidth,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)
    with mock_config.patch({'server': {'storage_dir': self.childfs}}):
        importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(
        configuration.flatten())
def test_import_cached_manifest_matched(self, mock_fetch, *unused):
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    configuration = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, configuration)
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    publisher = dist.publisher(repo, configuration)
    manifest_path = publisher.manifest_path()
    units_path = os.path.join(os.path.dirname(manifest_path), UNITS_FILE_NAME)
    manifest = Manifest(manifest_path)
    manifest.read()
    shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
    shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
    # Test
    importer = NodesHttpImporter()
    manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(
        self.REPO_ID, constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER, constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    self.assertFalse(mock_fetch.called)
def setUp(self):
    WebTest.setUp(self)
    self.parentfs = self.tmpdir('parent-')
    self.childfs = self.tmpdir('child-')
    self.alias = (self.parentfs, self.parentfs)
    Consumer.get_collection().remove()
    Bind.get_collection().remove()
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoImporter.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    plugin_api._create_manager()
    plugin_api._MANAGER.importers.add_plugin(HTTP_IMPORTER, NodesHttpImporter, {})
    plugin_api._MANAGER.distributors.add_plugin(HTTP_DISTRIBUTOR, NodesHttpDistributor, {})
    plugin_api._MANAGER.distributors.add_plugin(FAKE_DISTRIBUTOR, FakeDistributor, {})
    unit_db.type_definition = \
        Mock(return_value=dict(id=self.TYPEDEF_ID, unit_key=self.UNIT_METADATA))
    unit_db.type_units_unit_key = \
        Mock(return_value=['A', 'B', 'N'])
def test_import_cached_manifest_units_invalid(self, *unused):
    # Setup
    self.populate()
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        model.Repository.drop_collection()
        RepoDistributor.get_collection().remove()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        manifest = Manifest(manifest_path)
        manifest.read()
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        with open(os.path.join(working_dir, UNITS_FILE_NAME), 'w+') as fp:
            fp.write('invalid-units')
        # Test
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)
    with mock_config.patch({'server': {'storage_dir': self.childfs}}):
        importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
def test_import(self, *mocks):
    # Setup
    self.populate()
    max_concurrency = 5
    max_bandwidth = 12345
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    cfg = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, cfg)
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    # Test
    importer = NodesHttpImporter()
    publisher = dist.publisher(repo, cfg)
    manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        importer_constants.KEY_MAX_DOWNLOADS: max_concurrency,
        importer_constants.KEY_MAX_SPEED: max_bandwidth,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(
        self.REPO_ID, constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER, constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
def test_import_unit_files_already_exist(self, mock_get_working, *mocks):
    # Setup
    self.populate()
    mock_get_working.return_value = self.temp_dir
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        cfg = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, cfg)
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        parent_content = os.path.join(self.parentfs, 'content')
        child_content = os.path.join(self.childfs, 'content')
        shutil.copytree(parent_content, child_content)
        # Test
        importer = NodesHttpImporter()
        publisher = dist.publisher(repo, cfg)
        manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
    with mock_config.patch({'server': {'storage_dir': self.childfs}}):
        with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
            importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(
        configuration.flatten())
def test_import_unit_files_already_exist(self, mock_get_working, *mocks):
    # Setup
    self.populate()
    mock_get_working.return_value = self.temp_dir
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        cfg = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, cfg)
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        parent_content = os.path.join(self.parentfs, 'content')
        child_content = os.path.join(self.childfs, 'content')
        shutil.copytree(parent_content, child_content)
        # Test
        importer = NodesHttpImporter()
        publisher = dist.publisher(repo, cfg)
        manifest_url = pathlib.url_join(publisher.base_url, publisher.manifest_path())
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
    with mock_config.patch({'server': {'storage_dir': self.childfs}}):
        with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
            importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    mock_importer_config_to_nectar_config = mocks[0]
    mock_importer_config_to_nectar_config.assert_called_with(configuration.flatten())
def clean(self):
    super(GetUnitsByTypeStressTest, self).clean()
    database.clean()
    RepoContentUnit.get_collection().remove()
def clean(self):
    super(RepoUnitAssociationManagerTests, self).clean()
    database.clean()
    RepoContentUnit.get_collection().remove()
    RepoImporter.get_collection().remove()
    me_model.Repository.drop_collection()
def clean(self):
    super(UnitAssociationQueryTests, self).clean()
    database.clean()
    RepoContentUnit.get_collection().remove()
def clean(self):
    super(ImporterScratchPadMixinTests, self).clean()
    types_database.clean()
    model.Repository.drop_collection()
def tearDown(self):
    super(RepoSyncConduitTests, self).tearDown()
    types_database.clean()
    mock_plugins.reset()
def tearDown(self):
    super(QueryTests, self).tearDown()
    model.Repository.drop_collection()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
def clean(self):
    super(DistributorScratchpadMixinTests, self).clean()
    types_database.clean()
    model.Repository.objects.delete()
def clean(self):
    super(DistributorScratchpadMixinTests, self).clean()
    types_database.clean()
    model.Repository.drop_collection()
def clean(self):
    super(RepoUnitAssociationManagerTests, self).clean()
    database.clean()
    RepoContentUnit.get_collection().remove()
    me_model.Repository.objects.delete()
    me_model.Importer.objects.delete()
def clean(self):
    super(TypesDatabaseTests, self).clean()
    types_db.clean()
def clean(self):
    super(PulpContentTests, self).clean()
    database.clean()
def clean(self):
    super(ImporterScratchPadMixinTests, self).clean()
    types_database.clean()
    Repo.get_collection().remove()
def clean(self):
    super(DistributorScratchpadMixinTests, self).clean()
    types_database.clean()
    Repo.get_collection().remove()
def clean(self):
    super(RepoUnitAssociationManagerTests, self).clean()
    database.clean()
    RepoContentUnit.get_collection().remove()
    RepoImporter.get_collection().remove()
    Repo.get_collection().remove()
def clean(self):
    super(self.__class__, self).clean()
    types_db.clean()
def tearDown(self):
    super(OrphanManagerTests, self).tearDown()
    RepoContentUnit.get_collection().remove()
    content_type_db.clean()
    if os.path.exists(self.content_root):  # can be removed by delete operations
        shutil.rmtree(self.content_root)