def test_import_cached_manifest_missing_units(self, *unused):
    """
    A cached manifest without its units file must force the importer to
    fetch the units from the parent; the sync still imports all units.
    """
    # Setup: publish on the "parent", then wipe the child-side DB state.
    self.populate()
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        manifest = Manifest(manifest_path)
        manifest.read()
        # Seed the working dir with the manifest only -- deliberately NOT
        # the units file, which is the condition under test.
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        # Test: sync the child repository from the published parent.
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
                importer.sync_repo(repo, conduit, configuration)
        # Verify: every published unit was imported.
        units = conduit.get_units()
        self.assertEqual(len(units), self.NUM_UNITS)
def publish(self, units):
    """
    Publish the specified units.
    Writes the units.json file and symlinks each of the files
    associated to the unit.storage_path.
    :param units: A list of units to publish.
    :type units: iterable
    :return: The path to the written manifest.
    :rtype: str
    """
    # Let the base class lay out the files, then stamp the resulting
    # manifest with the URL it will be served from.
    # NOTE(review): super(self.__class__, self) recurses infinitely if this
    # class is ever subclassed -- confirm and prefer the explicit class name.
    path = super(self.__class__, self).publish(units)
    served_url = pathlib.url_join(self.base_url, self.alias[0], self.repo_id)
    manifest = Manifest()
    manifest.read(path)
    manifest.publishing_details[constants.BASE_URL] = served_url
    manifest.write(path)
    return path
def publish(self, units):
    """
    Publish the specified units.
    Writes the units.json file and symlinks each of the files
    associated to the unit.storage_path.
    :param units: A list of units to publish.
    :type units: iterable
    :return: The path to the written manifest.
    :rtype: str
    """
    # Base-class publish does the heavy lifting and returns the manifest path.
    # NOTE(review): super(self.__class__, self) is fragile under subclassing
    # (infinite recursion) -- confirm and consider naming the class explicitly.
    manifest_path = super(self.__class__, self).publish(units)
    # Record the public base URL inside the manifest so consumers can
    # resolve the published content.
    manifest = Manifest()
    manifest.read(manifest_path)
    manifest.publishing_details[constants.BASE_URL] = pathlib.url_join(
        self.base_url, self.alias[0], self.repo_id)
    manifest.write(manifest_path)
    return manifest_path
def test_import_cached_manifest_matched(self, mock_get_working, mock_fetch, *unused):
    """
    When the cached manifest AND units file both match the remote manifest,
    the importer must not re-fetch the units (mock_fetch stays uncalled).
    """
    # Setup: publish on the "parent", then wipe the child-side DB state.
    self.populate()
    mock_get_working.return_value = self.temp_dir
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        units_path = os.path.join(os.path.dirname(manifest_path), UNITS_FILE_NAME)
        manifest = Manifest(manifest_path)
        manifest.read()
        # Seed the working dir with BOTH cached files so they match remote.
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
        # Test: sync the child repository from the published parent.
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
                importer.sync_repo(repo, conduit, configuration)
        # Verify: all units imported and the cached units were reused.
        units = conduit.get_units()
        self.assertEqual(len(units), self.NUM_UNITS)
        self.assertFalse(mock_fetch.called)
def test_import_cached_manifest_matched(self, mock_fetch, *unused):
    """
    When the cached manifest and units file both match the remote manifest,
    the importer must not re-fetch the units (mock_fetch stays uncalled).
    """
    # Setup: publish on the "parent", then wipe the child-side DB state.
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    configuration = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, configuration)
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    publisher = dist.publisher(repo, configuration)
    manifest_path = publisher.manifest_path()
    units_path = os.path.join(os.path.dirname(manifest_path), UNITS_FILE_NAME)
    manifest = Manifest(manifest_path)
    manifest.read()
    # Seed the working dir with BOTH cached files so they match remote.
    shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
    shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
    # Test: sync the child repository from the published parent.
    importer = NodesHttpImporter()
    manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify: all units imported and the cached units were reused.
    units = conduit.get_units()
    self.assertEqual(len(units), self.NUM_UNITS)
    self.assertFalse(mock_fetch.called)
def _unit_inventory(self, request): """ Build the unit inventory. :param request: A synchronization request. :type request: SyncRequest :return: The built inventory. :rtype: UnitInventory """ # fetch child units try: conduit = NodesConduit() child_units = conduit.get_units(request.repo_id) except NodeError: raise except Exception: _log.exception(request.repo_id) raise GetChildUnitsError(request.repo_id) # fetch parent units try: request.progress.begin_manifest_download() url = request.config.get(constants.MANIFEST_URL_KEYWORD) manifest = Manifest(request.working_dir) try: manifest.read() except IOError, e: if e.errno == errno.ENOENT: pass except ValueError: # json decoding failed pass fetched_manifest = RemoteManifest(url, request.downloader, request.working_dir) fetched_manifest.fetch() if manifest != fetched_manifest or \ not manifest.is_valid() or not manifest.has_valid_units(): fetched_manifest.write() fetched_manifest.fetch_units() manifest = fetched_manifest if not manifest.is_valid(): raise InvalidManifestError()
def test_import_cached_manifest_units_invalid(self, *unused):
    """
    A cached units file containing invalid JSON must be discarded and the
    units re-fetched from the parent; the sync still imports all units.
    """
    # Setup: publish on the "parent", then wipe the child-side DB state.
    self.populate()
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        model.Repository.drop_collection()
        RepoDistributor.get_collection().remove()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        manifest = Manifest(manifest_path)
        manifest.read()
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        # Corrupt the cached units file -- the condition under test.
        with open(os.path.join(working_dir, UNITS_FILE_NAME), 'w+') as fp:
            fp.write('invalid-units')
        # Test: sync the child repository from the published parent.
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
        # Verify: every published unit was imported despite the bad cache.
        units = conduit.get_units()
        self.assertEqual(len(units), self.NUM_UNITS)
def _unit_inventory(self, request): """ Build the unit inventory. :param request: A synchronization request. :type request: SyncRequest :return: The built inventory. :rtype: UnitInventory """ # fetch child units try: conduit = NodesConduit() child_units = conduit.get_units(request.repo_id) except NodeError: raise except Exception: _log.exception(request.repo_id) raise GetChildUnitsError(request.repo_id) # fetch parent units try: request.progress.begin_manifest_download() url = request.config.get(constants.MANIFEST_URL_KEYWORD) manifest = Manifest(request.working_dir) try: manifest.read() except IOError, e: if e.errno == errno.ENOENT: pass except ValueError: # json decoding failed pass fetched_manifest = RemoteManifest(url, request.downloader, request.working_dir) fetched_manifest.fetch() if manifest != fetched_manifest or \ not manifest.is_valid() or not manifest.has_valid_units(): fetched_manifest.write() fetched_manifest.fetch_units() manifest = fetched_manifest if not manifest.is_valid(): raise InvalidManifestError()
def _parent_units(self):
    """
    Fetch the list of units published by the parent nodes distributor.
    This is performed by reading the manifest at the URL defined
    in the configuration.
    :return: A dictionary of units keyed by UnitKey.
    :rtype: dict
    """
    # The manifest URL points at the parent's published manifest; the
    # downloader performs the actual transfer.
    url = self.config.get('manifest_url')
    manifest = Manifest()
    units = manifest.read(url, self.downloader)
    return unit_dictionary(units)
def test_round_trip(self):
    """
    Units written to a manifest and read back through a file:// URL
    must round-trip unchanged.
    """
    # Test: write a manifest of simple one-entry dicts, then read it back.
    units = [{i: i + 1} for i in range(self.NUM_UNITS)]
    manifest = Manifest()
    manifest.write(self.tmp_dir, units)
    cfg = DownloaderConfig('http')
    downloader = factory.get_downloader(cfg)
    manifest = Manifest()
    path = os.path.join(self.tmp_dir, Manifest.FILE_NAME)
    url = 'file://%s' % path
    units_in = manifest.read(url, downloader)
    # Verify
    self.verify(units, units_in)
def test_read(self):
    """
    A gzipped JSON units file written by hand must be readable through
    Manifest.read() via a file:// URL.
    """
    # Setup: write the expected gzipped JSON payload directly.
    units = [{i: i + 1} for i in range(self.NUM_UNITS)]
    s = json.dumps(units)
    path = os.path.join(self.tmp_dir, Manifest.FILE_NAME)
    # closing() guarantees the gzip stream is flushed/closed even on error
    # (gzip objects are not context managers on older interpreters).
    from contextlib import closing
    with closing(gzip.open(path, 'wb')) as fp:
        fp.write(s)
    # Test
    cfg = DownloaderConfig('http')
    downloader = factory.get_downloader(cfg)
    manifest = Manifest()
    path = os.path.join(self.tmp_dir, Manifest.FILE_NAME)
    url = 'file://%s' % path
    units_in = manifest.read(url, downloader)
    # Verify
    self.verify(units, units_in)
def test_publisher(self):
    """
    Publishing units must produce a readable manifest whose download URLs
    point at symlinks to the original content files.
    """
    # Setup: create three small content files, each with a matching unit.
    units = []
    for n in range(3):
        fn = 'test_%d' % n
        path = os.path.join(self.unit_dir, fn)
        with open(path, 'w') as fp:
            fp.write(fn)
        unit = {'type_id': 'unit', 'unit_key': {'n': n}, 'storage_path': path}
        units.append(unit)
    # Test: publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    p = HttpPublisher(base_url, virtual_host, repo_id)
    p.publish(units)
    # Verify: read the manifest back and check each published unit.
    conf = DownloaderConfig('http')
    downloader = factory.get_downloader(conf)
    manifest_path = p.manifest_path()
    manifest = Manifest()
    url = 'file://' + manifest_path
    units = manifest.read(url, downloader)
    for n, unit in enumerate(units):
        file_content = 'test_%d' % n
        _download = unit['_download']
        url = _download['url']
        # The URL must live under <base>/<publish_dir>/<repo>/content.
        self.assertEqual(
            url.rsplit('/', 1)[0],
            '/'.join((base_url, publish_dir[1:], repo_id, 'content')))
        path = url.split('//', 1)[1]
        self.assertTrue(os.path.islink(path))
        with open(path) as f:
            s = f.read()
        self.assertEqual(s, file_content)
        self.assertEqual(unit['unit_key']['n'], n)
def test_publish(self):
    """
    A repository publish must expose every unit's (non-private) metadata
    unchanged through the published manifest.
    """
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    # Test
    dist = NodesHttpDistributor()
    repo = Repository(self.REPO_ID)
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, self.dist_conf())
    # Verify: read the published manifest back over HTTPS.
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest = Manifest()
    pub = dist.publisher(repo, self.dist_conf())
    url = '/'.join((pub.base_url, pub.manifest_path()))
    units = list(manifest.read(url, downloader))
    self.assertEqual(len(units), self.NUM_UNITS)
    # Lengths are equal (asserted above), so zip pairs each published unit
    # with the unit it was created from.
    for unit, created in zip(units, self.units):
        for p, v in unit['metadata'].items():
            if p.startswith('_'):
                # Private/internal fields are not expected to round-trip.
                continue
            self.assertEqual(created[p], v)