def _unit_inventory(self, request): """ Build the unit inventory. :param request: A synchronization request. :type request: SyncRequest :return: The built inventory. :rtype: UnitInventory """ # fetch child units try: conduit = NodesConduit() child_units = conduit.get_units(request.repo_id) except NodeError: raise except Exception: log.exception(request.repo_id) raise GetChildUnitsError(request.repo_id) # fetch parent units try: request.progress.begin_manifest_download() url = request.config.get(constants.MANIFEST_URL_KEYWORD) manifest = Manifest() manifest.fetch(url, request.working_dir, request.downloader) manifest.fetch_units(url, request.downloader) except NodeError: raise except Exception: log.exception(request.repo_id) raise GetParentUnitsError(request.repo_id) return UnitInventory(manifest, child_units)
def publish(self, units):
    """
    Publish the specified units.
    Writes the units.json file and symlinks each of the files associated
    to the unit.storage_path.  Publishing is staged in a temporary
    directory and must use commit() to make the publishing permanent.
    :param units: A list of units to publish.
    :type units: iterable
    :return: The absolute path to the manifest.
    :rtype: str
    """
    # stage everything in a fresh temp dir under the publish dir
    pathlib.mkdir(self.publish_dir)
    self.tmp_dir = mkdtemp(dir=self.publish_dir)
    # record each unit in the units file while linking it into place
    with UnitWriter(pathlib.join(self.tmp_dir, UNITS_FILE_NAME)) as unit_writer:
        for unit in units:
            self.publish_unit(unit)
            unit_writer.add(unit)
    # describe the staged units in a manifest with a fresh ID
    manifest = Manifest(str(uuid4()))
    manifest.set_units(unit_writer)
    written_path = manifest.write(pathlib.join(self.tmp_dir, MANIFEST_FILE_NAME))
    self.staged = True
    return written_path
def test_import_cached_manifest_missing_units(self, *unused):
    """
    A sync with a cached manifest but without the cached units file
    must still import all units.
    """
    # Setup
    self.populate()
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        # wipe the child-side state so the sync starts from scratch
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        manifest = Manifest(manifest_path)
        manifest.read()
        # copy ONLY the manifest (not the units file) into the working dir
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        # Test
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
                importer.sync_repo(repo, conduit, configuration)
        # Verify
        units = conduit.get_units()
        self.assertEquals(len(units), self.NUM_UNITS)
def publish(self, units):
    """
    Publish the specified units.
    Writes the units.json file and symlinks each of the files associated
    to the unit.storage_path.  Publishing is staged in a temporary
    directory and must use commit() to make the publishing permanent.
    :param units: A list of units to publish.
    :type units: iterable
    :return: The absolute path to the manifest.
    :rtype: str
    """
    # stage in a temp dir created one level above the publish dir
    staging_root = os.path.normpath(os.path.join(self.publish_dir, '../'))
    pathlib.mkdir(staging_root)
    self.tmp_dir = mkdtemp(dir=staging_root)
    # record each unit in the units file while linking it into place
    with UnitWriter(self.tmp_dir) as unit_writer:
        for unit in units:
            self.publish_unit(unit)
            unit_writer.add(unit)
    # describe the staged units in a manifest with a fresh ID
    manifest = Manifest(self.tmp_dir, str(uuid4()))
    manifest.units_published(unit_writer)
    manifest.write()
    self.staged = True
    return manifest.path
def write_manifest(self, units):
    """
    Write the manifest (units.json) for the specified list of units.
    :param units: A list of units.
    :type units: list
    :return: The absolute path to the written manifest file.
    :rtype: str
    """
    # the manifest is written under <publish_dir>/<repo_id>
    target_dir = join(self.publish_dir, self.repo_id)
    mkdir(target_dir)
    return Manifest().write(target_dir, units)
def _parent_units(self):
    """
    Fetch the list of units published by the parent nodes distributor.
    This is performed by reading the manifest at the URL defined in
    the configuration.
    :return: A dictionary of units keyed by UnitKey.
    :rtype: dict
    """
    # the manifest URL comes from the importer configuration
    url = self.config.get('manifest_url')
    manifest = Manifest()
    units = manifest.read(url, self.downloader)
    return unit_dictionary(units)
def test_write(self):
    """
    Writing NUM_UNITS units must produce a gzipped JSON manifest file
    containing exactly those units.
    """
    # Test
    manifest = Manifest()
    units = []
    for i in range(0, self.NUM_UNITS):
        units.append({i: i + 1})
    manifest.write(self.tmp_dir, units)
    # Verify
    path = os.path.join(self.tmp_dir, Manifest.FILE_NAME)
    self.assertTrue(os.path.exists(path))
    # fix: the gzip handle was previously leaked; always close it
    fp = gzip.open(path)
    try:
        s = fp.read()
    finally:
        fp.close()
    units_in = json.loads(s)
    self.verify(units, units_in)
def test_round_trip(self):
    """
    Units written by one manifest must be readable, unchanged, by a
    second manifest through a file:// URL.
    """
    # write a manifest containing NUM_UNITS units
    manifest = Manifest()
    units = [{i: i + 1} for i in range(self.NUM_UNITS)]
    manifest.write(self.tmp_dir, units)
    # read it back with a fresh manifest and a downloader
    downloader = factory.get_downloader(DownloaderConfig('http'))
    path = os.path.join(self.tmp_dir, Manifest.FILE_NAME)
    units_in = Manifest().read('file://%s' % path, downloader)
    # the units must survive the round trip intact
    self.verify(units, units_in)
def publish(self, units):
    """
    Publish the specified units.
    Writes the units.json file and symlinks each of the
    files associated to the unit.storage_path.
    :param units: A list of units to publish.
    :type units: iterable
    :return: The path to the written manifest.
    :rtype: str
    """
    # NOTE(review): super(self.__class__, self) recurses forever if this
    # class is ever subclassed; prefer naming the class explicitly.
    manifest_path = super(self.__class__, self).publish(units)
    # re-open the written manifest and record the base URL the
    # published content will be served from
    manifest = Manifest()
    manifest.read(manifest_path)
    manifest.publishing_details[constants.BASE_URL] = \
        pathlib.url_join(self.base_url, self.alias[0], self.repo_id)
    manifest.write(manifest_path)
    return manifest_path
def _unit_inventory(self, request): """ Build the unit inventory. :param request: A synchronization request. :type request: SyncRequest :return: The built inventory. :rtype: UnitInventory """ # fetch child units try: conduit = NodesConduit() child_units = conduit.get_units(request.repo_id) except NodeError: raise except Exception: log.exception(request.repo_id) raise GetChildUnitsError(request.repo_id) # fetch parent units try: request.progress.begin_manifest_download() url = request.config.get(constants.MANIFEST_URL_KEYWORD) manifest = Manifest() manifest.fetch(url, request.working_dir, request.downloader) manifest.fetch_units(url, request.downloader) parent_units = manifest.get_units() except NodeError: raise except Exception: log.exception(request.repo_id) raise GetParentUnitsError(request.repo_id) return UnitInventory(parent_units, child_units)
def test_import_cached_manifest_matched(self, mock_get_working, mock_fetch, *unused):
    """
    When both the cached manifest and the cached units file match the
    published content, the sync must not re-fetch the units
    (mock_fetch stays uncalled).
    """
    # Setup
    self.populate()
    mock_get_working.return_value = self.temp_dir
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        # wipe the child-side state so the sync starts from scratch
        model.Distributor.objects.delete()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        units_path = os.path.join(os.path.dirname(manifest_path), UNITS_FILE_NAME)
        manifest = Manifest(manifest_path)
        manifest.read()
        # seed the working dir with BOTH cached files
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
        # Test
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER, Mock())
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            with patch('pulp_node.constants.CONTENT_PATH', self.parentfs):
                importer.sync_repo(repo, conduit, configuration)
        # Verify
        units = conduit.get_units()
        self.assertEquals(len(units), self.NUM_UNITS)
        self.assertFalse(mock_fetch.called)
def test_read(self):
    """
    A gzipped JSON units file written directly to disk must be readable
    through the manifest via a file:// URL.
    """
    # Setup: write the gzipped JSON file by hand.
    units = [{i: i + 1} for i in range(self.NUM_UNITS)]
    path = os.path.join(self.tmp_dir, Manifest.FILE_NAME)
    fp = gzip.open(path, 'wb')
    fp.write(json.dumps(units))
    fp.close()
    # Test: read it back through a downloader.
    downloader = factory.get_downloader(DownloaderConfig('http'))
    url = 'file://%s' % os.path.join(self.tmp_dir, Manifest.FILE_NAME)
    units_in = Manifest().read(url, downloader)
    # Verify
    self.verify(units, units_in)
def test_import_cached_manifest_matched(self, mock_fetch, *unused):
    """
    When the cached manifest and units file match the published content,
    the sync must not re-fetch the units (mock_fetch stays uncalled).
    """
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    dist = NodesHttpDistributor()
    working_dir = os.path.join(self.childfs, 'working_dir')
    os.makedirs(working_dir)
    repo = Repository(self.REPO_ID, working_dir)
    configuration = self.dist_conf()
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, configuration)
    # wipe the child-side state so the sync starts from scratch
    Repo.get_collection().remove()
    RepoDistributor.get_collection().remove()
    RepoContentUnit.get_collection().remove()
    unit_db.clean()
    self.define_plugins()
    publisher = dist.publisher(repo, configuration)
    manifest_path = publisher.manifest_path()
    units_path = os.path.join(os.path.dirname(manifest_path), UNITS_FILE_NAME)
    manifest = Manifest(manifest_path)
    manifest.read()
    # seed the working dir with BOTH cached files
    shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
    shutil.copy(units_path, os.path.join(working_dir, UNITS_FILE_NAME))
    # Test
    importer = NodesHttpImporter()
    manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
    configuration = {
        constants.MANIFEST_URL_KEYWORD: manifest_url,
        constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
    }
    configuration = PluginCallConfiguration(configuration, {})
    conduit = RepoSyncConduit(
        self.REPO_ID,
        constants.HTTP_IMPORTER,
        RepoContentUnit.OWNER_TYPE_IMPORTER,
        constants.HTTP_IMPORTER)
    pulp_conf.set('server', 'storage_dir', self.childfs)
    importer.sync_repo(repo, conduit, configuration)
    # Verify
    units = conduit.get_units()
    self.assertEquals(len(units), self.NUM_UNITS)
    self.assertFalse(mock_fetch.called)
def test_import_cached_manifest_units_invalid(self, *unused):
    """
    A sync with a cached manifest but a corrupt cached units file must
    fall back to fetching and still import all units.
    """
    # Setup
    self.populate()
    with mock_config.patch({'server': {'storage_dir': self.parentfs}}):
        dist = NodesHttpDistributor()
        working_dir = os.path.join(self.childfs, 'working_dir')
        os.makedirs(working_dir)
        repo = Repository(self.REPO_ID, working_dir)
        configuration = self.dist_conf()
        conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
        dist.publish_repo(repo, conduit, configuration)
        # wipe the child-side state so the sync starts from scratch
        model.Repository.drop_collection()
        RepoDistributor.get_collection().remove()
        RepoContentUnit.get_collection().remove()
        unit_db.clean()
        self.define_plugins()
        publisher = dist.publisher(repo, configuration)
        manifest_path = publisher.manifest_path()
        manifest = Manifest(manifest_path)
        manifest.read()
        shutil.copy(manifest_path, os.path.join(working_dir, MANIFEST_FILE_NAME))
        # plant a corrupt units file beside the cached manifest
        with open(os.path.join(working_dir, UNITS_FILE_NAME), 'w+') as fp:
            fp.write('invalid-units')
        # Test
        importer = NodesHttpImporter()
        manifest_url = pathlib.url_join(publisher.base_url, manifest_path)
        configuration = {
            constants.MANIFEST_URL_KEYWORD: manifest_url,
            constants.STRATEGY_KEYWORD: constants.MIRROR_STRATEGY,
        }
        configuration = PluginCallConfiguration(configuration, {})
        conduit = RepoSyncConduit(self.REPO_ID, constants.HTTP_IMPORTER)
        with mock_config.patch({'server': {'storage_dir': self.childfs}}):
            importer.sync_repo(repo, conduit, configuration)
        # Verify
        units = conduit.get_units()
        self.assertEquals(len(units), self.NUM_UNITS)
def test_publisher(self):
    """
    Publishing three units must produce a readable manifest whose
    entries point at symlinks containing the original file content.
    """
    # setup: write three unit files and build their unit dicts
    units = []
    for n in range(0, 3):
        fn = 'test_%d' % n
        path = os.path.join(self.unit_dir, fn)
        fp = open(path, 'w')
        fp.write(fn)
        fp.close()
        unit = {'type_id':'unit', 'unit_key':{'n':n}, 'storage_path':path}
        units.append(unit)
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    p = HttpPublisher(base_url, virtual_host, repo_id)
    p.publish(units)
    # verify: read the manifest back and check each published unit
    conf = DownloaderConfig('http')
    downloader = factory.get_downloader(conf)
    manifest_path = p.manifest_path()
    manifest = Manifest()
    url = 'file://'+manifest_path
    units = manifest.read(url, downloader)
    n = 0
    for unit in units:
        file_content = 'test_%d' % n
        _download = unit['_download']
        url = _download['url']
        # the download URL must live under <base>/<publish_dir>/<repo>/content
        self.assertEqual(url.rsplit('/', 1)[0],'/'.join((base_url, publish_dir[1:], repo_id, 'content')))
        path = url.split('//', 1)[1]
        # published file is a symlink back to the unit's storage path
        self.assertTrue(os.path.islink(path))
        f = open(path)
        s = f.read()
        f.close()
        self.assertEqual(s, file_content)
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1
def test_write(self):
    """
    Writing NUM_UNITS units must produce a manifest document whose
    referenced unit files together contain every written unit.
    """
    # Test: write the units through the manifest.
    units = [{i: i + 1} for i in range(self.NUM_UNITS)]
    Manifest().write(self.tmp_dir, units)
    # Verify: the manifest file exists on disk.
    path = os.path.join(self.tmp_dir, Manifest.FILE_NAME)
    self.assertTrue(os.path.exists(path))
    fp = gzip.open(path)
    document = json.loads(fp.read())
    fp.close()
    # collect the units from every referenced unit file
    units_in = []
    for unit_file in document['unit_files']:
        fp = gzip.open(os.path.join(self.tmp_dir, unit_file))
        units_in.extend(json.load(fp))
        fp.close()
    self.assertEqual(document['total_units'], self.NUM_UNITS)
    self.verify(units, units_in)
def test_publisher(self):
    """
    Publishing three units must produce a fetchable manifest whose
    entries point at symlinks containing the original file content.
    """
    # setup: write three unit files and build their unit dicts
    units = []
    for n in range(0, 3):
        fn = 'test_%d' % n
        relative_path = os.path.join(self.RELATIVE_PATH, fn)
        path = os.path.join(self.unit_dir, relative_path)
        fp = open(path, 'w')
        fp.write(fn)
        fp.close()
        unit = {
            'type_id': 'unit',
            'unit_key': {
                'n': n
            },
            'storage_path': path,
            'relative_path': relative_path
        }
        units.append(unit)
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    p = HttpPublisher(base_url, virtual_host, repo_id)
    p.publish(units)
    # verify: fetch the manifest and its units, then check each entry
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest_path = p.manifest_path()
    working_dir = os.path.join(self.tmpdir, 'working_dir')
    os.makedirs(working_dir)
    manifest = Manifest()
    url = 'file://' + manifest_path
    manifest.fetch(url, working_dir, downloader)
    manifest.fetch_units(url, downloader)
    units = manifest.get_units()
    n = 0
    for unit, ref in units:
        file_content = 'test_%d' % n
        _download = unit['_download']
        url = _download['url']
        # download URL is built from base url + publish dir + relative path
        self.assertEqual(
            url,
            '/'.join((base_url, publish_dir[1:], repo_id,
                      unit['relative_path'])))
        path = url.split('//', 1)[1]
        # published file is a symlink back to the unit's storage path
        self.assertTrue(os.path.islink(path))
        f = open(path)
        s = f.read()
        f.close()
        self.assertEqual(s, file_content)
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1
def publish(self, units):
    """
    Publish the specified units.
    Writes the units.json file and symlinks each of the
    files associated to the unit.storage_path.
    :param units: A list of units to publish.
    :type units: iterable
    :return: The path to the written manifest.
    :rtype: str
    """
    # everything is published under <publish_dir>/<repo_id>
    dir_path = join(self.publish_dir, self.repo_id)
    mkdir(dir_path)
    # record each unit in the units file while linking it into place
    with UnitWriter(os.path.join(dir_path, UNITS_FILE_NAME)) as unit_writer:
        for unit in units:
            self.link_unit(unit)
            unit_writer.add(unit)
    # describe the published units in a manifest with a fresh ID
    manifest = Manifest(str(uuid4()))
    manifest.set_units(unit_writer)
    return manifest.write(os.path.join(dir_path, MANIFEST_FILE_NAME))
def test_publisher(self):
    """
    Publishing must tar the first unit and symlink the rest, and record
    the base URL in the manifest's publishing details.
    """
    # setup
    units = self.populate()
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    with HttpPublisher(base_url, virtual_host, repo_id) as p:
        p.publish(units)
        p.commit()
    # verify: fetch the manifest and its units, then check each entry
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest_path = p.manifest_path()
    working_dir = os.path.join(self.tmpdir, 'working_dir')
    os.makedirs(working_dir)
    manifest = Manifest()
    url = pathlib.url_join(base_url, manifest_path)
    manifest.fetch(url, working_dir, downloader)
    manifest.fetch_units(url, downloader)
    units = manifest.get_units()
    n = 0
    for unit, ref in units:
        # only the first unit is published as a tarball
        if n == 0:
            self.assertTrue(unit[constants.PUBLISHED_AS_TARBALL])
        else:
            self.assertFalse(
                unit.get(constants.PUBLISHED_AS_TARBALL, False))
        path = pathlib.join(publish_dir, repo_id, unit[constants.RELATIVE_PATH])
        self.assertEqual(manifest.publishing_details[constants.BASE_URL],
                         pathlib.url_join(base_url, publish_dir, repo_id))
        if n == 0:
            self.assertTrue(os.path.isfile(path))
        else:
            self.assertTrue(os.path.islink(path))
        if n == 0:
            tb = tarfile.open(path)
            try:
                files = sorted(tb.getnames())
            finally:
                tb.close()
            self.assertEqual(len(files), self.NUM_TARED_FILES + 1)
        else:
            with open(path, 'rb') as fp:
                unit_content = fp.read()
            # NOTE(review): this compares unit_content to itself and can
            # never fail; it probably should compare against the expected
            # file content -- confirm intended assertion.
            self.assertEqual(unit_content, unit_content)
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1
def test_publish(self):
    """
    Publishing a populated repository must yield a readable manifest
    whose unit metadata matches the created units.
    """
    # Setup
    self.populate()
    pulp_conf.set('server', 'storage_dir', self.parentfs)
    # Test
    dist = NodesHttpDistributor()
    repo = Repository(self.REPO_ID)
    conduit = RepoPublishConduit(self.REPO_ID, constants.HTTP_DISTRIBUTOR)
    dist.publish_repo(repo, conduit, self.dist_conf())
    # Verify: read the manifest back and compare unit metadata
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest = Manifest()
    pub = dist.publisher(repo, self.dist_conf())
    url = '/'.join((pub.base_url, pub.manifest_path()))
    units = list(manifest.read(url, downloader))
    self.assertEqual(len(units), self.NUM_UNITS)
    for n in range(0, self.NUM_UNITS):
        unit = units[n]
        created = self.units[n]
        for p, v in unit['metadata'].items():
            # private (underscore) attributes are not compared
            if p.startswith('_'):
                continue
            self.assertEqual(created[p], v)
def publish(self, units):
    """
    Publish the specified units.
    Writes the units.json file and symlinks each of the
    files associated to the unit.storage_path.
    :param units: A list of units to publish.
    :type units: iterable
    :return: The path to the written manifest.
    :rtype: str
    """
    # NOTE(review): super(self.__class__, self) recurses forever if this
    # class is ever subclassed; prefer naming the class explicitly.
    manifest_path = super(self.__class__, self).publish(units)
    # re-open the written manifest and record the base URL the
    # published content will be served from
    manifest = Manifest(manifest_path)
    manifest.read()
    base_url = pathlib.url_join(self.base_url, self.alias[0], self.repo_id)
    details = {constants.BASE_URL: base_url}
    manifest.published(details)
    manifest.write()
    return manifest_path
def test_publisher(self):
    """
    Publishing must tar the first unit and symlink the rest, and record
    the base URL in the manifest's publishing details.
    """
    # setup
    units = self.populate()
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    with HttpPublisher(base_url, virtual_host, repo_id) as p:
        p.publish(units)
        p.commit()
    # verify: fetch the manifest and its units, then check each entry
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest_path = p.manifest_path()
    working_dir = os.path.join(self.tmpdir, 'working_dir')
    os.makedirs(working_dir)
    manifest = Manifest()
    url = pathlib.url_join(base_url, manifest_path)
    manifest.fetch(url, working_dir, downloader)
    manifest.fetch_units(url, downloader)
    units = manifest.get_units()
    n = 0
    for unit, ref in units:
        # only the first unit is published as a tarball
        if n == 0:
            self.assertTrue(unit[constants.PUBLISHED_AS_TARBALL])
        else:
            self.assertFalse(unit.get(constants.PUBLISHED_AS_TARBALL, False))
        path = pathlib.join(publish_dir, repo_id, unit[constants.RELATIVE_PATH])
        self.assertEqual(
            manifest.publishing_details[constants.BASE_URL],
            pathlib.url_join(base_url, publish_dir, repo_id))
        if n == 0:
            self.assertTrue(os.path.isfile(path))
        else:
            self.assertTrue(os.path.islink(path))
        if n == 0:
            tb = tarfile.open(path)
            try:
                files = sorted(tb.getnames())
            finally:
                tb.close()
            self.assertEqual(len(files), self.NUM_TARED_FILES + 1)
        else:
            with open(path, 'rb') as fp:
                unit_content = fp.read()
            # NOTE(review): this compares unit_content to itself and can
            # never fail; it probably should compare against the expected
            # file content -- confirm intended assertion.
            self.assertEqual(unit_content, unit_content)
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1
def _unit_inventory(self, request): """ Build the unit inventory. :param request: A synchronization request. :type request: SyncRequest :return: The built inventory. :rtype: UnitInventory """ # fetch child units try: conduit = NodesConduit() child_units = conduit.get_units(request.repo_id) except NodeError: raise except Exception: _log.exception(request.repo_id) raise GetChildUnitsError(request.repo_id) # fetch parent units try: request.progress.begin_manifest_download() url = request.config.get(constants.MANIFEST_URL_KEYWORD) manifest = Manifest(request.working_dir) try: manifest.read() except IOError, e: if e.errno == errno.ENOENT: pass except ValueError: # json decoding failed pass fetched_manifest = RemoteManifest(url, request.downloader, request.working_dir) fetched_manifest.fetch() if manifest != fetched_manifest or \ not manifest.is_valid() or not manifest.has_valid_units(): fetched_manifest.write() fetched_manifest.fetch_units() manifest = fetched_manifest if not manifest.is_valid(): raise InvalidManifestError()
def test_publisher(self):
    """
    Publishing three units must produce a fetchable manifest whose
    entries point at symlinks containing the original file content.
    """
    # setup: write three unit files and build their unit dicts
    units = []
    for n in range(0, 3):
        fn = 'test_%d' % n
        relative_path = os.path.join(self.RELATIVE_PATH, fn)
        path = os.path.join(self.unit_dir, relative_path)
        fp = open(path, 'w')
        fp.write(fn)
        fp.close()
        unit = {'type_id':'unit', 'unit_key':{'n':n}, 'storage_path':path, 'relative_path':relative_path}
        units.append(unit)
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    p = HttpPublisher(base_url, virtual_host, repo_id)
    p.publish(units)
    # verify: fetch the manifest and its units, then check each entry
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest_path = p.manifest_path()
    working_dir = os.path.join(self.tmpdir, 'working_dir')
    os.makedirs(working_dir)
    manifest = Manifest()
    url = 'file://' + manifest_path
    manifest.fetch(url, working_dir, downloader)
    manifest.fetch_units(url, downloader)
    units = manifest.get_units()
    n = 0
    for unit, ref in units:
        file_content = 'test_%d' % n
        _download = unit['_download']
        url = _download['url']
        # download URL is built from base url + publish dir + relative path
        self.assertEqual(url, '/'.join((base_url, publish_dir[1:], repo_id, unit['relative_path'])))
        path = url.split('//', 1)[1]
        # published file is a symlink back to the unit's storage path
        self.assertTrue(os.path.islink(path))
        f = open(path)
        s = f.read()
        f.close()
        self.assertEqual(s, file_content)
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1