def test_publisher(self):
    """
    Publish a set of generated units, then fetch the manifest back over
    file:// and verify each published unit: URL layout, symlink into unit
    storage, file content, and unit_key ordering.
    """
    # setup: write 3 small unit files into the unit storage directory
    units = []
    for n in range(0, 3):
        fn = 'test_%d' % n
        relative_path = os.path.join(self.RELATIVE_PATH, fn)
        path = os.path.join(self.unit_dir, relative_path)
        # context manager guarantees the file is closed even if write fails
        # (original used open/close pairs, leaking on assertion failure)
        with open(path, 'w') as fp:
            fp.write(fn)
        unit = {
            'type_id': 'unit',
            'unit_key': {'n': n},
            'storage_path': path,
            'relative_path': relative_path,
        }
        units.append(unit)
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    p = HttpPublisher(base_url, virtual_host, repo_id)
    p.publish(units)
    # verify: round-trip the manifest through the downloader
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest_path = p.manifest_path()
    working_dir = os.path.join(self.tmpdir, 'working_dir')
    os.makedirs(working_dir)
    manifest = Manifest()
    url = 'file://' + manifest_path
    manifest.fetch(url, working_dir, downloader)
    manifest.fetch_units(url, downloader)
    units = manifest.get_units()
    n = 0
    for unit, ref in units:
        file_content = 'test_%d' % n
        _download = unit['_download']
        url = _download['url']
        # published URL is <base>/<publish_dir>/<repo_id>/<relative_path>
        # (publish_dir[1:] strips the leading '/' so join produces file:///...)
        self.assertEqual(
            url,
            '/'.join((base_url, publish_dir[1:], repo_id, unit['relative_path'])))
        path = url.split('//', 1)[1]
        # units are published as symlinks back into unit storage
        self.assertTrue(os.path.islink(path))
        with open(path) as f:
            s = f.read()
        self.assertEqual(s, file_content)
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1
def test_unstage(self):
    """Publishing then unstaging must remove the publisher's temp directory."""
    # setup
    staged_units = self.populate()
    # test
    # publish, then immediately unstage
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    publisher = HttpPublisher(base_url, virtual_host, repo_id)
    publisher.publish(staged_units)
    publisher.unstage()
    # verify: the staging area is gone
    self.assertFalse(os.path.exists(publisher.tmp_dir))
def test_publisher(self):
    """
    Publish a set of generated units, then fetch the manifest back over
    file:// and verify each published unit: URL layout, symlink into unit
    storage, file content, and unit_key ordering.
    """
    # setup: write 3 small unit files into the unit storage directory
    units = []
    for n in range(0, 3):
        fn = 'test_%d' % n
        relative_path = os.path.join(self.RELATIVE_PATH, fn)
        path = os.path.join(self.unit_dir, relative_path)
        # context manager guarantees the file is closed even if write fails
        # (original used open/close pairs, leaking on assertion failure)
        with open(path, 'w') as fp:
            fp.write(fn)
        unit = {
            'type_id': 'unit',
            'unit_key': {'n': n},
            'storage_path': path,
            'relative_path': relative_path,
        }
        units.append(unit)
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    p = HttpPublisher(base_url, virtual_host, repo_id)
    p.publish(units)
    # verify: round-trip the manifest through the downloader
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest_path = p.manifest_path()
    working_dir = os.path.join(self.tmpdir, 'working_dir')
    os.makedirs(working_dir)
    manifest = Manifest()
    url = 'file://' + manifest_path
    manifest.fetch(url, working_dir, downloader)
    manifest.fetch_units(url, downloader)
    units = manifest.get_units()
    n = 0
    for unit, ref in units:
        file_content = 'test_%d' % n
        _download = unit['_download']
        url = _download['url']
        # published URL is <base>/<publish_dir>/<repo_id>/<relative_path>
        # (publish_dir[1:] strips the leading '/' so join produces file:///...)
        self.assertEqual(
            url,
            '/'.join((base_url, publish_dir[1:], repo_id, unit['relative_path'])))
        path = url.split('//', 1)[1]
        # units are published as symlinks back into unit storage
        self.assertTrue(os.path.islink(path))
        with open(path) as f:
            s = f.read()
        self.assertEqual(s, file_content)
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1
def test_publisher(self):
    """
    Publish populated units, commit, then fetch the remote manifest and
    verify publishing details, the tarball unit (n == 0), and the linked
    units (n > 0): file/link type, size, and readable content.
    """
    # setup
    units = self.populate()
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    with HttpPublisher(base_url, virtual_host, repo_id) as p:
        p.publish(units)
        p.commit()
    # verify
    conf = DownloaderConfig()
    downloader = LocalFileDownloader(conf)
    manifest_path = p.manifest_path()
    working_dir = os.path.join(self.tmpdir, 'working_dir')
    os.makedirs(working_dir)
    url = pathlib.url_join(base_url, manifest_path)
    manifest = RemoteManifest(url, downloader, working_dir)
    manifest.fetch()
    manifest.fetch_units()
    self.assertTrue(manifest.has_valid_units())
    units = manifest.get_units()
    n = 0
    for unit, ref in units:
        self.assertEqual(
            manifest.publishing_details[constants.BASE_URL],
            pathlib.url_join(base_url, publish_dir, repo_id))
        # original recomputed the same path in two separate `if n == 0`
        # blocks; a single branch per unit kind is equivalent and clearer
        if n == 0:
            # TARBALL: published as a regular file containing the tar
            path = pathlib.join(publish_dir, repo_id, unit[constants.TARBALL_PATH])
            self.assertTrue(os.path.isfile(path))
            tb = tarfile.open(path)
            try:
                files = sorted(tb.getnames())
            finally:
                tb.close()
            self.assertEqual(len(files), self.NUM_TARED_FILES)
        else:
            # plain unit: published as a symlink into unit storage
            path = pathlib.join(publish_dir, repo_id, unit[constants.RELATIVE_PATH])
            self.assertTrue(os.path.islink(path))
            self.assertEqual(unit[constants.FILE_SIZE], os.path.getsize(path))
            with open(path, 'rb') as fp:
                unit_content = fp.read()
            # BUG FIX: original asserted unit_content == unit_content,
            # which always passes. Verify the bytes actually read match
            # the manifest-declared size (itself checked against getsize
            # above), proving the link target is fully readable.
            self.assertEqual(len(unit_content), unit[constants.FILE_SIZE])
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1
def test_publisher(self):
    """
    Publish generated units and verify via Manifest.read(): each unit's
    download URL layout, the published symlink, and the file content.
    """
    # setup: write 3 small unit files into the unit storage directory
    units = []
    for n in range(0, 3):
        fn = 'test_%d' % n
        path = os.path.join(self.unit_dir, fn)
        # context manager guarantees the file is closed even if write fails
        # (original used open/close pairs, leaking on assertion failure)
        with open(path, 'w') as fp:
            fp.write(fn)
        unit = {
            'type_id': 'unit',
            'unit_key': {'n': n},
            'storage_path': path,
        }
        units.append(unit)
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    p = HttpPublisher(base_url, virtual_host, repo_id)
    p.publish(units)
    # verify
    conf = DownloaderConfig('http')
    downloader = factory.get_downloader(conf)
    manifest_path = p.manifest_path()
    manifest = Manifest()
    url = 'file://' + manifest_path
    units = manifest.read(url, downloader)
    n = 0
    for unit in units:
        file_content = 'test_%d' % n
        _download = unit['_download']
        url = _download['url']
        # URL (minus filename) is <base>/<publish_dir>/<repo_id>/content
        self.assertEqual(
            url.rsplit('/', 1)[0],
            '/'.join((base_url, publish_dir[1:], repo_id, 'content')))
        path = url.split('//', 1)[1]
        # units are published as symlinks back into unit storage
        self.assertTrue(os.path.islink(path))
        with open(path) as f:
            s = f.read()
        self.assertEqual(s, file_content)
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1
def test_exit(self):
    """Leaving the publisher's with-block must clean up its temp directory."""
    # setup
    staged_units = self.populate()
    # test
    # publish inside the context manager; exit triggers cleanup
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    with HttpPublisher(base_url, virtual_host, repo_id) as p:
        p.publish(staged_units)
    # verify: the staging area is gone after __exit__
    self.assertFalse(os.path.exists(p.tmp_dir))
def publisher(self, repo, config):
    """
    Get a configured publisher.

    Builds the base URL from the configured protocol and this server's
    name, and looks up the publishing alias from the protocol's own
    configuration section.

    :param repo: A repository.
    :type repo: pulp.plugins.model.Repository
    :param config: plugin configuration
    :type config: pulp.plugins.config.PluginConfiguration
    :return: The configured publisher.
    :rtype: HttpPublisher
    """
    protocol = config.get(constants.PROTOCOL_KEYWORD)
    # the protocol name doubles as the key of its config section
    section = config.get(protocol)
    alias = section.get('alias')
    host = pulp_conf.get('server', 'server_name')
    base_url = '://'.join((protocol, host))
    return HttpPublisher(base_url, alias, repo.id)
def test_publisher(self):
    """
    Publish populated units, commit, then fetch the manifest and verify:
    the first unit (n == 0) is published as a tarball file, later units
    as symlinks, plus publishing details and unit_key ordering.
    """
    # setup
    units = self.populate()
    # test
    # publish
    repo_id = 'test_repo'
    base_url = 'file://'
    publish_dir = os.path.join(self.tmpdir, 'nodes/repos')
    virtual_host = (publish_dir, publish_dir)
    with HttpPublisher(base_url, virtual_host, repo_id) as p:
        p.publish(units)
        p.commit()
    # verify
    conf = DownloaderConfig()
    downloader = HTTPSCurlDownloader(conf)
    manifest_path = p.manifest_path()
    working_dir = os.path.join(self.tmpdir, 'working_dir')
    os.makedirs(working_dir)
    manifest = Manifest()
    url = pathlib.url_join(base_url, manifest_path)
    manifest.fetch(url, working_dir, downloader)
    manifest.fetch_units(url, downloader)
    units = manifest.get_units()
    n = 0
    for unit, ref in units:
        path = pathlib.join(publish_dir, repo_id, unit[constants.RELATIVE_PATH])
        self.assertEqual(
            manifest.publishing_details[constants.BASE_URL],
            pathlib.url_join(base_url, publish_dir, repo_id))
        # original split the tarball/plain distinction across three
        # separate `if n == 0` checks; one branch per kind is equivalent
        if n == 0:
            # TARBALL: a regular file; archive holds the tar-ed files
            # plus one extra member (NUM_TARED_FILES + 1)
            self.assertTrue(unit[constants.PUBLISHED_AS_TARBALL])
            self.assertTrue(os.path.isfile(path))
            with tarfile.open(path) as tb:
                files = sorted(tb.getnames())
            self.assertEqual(len(files), self.NUM_TARED_FILES + 1)
        else:
            # plain unit: a symlink into unit storage
            self.assertFalse(
                unit.get(constants.PUBLISHED_AS_TARBALL, False))
            self.assertTrue(os.path.islink(path))
            with open(path, 'rb') as fp:
                unit_content = fp.read()
            # BUG FIX: original asserted unit_content == unit_content,
            # which always passes. Assert the bytes read match the file
            # size on disk, proving the link target is fully readable.
            # NOTE(review): intended check was presumably against the
            # source unit's content — TODO confirm against populate().
            self.assertEqual(len(unit_content), os.path.getsize(path))
        self.assertEqual(unit['unit_key']['n'], n)
        n += 1