def test_drtfr_gpgkey(self):
    """Test creating a dnf .repo file from a baseurl Repo object with gpgkey"""
    repo = self.dbo.repos.get("fake-repo-gpgkey")
    expected = self._read("gpgkey-test.repo")
    self.assertEqual(dnf_repo_to_file_repo(repo), expected)
def test_drtfr_metalink(self):
    """Test creating a dnf .repo file from a metalink Repo object"""
    repo = self.dbo.repos.get("fake-repo-metalink")
    expected = self._read("metalink-test.repo")
    self.assertEqual(dnf_repo_to_file_repo(repo), expected)
def test_drtfr_mirrorlist(self):
    """Test creating a dnf .repo file from a mirrorlist Repo object"""
    repo = self.dbo.repos.get("fake-repo-mirrorlist")
    expected = self._read("mirrorlist-test.repo")
    self.assertEqual(dnf_repo_to_file_repo(repo), expected)
def v1_projects_source_new():
    """Add a new package source. Or change an existing one

    **POST /api/v0/projects/source/new**

      Add (or change) a source for use when depsolving blueprints and composing images.

      The ``proxy`` and ``gpgkey_urls`` entries are optional. All of the others are required. The supported
      types for the urls are:

      * ``yum-baseurl`` is a URL to a yum repository.
      * ``yum-mirrorlist`` is a URL for a mirrorlist.
      * ``yum-metalink`` is a URL for a metalink.

      If ``check_ssl`` is true the https certificates must be valid. If they are self-signed you can either set
      this to false, or add your Certificate Authority to the host system.

      If ``check_gpg`` is true the GPG key must either be installed on the host system, or ``gpgkey_urls``
      should point to it.

      You can edit an existing source (other than system sources), by doing a POST
      of the new version of the source. It will overwrite the previous one.

      Example::

          {
              "id": "custom-source-1",
              "name": "Custom Package Source #1",
              "url": "https://url/path/to/repository/",
              "type": "yum-baseurl",
              "check_ssl": true,
              "check_gpg": true,
              "gpgkey_urls": [
                  "https://url/path/to/gpg-key"
              ]
          }

    In v0 the ``name`` field was used for the id (a short name for the repo).
    In v1 ``name`` changed to ``id`` and ``name`` is now used for the longer
    descriptive name of the repository.
    """
    # Accept either TOML or JSON request bodies
    if request.headers['Content-Type'] == "text/x-toml":
        source = toml.loads(request.data)
    else:
        source = request.get_json(cache=False)

    # The v1 API requires an "id" field for the short repo name
    if "id" not in source:
        return jsonify(status=False, errors=[{"id": UNKNOWN_SOURCE, "msg": "'id' field is missing from API v1 request."}]), 400

    # System sources (anything shipped in /etc/yum.repos.d/) must not be overwritten
    system_sources = get_repo_sources("/etc/yum.repos.d/*.repo")
    if source["id"] in system_sources:
        return jsonify(status=False, errors=[{"id": SYSTEM_SOURCE, "msg": "%s is a system source, it cannot be changed." % source["id"]}]), 400

    try:
        # Remove it from the RepoDict (NOTE that this isn't explicitly supported by the DNF API)
        with api.config["DNFLOCK"].lock:
            dbo = api.config["DNFLOCK"].dbo
            # If this repo already exists, delete it and replace it with the new one
            repos = list(r.id for r in dbo.repos.iter_enabled())
            if source["id"] in repos:
                del dbo.repos[source["id"]]

            repo = source_to_repo(source, dbo.conf)
            dbo.repos.add(repo)

            log.info("Updating repository metadata after adding %s", source["id"])
            dbo.fill_sack(load_system_repo=False)
            dbo.read_comps()

        # Write the new repo to disk, replacing any existing ones
        repo_dir = api.config["COMPOSER_CFG"].get("composer", "repo_dir")

        # Remove any previous sources with this id, ignore it if it isn't found
        try:
            delete_repo_source(joinpaths(repo_dir, "*.repo"), source["id"])
        except ProjectsError:
            pass

        # Make sure the source id can't contain a path traversal by taking the basename
        source_path = joinpaths(repo_dir, os.path.basename("%s.repo" % source["id"]))
        with open(source_path, "w") as f:
            f.write(dnf_repo_to_file_repo(repo))
    except Exception as e:
        # NOTE: was mislabeled "(v0_projects_source_add)"; this is the v1 handler
        log.error("(v1_projects_source_new) adding %s failed: %s", source["id"], str(e))

        # Cleanup the mess, if loading it failed we don't want to leave it in memory.
        # The RepoDict must only be inspected and modified while holding DNFLOCK.
        with api.config["DNFLOCK"].lock:
            dbo = api.config["DNFLOCK"].dbo
            repos = list(r.id for r in dbo.repos.iter_enabled())
            if source["id"] in repos:
                del dbo.repos[source["id"]]

                log.info("Updating repository metadata after adding %s failed", source["id"])
                dbo.fill_sack(load_system_repo=False)
                dbo.read_comps()

        return jsonify(status=False, errors=[{"id": PROJECTS_ERROR, "msg": str(e)}]), 400

    return jsonify(status=True)
def test_drtfr_baseurl(self):
    """Test creating a dnf .repo file from a baseurl Repo object"""
    repo = self.dbo.repos.get("fake-repo-baseurl")
    expected = self._read("baseurl-test.repo")
    self.assertEqual(dnf_repo_to_file_repo(repo), expected)
def test_drtfr_gpgkey(self):
    """Test creating a dnf .repo file from a baseurl Repo object with gpgkey"""
    generated = dnf_repo_to_file_repo(FakeRepoGPGKey())
    self.assertEqual(generated, fakerepo_gpgkey_str())
def test_drtfr_proxy(self):
    """Test creating a dnf .repo file from a baseurl Repo object with proxy"""
    generated = dnf_repo_to_file_repo(FakeRepoProxy())
    self.assertEqual(generated, fakerepo_proxy_str())
def test_drtfr_mirrorlist(self):
    """Test creating a dnf .repo file from a mirrorlist Repo object"""
    generated = dnf_repo_to_file_repo(FakeRepoMirrorlist())
    self.assertEqual(generated, fakerepo_mirrorlist_str())
def test_drtfr_metalink(self):
    """Test creating a dnf .repo file from a metalink Repo object"""
    generated = dnf_repo_to_file_repo(FakeRepoMetalink())
    self.assertEqual(generated, fakerepo_metalink_str())
def test_drtfr_baseurl(self):
    """Test creating a dnf .repo file from a baseurl Repo object"""
    generated = dnf_repo_to_file_repo(FakeRepoBaseUrl())
    self.assertEqual(generated, fakerepo_baseurl_str())