def setUp(self):
    """Prepare an isolated kojihub host-export environment for tests.

    Creates a temp topdir, stubs out DB access, policy checks and helper
    lookups, and installs grabbers that record inserts/updates instead of
    executing SQL.  Patches are started but not stopped here; presumably
    mock.patch.stopall runs in tearDown -- TODO confirm.
    """
    self.tempdir = tempfile.mkdtemp()
    # Point koji.pathinfo at the temp dir so path helpers stay sandboxed.
    self.pathinfo = koji.PathInfo(self.tempdir)
    mock.patch('koji.pathinfo', new=self.pathinfo).start()
    self.hostcalls = kojihub.HostExports()
    self.context = mock.patch('kojihub.context').start()
    mock.patch('kojihub.Host').start()
    self.Task = mock.patch('kojihub.Task').start()
    self.Task.return_value.assertHost = mock.MagicMock()
    self.get_build = mock.patch('kojihub.get_build').start()
    # Several helpers are replaced by my_* implementations on this test class.
    mock.patch('kojihub.get_rpm', new=self.my_get_rpm).start()
    self.get_image_build = mock.patch('kojihub.get_image_build').start()
    mock.patch('kojihub.get_archive_type', new=self.my_get_archive_type).start()
    mock.patch('kojihub.lookup_name', new=self.my_lookup_name).start()
    mock.patch.object(kojihub.BuildRoot, 'load', new=self.my_buildroot_load).start()
    mock.patch('kojihub.import_archive_internal', new=self.my_import_archive_internal).start()
    self._dml = mock.patch('kojihub._dml').start()
    mock.patch('kojihub.build_notification').start()
    mock.patch('kojihub.assert_policy').start()
    # Volume policy always resolves to the default volume in tests.
    mock.patch('kojihub.check_volume_policy',
               return_value={'id':0, 'name': 'DEFAULT'}).start()
    self.set_up_callbacks()
    self.rpms = {}
    self.inserts = []
    self.updates = []
    # Capture DB writes into self.inserts / self.updates instead of running SQL.
    mock.patch.object(kojihub.InsertProcessor, 'execute',
                      new=make_insert_grabber(self)).start()
    mock.patch.object(kojihub.BulkInsertProcessor, '_one_insert',
                      new=make_bulk_insert_grabber(self)).start()
    mock.patch.object(kojihub.UpdateProcessor, 'execute',
                      new=make_update_grabber(self)).start()
    # Sequence values are generated locally via my_nextval / self.sequences.
    mock.patch('kojihub.nextval', new=self.my_nextval).start()
    self.sequences = {}
def importBuild(rpms, buildinfo, tag=None):
    '''Import a build from a remote hub.

    Downloads the src.rpm first, then every binary rpm of the build, and
    finally tags the build on success.

    :param rpms: list of rpm info dicts; exactly the build's rpms, one of
        which must have arch 'src'
    :param buildinfo: build info dict (needs at least 'nvr')
    :param tag: optional tag name passed through to tagSuccessful
    :returns: True on success, False if any download failed or no src rpm
        was present
    '''
    srpm = None
    for rpminfo in rpms:
        if rpminfo['arch'] == 'src':
            srpm = rpminfo
    if srpm is None:
        # Previously this fell through to a NameError; fail explicitly.
        logging.error("No src rpm found for %s" % buildinfo['nvr'])
        return False
    pathinfo = koji.PathInfo(PACKAGEURL)
    build_url = pathinfo.build(buildinfo)
    url = "%s/%s" % (pathinfo.build(buildinfo), pathinfo.rpm(srpm))
    fname = "%s.src.rpm" % buildinfo['nvr']
    try:
        _importURL(url, fname)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
        logging.error("Importing %s failed" % fname)
        return False
    for rpminfo in rpms:
        if rpminfo['arch'] == 'src':
            # already imported above
            continue
        relpath = pathinfo.rpm(rpminfo)
        url = "%s/%s" % (build_url, relpath)
        logging.debug("url: %s" % url)
        fname = os.path.basename(relpath)
        logging.debug("fname: %s" % fname)
        try:
            _importURL(url, fname)
        except Exception:
            logging.error("Importing %s failed" % fname)
            return False
    tagSuccessful(buildinfo['nvr'], tag)
    return True
def get_source_packages(tag, repo_id):
    """Find URLs to SRPMs used in repo ID for tag name.

    Echoes (via click) one URL per latest tagged src rpm, resolved at the
    repo's creation event so the listing matches the repo contents.

    :param tag: koji tag name
    :param repo_id: id of an *active* repo of that tag
    :raises koji.GenericError: when no active repo matches tag + repo_id
    """
    ks = koji.ClientSession(KOJIHUB)
    # Empty topdir makes PathInfo produce paths relative to the server root.
    pathinfo = koji.PathInfo(topdir="")
    tinfo = ks.getTag(tag, strict=True)
    tag_id = tinfo["id"]
    repos = ks.getActiveRepos()
    # let's filter all active repos by tag
    repos = (repo for repo in repos if repo["tag_id"] == tag_id)
    try:
        repo = next(repo for repo in repos if repo["id"] == repo_id)
    except StopIteration:
        raise koji.GenericError(
            "No active repo for specified tag and id: {!r}, {!r}".format(
                tag, repo_id))
    # Query at the repo's creation event so results match the repo contents.
    event_id = repo["create_event"]
    rpms, builds = ks.listTaggedRPMS(tag_id, event=event_id, inherit=True,
                                     latest=True, arch="src")
    build_idx = {b["id"]: b for b in builds}
    for rpm in rpms:
        build = build_idx[rpm["build_id"]]
        builddir = pathinfo.build(build)
        # Fix: reuse builddir instead of leaving it unused and recomputing
        # pathinfo.build(build) a second time.
        path = os.path.join(builddir, pathinfo.rpm(rpm))
        click.echo(urljoin(KOJIPKGS, path))
def get_last_srpm(koji_session, tag, name, relative=False, topdir=None):
    """
    Obtain data for latest SRPM of a package to be used for submitting a
    build.  Returns SRPM info and a URL pointing to it, or None when no
    SRPM was found.

    :param koji_session: Koji session used for queries
    :param tag: Koji build tag name
    :param name: Package name
    :param relative: Whether the URL should be relative to Koji's work dir.
                     Used for submitting scratch-builds from SRPMs existing
                     in the same Koji.
    :param topdir: Alternative Koji topdir, defaults to one supplied in
                   configuration.
    :return: a tuple of (srpm_info, srpm_url) or None.
             srpm_info is Koji's rpm info dictionary ('epoch', 'version',
             'release' fields and more); srpm_url points to the SRPM and
             may be relative when `relative` is set.
    """
    if not topdir:
        config_key = 'srpm_relative_path_root' if relative else 'topurl'
        topdir = koji_session.config[config_key]
    rel_pathinfo = koji.PathInfo(topdir=topdir)
    tagged = koji_session.listTagged(tag, latest=True, package=name,
                                     inherit=True)
    if not tagged:
        return None
    src_rpms = koji_session.listRPMs(buildID=tagged[0]['build_id'],
                                     arches='src')
    if not src_rpms:
        return None
    srpm_info = src_rpms[0]
    srpm_url = rel_pathinfo.build(tagged[0]) + '/' + rel_pathinfo.rpm(srpm_info)
    return (srpm_info, srpm_url)
def __init__(self, tasker, workflow, target, hub, root, proxy=None,
             koji_ssl_certs_dir=None):
    """
    constructor

    :param tasker: DockerTasker instance
    :param workflow: DockerBuildWorkflow instance
    :param target: string, koji target to use as a source
    :param hub: string, koji hub (xmlrpc)
    :param root: string, koji root (storage)
    :param proxy: proxy user forwarded to the session, if any
    :param koji_ssl_certs_dir: str, path to "cert", "ca", and "serverca"

    Note that this plugin requires koji_ssl_certs_dir set if Koji
    certificate is not trusted by CA bundle.
    """
    super(KojiPlugin, self).__init__(tasker, workflow)
    self.target = target
    # Pass auth info only when SSL certs were supplied; otherwise the
    # session is created without authentication parameters.
    auth_info = {'ssl_certs_dir': koji_ssl_certs_dir} if koji_ssl_certs_dir else None
    self.xmlrpc = create_koji_session(hub, auth_info)
    self.pathinfo = koji.PathInfo(topdir=root)
    self.proxy = proxy
def display_state(self, info, level=0):
    """Render a human-readable state string for a task.

    A task that is not yet open but also not finished may arrive with
    info=None; report that as 'unknown'.  FAILED tasks additionally list
    URLs of their *.log outputs (indented one step past *level*) when a
    topurl is configured.
    """
    if not info:
        return 'unknown'
    state = info['state']
    if state == koji.TASK_STATES['OPEN']:
        if not info['host_id']:
            return 'open'
        host = self.session.getHost(info['host_id'])
        return 'open (%s)' % host['name']
    if state == koji.TASK_STATES['FAILED']:
        s = 'FAILED: %s' % self.get_failure()
        if self.topurl:
            # add relevant logs if there are any
            output = list_task_output_all_volumes(self.session, self.id)
            log_files = []
            for filename, volumes in six.iteritems(output):
                if not filename.endswith('log'):
                    continue
                log_files.extend((filename, volume) for volume in volumes)
            pi = koji.PathInfo(topdir=self.topurl)
            # indent more than current level
            level += 1
            indent = ' ' * level
            logs = [indent + os.path.join(pi.task(self.id, volume), filename)
                    for filename, volume in log_files]
            if logs:
                s += '\n' + indent + 'Relevant logs:\n'
                s += '\n'.join(logs)
        return s
    return koji.TASK_STATES[state].lower()
def download_rpm(build, rpm, topurl, sigkey=None, quiet=False, noprogress=False):
    """Wrapper around download_file, do additional checks for rpm files.

    Downloads the (optionally signed) rpm of *build* from *topurl* into the
    current directory, then validates file size, rpm format and payload
    hash against the db metadata; on any mismatch the file is deleted and
    error() is invoked.
    """
    pi = koji.PathInfo(topdir=topurl)
    if sigkey:
        fname = pi.signed(rpm, sigkey)
        # db stores the size of the unsigned copy only, so skip the size check
        filesize = None
    else:
        fname = pi.rpm(rpm)
        filesize = rpm['size']
    # pi.rpm()/pi.signed() are relative to the build directory
    url = os.path.join(pi.build(build), fname)
    path = os.path.basename(fname)
    download_file(url, path, quiet=quiet, noprogress=noprogress,
                  filesize=filesize)
    # size - we have stored size only for unsigned copies
    if not sigkey:
        size = os.path.getsize(path)
        if size != rpm['size']:
            os.unlink(path)
            error("Downloaded rpm %s size %d does not match db size %d, deleting"
                  % (path, size, rpm['size']))
    # basic sanity
    try:
        koji.check_rpm_file(path)
    except koji.GenericError as ex:
        os.unlink(path)
        warn(str(ex))
        error("Downloaded rpm %s is not valid rpm file, deleting" % path)
    # payload hash
    sigmd5 = koji.get_header_fields(path, ['sigmd5'])['sigmd5']
    if rpm['payloadhash'] != koji.hex_string(sigmd5):
        os.unlink(path)
        error("Downloaded rpm %s doesn't match db, deleting" % path)
def __init__(self, tasker, workflow, koji_hub, koji_root, koji_proxyuser=None,
             koji_ssl_certs_dir=None, koji_krb_principal=None,
             koji_krb_keytab=None, allowed_domains=None):
    """
    :param tasker: DockerTasker instance
    :param workflow: DockerBuildWorkflow instance
    :param koji_hub: str, koji hub (xmlrpc)
    :param koji_root: str, koji root (storage)
    :param koji_proxyuser: str, proxy user
    :param koji_ssl_certs_dir: str, path to "cert", "ca", and "serverca"
    :param koji_krb_principal: str, name of Kerberos principal
    :param koji_krb_keytab: str, Kerberos keytab
    :param allowed_domains: list<str>: list of domains that are allowed
           to be used when fetching artifacts by URL (case insensitive)
    """
    super(FetchMavenArtifactsPlugin, self).__init__(tasker, workflow)
    raw_auth = {
        'proxyuser': koji_proxyuser,
        'ssl_certs_dir': koji_ssl_certs_dir,
        'krb_principal': koji_krb_principal,
        'krb_keytab': koji_krb_keytab,
    }
    # Drop unset entries; an entirely empty dict means "no login needed"
    # and is stored as None below.
    koji_auth = {key: value for key, value in raw_auth.items() if value}
    self.koji_info = {
        'hub': koji_hub,
        'root': koji_root,
        'auth': koji_auth or None,
    }
    self.path_info = koji.PathInfo(topdir=self.koji_info['root'])
    # Domain matching is case insensitive, so normalize to lowercase once.
    domains = allowed_domains or []
    self.allowed_domains = {domain.lower() for domain in domains}
    self.workdir = self.workflow.source.get_build_file_path()[1]
    self.session = None
def setUp(self):
    """Prepare a kojihub host-call environment with Maven support enabled.

    Same sandboxing pattern as the other setUp fixtures: temp topdir,
    stubbed DB/policy helpers, patches started without stop -- presumably
    cleaned up by mock.patch.stopall in tearDown (TODO confirm).
    """
    self.tempdir = tempfile.mkdtemp()
    # Point koji.pathinfo at the temp dir so path helpers stay sandboxed.
    self.pathinfo = koji.PathInfo(self.tempdir)
    mock.patch('koji.pathinfo', new=self.pathinfo).start()
    self.hostcalls = kojihub.HostExports()
    self.context = mock.patch('kojihub.context').start()
    # Maven code paths under test require the EnableMaven option.
    self.context.opts = {'EnableMaven': True}
    mock.patch('kojihub.Host').start()
    self.Task = mock.patch('kojihub.Task').start()
    self.Task.return_value.assertHost = mock.MagicMock()
    self.get_build = mock.patch('kojihub.get_build').start()
    self.get_maven_build = mock.patch('kojihub.get_maven_build').start()
    self.get_archive_type = mock.patch('kojihub.get_archive_type').start()
    mock.patch('kojihub.lookup_name', new=self.my_lookup_name).start()
    mock.patch.object(kojihub.BuildRoot, 'load', new=self.my_buildroot_load).start()
    mock.patch('kojihub.import_archive_internal', new=self.my_import_archive_internal).start()
    mock.patch('kojihub._dml').start()
    mock.patch('kojihub._fetchSingle').start()
    mock.patch('kojihub.build_notification').start()
    mock.patch('kojihub.assert_policy').start()
    # Volume policy always resolves to the default volume in tests.
    mock.patch('kojihub.check_volume_policy', return_value={
        'id': 0,
        'name': 'DEFAULT'
    }).start()
    self.set_up_callbacks()
def buildroot_add_repos(self, dependencies):
    """Add a yum/dnf repository to the mock config for every dependency.

    *dependencies* maps a source (local result dir, "repofile://" path, or
    a Koji tag name) to the list of module metadata (mmd) objects coming
    from that source.  Rewrites and persists the mock config at the end.
    """
    self._load_mock_config()
    for source, mmds in dependencies.items():
        # If source starts with mock_resultdir, it means it is path to local
        # module build repository.
        if source.startswith(conf.mock_resultsdir):
            repo_name = os.path.basename(source)
            # strip the "module-" prefix from the repo name, if present
            if repo_name.startswith("module-"):
                repo_name = repo_name[7:]
            repo_dir = source
            baseurl = "file://" + repo_dir
        # If source starts with "repofile://", it is path to local /etc/yum.repos.d
        # repo file.
        elif source.startswith("repofile://"):
            # For the base module, we want to include all the `conf.base_module_repofiles`.
            if len(mmds) == 1 and mmds[0].get_module_name(
            ) in conf.base_module_names:
                for repofile in conf.base_module_repofiles:
                    self._add_repo_from_path(repofile)
                # Also set the platform_id.
                mmd = mmds[0]
                self.yum_conf = self.yum_conf.replace(
                    "$module_platform_id",
                    "%s:%s" % (mmd.get_module_name(), mmd.get_stream_name()))
            else:
                # Add repositories defined in repofile to mock config.
                repofile = source[len("repofile://"):]
                self._add_repo_from_path(repofile)
            # Enabled all the modular dependencies by default in Mock.
            for mmd in mmds:
                self.enabled_modules.append(
                    "%s:%s" % (mmd.get_module_name(), mmd.get_stream_name()))
            # repofile sources register themselves above, so skip the
            # generic _add_repo call at the bottom of the loop.
            continue
        else:
            # Otherwise the source is a Koji tag name.
            repo_name = tag = source
            koji_config = get_koji_config(self.config)
            koji_session = koji.ClientSession(koji_config.server, opts=koji_config)
            # Check to see if there are any external repos tied to the tag
            for ext_repo in koji_session.getTagExternalRepos(tag):
                self._add_repo(ext_repo["external_repo_name"], ext_repo["url"])
            repo = koji_session.getRepo(repo_name)
            if repo:
                # Tag has a generated repo on the hub; point at its arch subdir.
                baseurl = koji.PathInfo(topdir=koji_config.topurl).repo(
                    repo["id"], repo_name)
                baseurl = "{0}/{1}/".format(baseurl, self.arch)
            else:
                # No hub repo: build (or reuse) a local repo from the tag.
                repo_dir = os.path.join(self.config.cache_dir, "koji_tags", tag)
                should_add_repo = create_local_repo_from_koji_tag(
                    self.config, tag, repo_dir, [self.arch, "noarch"])
                if not should_add_repo:
                    continue
                baseurl = "file://" + repo_dir
        self._add_repo(repo_name, baseurl)
    self._write_mock_config()
def _build_srpm_url(self, rpm=None, build=None):
    """Return the download URL for *rpm* within *build*.

    :param rpm: koji rpm info dict (as accepted by PathInfo.rpm)
    :param build: koji build info dict (as accepted by PathInfo.build)
    :returns: full URL string rooted at self.topurl
    :raises ValueError: when no PathInfo is cached and self.topurl is not
        configured (previously this surfaced as an opaque AttributeError
        on None)
    """
    if self._pathinfo is None:
        if self.topurl is None:
            raise ValueError(
                "cannot build srpm URL: no cached PathInfo and no topurl configured")
        # Lazily create and cache the PathInfo helper for later calls.
        self._pathinfo = koji.PathInfo(topdir=self.topurl)
    # pathinfo.rpm() is relative to the build directory.
    srpm_path = self._pathinfo.rpm(rpm)
    base_path = self._pathinfo.build(build)
    return os.path.join(base_path, srpm_path)
def make_repos_for_target(self, target_info):
    """Return a one-element Repository list for the target's build tag,
    or None when the build tag has no repo."""
    repo_info = self.getRepo(target_info['build_tag'])
    if not repo_info:
        return None
    self.logger.debug("repo info: %s", str(repo_info))
    path_info = koji.PathInfo(topdir=self.options.topurl)
    repourl = path_info.repo(repo_info['id'], target_info['build_tag_name'])
    self.logger.debug("repo url: %s", repourl)
    # $arch is expanded later per build architecture.
    return [Repository("%s/$arch" % repourl)]
def get_last_srpm(koji_session, tag, name):
    """Return (rpm_info, url) for the newest SRPM of *name* in *tag*.

    The URL is relative to the configured srpm_relative_path_root.
    Returns None when the package has no tagged build or no src rpm.
    """
    topdir = get_config('koji_config.srpm_relative_path_root')
    rel_pathinfo = koji.PathInfo(topdir=topdir)
    tagged = koji_session.listTagged(tag, latest=True, package=name,
                                     inherit=True)
    if not tagged:
        return None
    src_rpms = koji_session.listRPMs(buildID=tagged[0]['build_id'],
                                     arches='src')
    if not src_rpms:
        return None
    url = rel_pathinfo.build(tagged[0]) + '/' + rel_pathinfo.rpm(src_rpms[0])
    return (src_rpms[0], url)
def get_koji_pathinfo(profile):
    """
    Return a Koji PathInfo object for our profile.

    :param str profile: profile name, like "koji" or "cbs"
    :returns: koji.PathInfo
    """
    profile_conf = koji.read_config(profile)
    # The profile's configured topurl becomes the PathInfo topdir.
    return koji.PathInfo(topdir=profile_conf['topurl'])
def get_cg_metadata_url(self, buildinfo):
    """Return the URL of a build's content-generator metadata.json.

    buildinfo may be either an int ID, a string NVR, or a map containing
    'name', 'version' and 'release'.

    Note: it doesn't check whether the metadata.json exists or not.
    """
    build_info = self.get_build(buildinfo)
    build_dir = koji.PathInfo(topdir=self.topurl).build(build_info)
    return build_dir + '/metadata.json'
def get_latest_repo(tag):
    """Find latest repo ID and its URL for tag name.

    \b
    Example of output:
    756330 https://kojipkgs.fedoraproject.org/repos/f27-build/756330
    """
    session = koji.ClientSession(KOJIHUB)
    # Empty topdir makes PathInfo produce server-relative paths.
    pathinfo = koji.PathInfo(topdir="")
    repo_info = session.getRepo(tag, state=koji.REPO_READY)
    latest_id = repo_info["id"]
    relative_path = pathinfo.repo(latest_id, tag)
    click.echo("{} {}".format(latest_id, urljoin(KOJIPKGS, relative_path)))
def download_archive(build, archive, topurl, quiet=False, noprogress=False):
    """Wrapper around download_file, do additional checks for archive files.

    Resolves the archive URL by btype (maven/win/image or the newer typedir
    layout), downloads it, then verifies size and checksum against the db
    metadata; on any mismatch the file is deleted and error() is invoked.
    """
    pi = koji.PathInfo(topdir=topurl)
    if archive['btype'] == 'maven':
        url = os.path.join(pi.mavenbuild(build), pi.mavenfile(archive))
        path = pi.mavenfile(archive)
    elif archive['btype'] == 'win':
        url = os.path.join(pi.winbuild(build), pi.winfile(archive))
        path = pi.winfile(archive)
    elif archive['btype'] == 'image':
        url = os.path.join(pi.imagebuild(build), archive['filename'])
        path = archive['filename']
    else:
        # non-legacy types are more systematic
        directory = pi.typedir(build, archive['btype'])
        url = os.path.join(directory, archive['filename'])
        path = archive['filename']
    download_file(url, path, quiet=quiet, noprogress=noprogress,
                  filesize=archive['size'])
    # check size
    if os.path.getsize(path) != archive['size']:
        os.unlink(path)
        # Fixed message: this is an archive, not an rpm.
        error("Downloaded archive %s size does not match db size, deleting" % path)
    # check checksum/checksum_type ('hasher' avoids shadowing builtin hash())
    if archive['checksum_type'] == koji.CHECKSUM_TYPES['md5']:
        hasher = md5_constructor()
    elif archive['checksum_type'] == koji.CHECKSUM_TYPES['sha1']:
        hasher = hashlib.sha1()  # nosec
    elif archive['checksum_type'] == koji.CHECKSUM_TYPES['sha256']:
        hasher = hashlib.sha256()
    else:
        # shouldn't happen
        error("Unknown checksum type: %s" % archive['checksum_type'])
    with open(path, "rb") as f:
        while True:
            chunk = f.read(1024 ** 2)
            hasher.update(chunk)
            if not chunk:
                break
    if hasher.hexdigest() != archive['checksum']:
        os.unlink(path)
        error("Downloaded archive %s doesn't match checksum, deleting" % path)
def get_last_srpm(koji_session, tag, name, relative=False, topdir=None):
    """Look up the newest SRPM of *name* in *tag*.

    :param relative: use the 'srpm_relative_path_root' config entry for the
        URL root instead of 'topurl'
    :param topdir: explicit URL root, overriding the config lookup
    :returns: (srpm_info, srpm_url) or None when nothing was found
    """
    if not topdir:
        config_key = 'srpm_relative_path_root' if relative else 'topurl'
        topdir = koji_session.config[config_key]
    pathinfo = koji.PathInfo(topdir=topdir)
    builds = koji_session.listTagged(tag, latest=True, package=name,
                                     inherit=True)
    if not builds:
        return None
    src_rpms = koji_session.listRPMs(buildID=builds[0]['build_id'],
                                     arches='src')
    if not src_rpms:
        return None
    return (src_rpms[0],
            pathinfo.build(builds[0]) + '/' + pathinfo.rpm(src_rpms[0]))
def __init__(self, tasker, workflow, target, hub, root):
    """
    constructor

    :param tasker: DockerTasker instance
    :param workflow: DockerBuildWorkflow instance
    :param target: string, koji target to use as a source
    :param hub: string, koji hub (xmlrpc)
    :param root: string, koji root (storage)
    """
    super(KojiPlugin, self).__init__(tasker, workflow)
    self.target = target
    # Anonymous hub session; pathinfo resolves storage URLs under root.
    self.pathinfo = koji.PathInfo(topdir=root)
    self.xmlrpc = koji.ClientSession(hub)
def setUp(self):
    """Sandbox koji.pathinfo and stub the DB/file helpers used by the
    code under test.

    Patches are started without explicit cleanup here; presumably
    mock.patch.stopall runs in tearDown -- TODO confirm.
    """
    self.tempdir = tempfile.mkdtemp()
    # Point koji.pathinfo at the temp dir so path helpers stay sandboxed.
    self.pathinfo = koji.PathInfo(self.tempdir)
    mock.patch('koji.pathinfo', new=self.pathinfo).start()
    # Route insert construction through self.getInsert so tests can inspect
    # what would be written instead of touching the DB.
    self.InsertProcessor = mock.patch('kojihub.InsertProcessor',
                                      side_effect=self.getInsert).start()
    self.inserts = []
    self.get_tag = mock.patch('kojihub.get_tag').start()
    self.get_event = mock.patch('kojihub.get_event').start()
    self.nextval = mock.patch('kojihub.nextval').start()
    self.copyfile = mock.patch('shutil.copyfile').start()
    # Canned return values for the stubbed lookups.
    self.get_tag.return_value = {'id': 42, 'name': 'tag'}
    self.get_event.return_value = 12345
    self.nextval.return_value = 99
def get_koji_archives(tag, package):
    """
    Get the list of archives to download from koji given a release tag
    and a package name.

    Only archives whose filename contains ".tar.xz" are returned, as full
    URLs under DOWNLOAD_URL.
    """
    session = koji.ClientSession("https://koji.fedoraproject.org/kojihub")
    tagged = session.listTagged(tag, latest=True, package=package, type="image")
    archives = session.listArchives(buildID=tagged[0]["id"], type="image")
    path_info = koji.PathInfo(topdir=DOWNLOAD_URL)
    # The image build directory is the same for every archive of the build.
    build_dir = path_info.imagebuild(tagged[0])
    return [
        f"{build_dir}/{archive['filename']}"
        for archive in archives
        if ".tar.xz" in archive["filename"]
    ]
def mock_nvr_downloads(build_info=None, archives=None, overrides=None):
    """Register mocked GET responses for every archive of a maven build.

    Defaults to DEFAULT_KOJI_BUILD / DEFAULT_ARCHIVES.  *overrides* maps an
    archive id to a dict that may customize 'status' and 'body' of that
    archive's response; otherwise status 200 and filename+group_id are used.
    """
    build_info = build_info or DEFAULT_KOJI_BUILD
    archives = archives or DEFAULT_ARCHIVES
    overrides = overrides or {}
    pi = koji.PathInfo(topdir=KOJI_ROOT)
    for archive in archives:
        url = '%s/%s' % (pi.mavenbuild(build_info), pi.mavenfile(archive))
        # Use any overrides for this archive ID
        per_archive = overrides.get(archive['id'], {})
        status = per_archive.get('status', 200)
        default_body = archive['filename'] + archive['group_id']
        body = per_archive.get('body', default_body)
        responses.add(responses.GET, url, body=body, status=status)
def run(self):
    """ Execute downloads """
    pathinfo = koji.PathInfo(topdir=self.koji.opts['topurl'])
    config = Config()
    urls = []
    for build in self._builds:
        try:
            info = self.koji.getBuild(build)
            rpms = self.koji.listRPMs(buildID=info['id'],
                                      arches=self._arches)
            # One URL per rpm, rooted at the build directory.
            for rpm in rpms:
                urls.append(pathinfo.build(info) + '/' + pathinfo.rpm(rpm))
        except Exception:
            # Unknown builds are reported and skipped, not fatal.
            print('SKIPPED: build {} does not exists'.format(build))
    for url in urls:
        download_packages(url, config.output_dir)
def process_build(build_id):
    """Download every rpm of a koji build.

    :param build_id: build id (anything int() accepts)
    :returns: result of download_urls() over the build's rpm URLs
    """
    build_id = int(build_id)
    pathinfo = koji.PathInfo(topdir=topurl)
    session = koji.ClientSession(server)
    rinfo = session.getBuild(build_id)
    rpms = session.listRPMs(buildID=build_id)
    urls = []
    for rpm in rpms:
        # Consistency fix: use the local pathinfo rooted at topurl rather
        # than the module-global koji.pathinfo.  PathInfo.rpm() yields a
        # build-relative path, so the resulting URL should be unchanged,
        # but this no longer depends on the global default configuration.
        fname = pathinfo.rpm(rpm)
        url = os.path.join(pathinfo.build(rinfo), fname)
        urls.append(url)
    return download_urls(urls)
def download_build(cls, session, build_id, destination, arches):
    """Downloads RPMs and logs of a Koji build.

    Args:
        session (koji.ClientSession): Active Koji session instance.
        build_id (str): Koji build ID.
        destination (str): Path where to download files to.
        arches (list): List of architectures to be downloaded.

    Returns:
        tuple: List of downloaded RPMs and list of downloaded logs.

    Raises:
        DownloadError: If download failed.
    """
    build = session.getBuild(build_id)
    # URLs are resolved against the session's configured topurl.
    pathinfo = koji.PathInfo(topdir=session.opts['topurl'])
    rpms: List[str] = []
    logs: List[str] = []
    os.makedirs(destination, exist_ok=True)
    for pkg in session.listBuildRPMs(build_id):
        if pkg['arch'] not in arches:
            continue
        # pathinfo.rpm() is relative to the build directory.
        rpmpath = pathinfo.rpm(pkg)
        local_path = os.path.join(destination, os.path.basename(rpmpath))
        # Skip duplicates so each file is fetched at most once.
        if local_path not in rpms:
            url = pathinfo.build(build) + '/' + rpmpath
            DownloadHelper.download_file(url, local_path)
            rpms.append(local_path)
    for logfile in session.getBuildLogs(build_id):
        # logfile['dir'] holds the arch subdirectory of the log --
        # presumably only per-arch logs are wanted here (TODO confirm).
        if logfile['dir'] not in arches:
            continue
        local_path = os.path.join(destination, logfile['name'])
        if local_path not in logs:
            # getBuildLogs paths are relative to the topdir.
            url = pathinfo.topdir + '/' + logfile['path']
            DownloadHelper.download_file(url, local_path)
            logs.append(local_path)
    return rpms, logs
def setUp(self):
    """Sandbox koji.pathinfo, stub name lookup and volume policy, and
    create a BUILDING image-build fixture for the tests."""
    self.tempdir = tempfile.mkdtemp()
    # Point koji.pathinfo at the temp dir so path helpers stay sandboxed.
    self.pathinfo = koji.PathInfo(self.tempdir)
    mock.patch('koji.pathinfo', new=self.pathinfo).start()
    mock.patch('kojihub.lookup_name', new=self.my_lookup_name).start()
    # Volume policy always resolves to the default volume in tests.
    self.check_volume_policy = mock.patch('kojihub.check_volume_policy',
                                          return_value={
                                              'id': 0,
                                              'name': 'DEFAULT'
                                          }).start()
    # Minimal build record in BUILDING state on the default volume.
    self.buildinfo = {
        'id': 137,
        'task_id': 'TASK_ID',
        'name': 'some-image',
        'version': '1.2.3.4',
        'release': '3',
        'epoch': None,
        'source': None,
        'state': koji.BUILD_STATES['BUILDING'],
        'volume_id': 0,
        'volume_name': 'DEFAULT',
    }
def refresh_package(self, package):
    """Fill in checksum/url/signature info for *package*.

    Serves from the local dnf cache when possible; otherwise resolves the
    source build via Koji and picks the best available download URL
    (signed rpm preferred, plain rpm + detached signature header as
    fallback).  Returns silently when the rpm cannot be resolved.

    NOTE(review): rpminfo/buildroot/task shapes are assumed from the koji
    XML-RPC API -- confirm against the hub version in use.
    """
    if self.tempdnfcache.get(str(package), None):
        # TODO: better handling of multiple properties
        package.chksum = self.tempdnfcache[str(package)].chksum
        package.url = self.tempdnfcache[str(package)].url
        package.source_name = self.tempdnfcache[str(package)].source_name
        return
    kojicli = koji.ClientSession('https://koji.fedoraproject.org/kojihub')
    pathinfo = koji.PathInfo('https://kojipkgs.fedoraproject.org')
    # TODO: We need to ensure to get signed package from koji, either by
    # direct download or by reconstructed RPM with detached signature
    rpminfo = kojicli.getRPM(package.to_rpmfname())
    if not rpminfo:
        return
    # we need to determinate the source package for the baseurl
    if not rpminfo.get('buildroot_id', None):
        return
    buildroot = kojicli.getBuildroot(rpminfo['buildroot_id'])
    task = kojicli.getTaskInfo(buildroot['task_id'], request=True)
    # tasks/6906/41136906/chkconfig-1.11-6.fc32.src.rpm
    srcpkg = parsePkg(task['request'][0].split('/')[-1])
    if not srcpkg:
        return
    # Candidate URLs: fully-signed rpm, plain rpm, detached sig header.
    signed_rpmfname = pathinfo.signed(package.to_dict(),
                                      self.buildinfo.get_fedora_keyid())
    signed_rpmurl = os.path.join(pathinfo.build(srcpkg.to_dict()),
                                 signed_rpmfname)
    rpmfname = pathinfo.rpm(package.to_dict())
    rpmurl = os.path.join(pathinfo.build(srcpkg.to_dict()), rpmfname)
    sighdr_rpmfname = pathinfo.sighdr(package.to_dict(),
                                      self.buildinfo.get_fedora_keyid())
    sighdr_rpmurl = os.path.join(pathinfo.build(srcpkg.to_dict()),
                                 sighdr_rpmfname)
    # Prefer the signed rpm; otherwise pair the plain rpm with its detached
    # signature header.
    # NOTE(review): the sighdr check tests the response's truthiness rather
    # than .ok -- equivalent for requests.Response, but confirm.
    if self.get_response(signed_rpmurl).ok:
        package.url = signed_rpmurl
    elif self.get_response(rpmurl).ok and self.get_response(sighdr_rpmurl):
        package.url = rpmurl
        package.sighdr_url = sighdr_rpmurl
def _mirror_rpms_for_build(self, koji_profile, builds, rpms):
    """Mirror the selected rpms of *builds* into self.output_dir.

    Only rpms matching self.arches (+noarch) and self.rpm_names are
    fetched (src rpms bypass the name filter).  Already-downloaded files
    that look complete are skipped.  Basenames of processed files are
    recorded in self.rpm_filenames.
    """
    topurl = koji_config.get(koji_profile, 'topurl')
    pathinfo = koji.PathInfo(topurl)
    # Index builds by id for per-rpm lookup below.
    builds = dict((build['build_id'], build) for build in builds)
    for rpm in rpms:
        if rpm['arch'] not in self.arches + ['noarch', 'src']:
            continue
        # src rpms are mirrored regardless of the name filter.
        if rpm['name'] not in self.rpm_names and rpm['arch'] != 'src':
            continue
        filename = os.path.join(self.output_dir,
                                os.path.basename(pathinfo.rpm(rpm)))
        if self.signed:
            key_id = GPG_KEY_ID
            if koji_profile == 'koji':
                key_id = EPEL_GPG_KEY_ID
            # For signed RPMs, the actual file we want to download is
            # always *bigger* than the size indicated in Koji. Koji only
            # tracks the original size of the RPM before signing.
            if os.path.exists(
                    filename) and os.path.getsize(filename) > rpm['size']:
                print('Skipping %s' % filename)
            else:
                url = os.path.join(pathinfo.build(builds[rpm['build_id']]),
                                   pathinfo.signed(rpm, key_id))
                print('Fetching %s' % url)
                fetch(url, filename)
        else:
            # For unsigned RPMs we can check the exact size here.
            if os.path.exists(filename) and os.path.getsize(
                    filename) == rpm['size']:
                print('Skipping %s' % filename)
            else:
                url = os.path.join(pathinfo.build(builds[rpm['build_id']]),
                                   pathinfo.rpm(rpm))
                print('Fetching %s' % url)
                fetch(url, filename)
        self.rpm_filenames.add(os.path.basename(filename))
def run(self):
    """ Execute downloads """
    pathinfo = koji.PathInfo(topdir=self.koji.opts['topurl'])
    urls = []
    for build in self._builds:
        try:
            info = self.koji.getBuild(build)
            rpms = self.koji.listRPMs(buildID=info['id'],
                                      arches=self._arches)
            # Keep only sub-packages that survive the exclusion filter.
            wanted = [pathinfo.rpm(rpm) for rpm in rpms
                      if not filter_subpkgs([rpm], self._exclude)]
            urls += [pathinfo.build(info) + '/' + fname for fname in wanted]
        except Exception:
            # Unknown builds are reported and skipped, not fatal.
            print('SKIPPED: build {} does not exists'.format(build))
    packages_dir = os.path.join(self._config.output_dir, "Packages")
    if not os.path.exists(packages_dir):
        os.makedirs(packages_dir)
    for url in urls:
        download_packages(url, packages_dir)
# This copyrighted material is made available to anyone wishing # to use, modify, copy, or redistribute it subject to the terms # and conditions of the GNU General Public License version 2. """ This tool downloads the latest build from koji based on the tag, rpm name and download to location provided as arguments. """ import koji import os.path import urllib2 import argparse import json pathinfo = koji.PathInfo(topdir='https://kojipkgs.fedoraproject.org') koji = koji.ClientSession('http://koji.fedoraproject.org/kojihub') parser = argparse.ArgumentParser(description='This tool downloads the latest \ builds from koji based on its tag, rpm name \ and download to location provided as part of \ its arguments.') # Parse options required = parser.add_argument_group('required arguments') required.add_argument('--koji-tag', help='koji tag', required=True) required.add_argument('--pkg', help='pkg name', required=True) parser.add_argument('--arch', help='Machine arch. Defaults to all it not \ provided') required.add_argument('--location', help='Absolute path of download \ to directory', required=True) args = parser.parse_args()