def _download_source(cls, tool, url, package, filename, hashtype, hsh, target=None):
    if target is None:
        target = os.path.basename(filename)
    if os.path.exists(target):
        if cls._hash(target, hashtype) == hsh:
            # nothing to do
            return
        else:
            os.unlink(target)
    if tool == 'fedpkg':
        url = '{0}/{1}/{2}/{3}/{4}/{2}'.format(url, package, filename, hashtype, hsh)
    else:
        url = '{0}/{1}/{2}/{3}/{2}'.format(url, package, filename, hsh)
    try:
        DownloadHelper.download_file(url, target)
    except DownloadError as e:
        raise LookasideCacheError(six.text_type(e))

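# Illustration only of the two URL layouts built by _download_source above;
# the base URL, package name, filename and hash below are made-up example data.
_base = 'https://pkgs.example.org/repo/pkgs'
_pkg, _fname, _htype, _hsh = 'foo', 'foo-1.0.tar.gz', 'sha512', 'abc123'
_fedpkg_url = '{0}/{1}/{2}/{3}/{4}/{2}'.format(_base, _pkg, _fname, _htype, _hsh)
# -> https://pkgs.example.org/repo/pkgs/foo/foo-1.0.tar.gz/sha512/abc123/foo-1.0.tar.gz
_legacy_url = '{0}/{1}/{2}/{3}/{2}'.format(_base, _pkg, _fname, _hsh)
# -> https://pkgs.example.org/repo/pkgs/foo/foo-1.0.tar.gz/abc123/foo-1.0.tar.gz
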
def _get_lookaside_sources(self) -> List[Dict[str, str]]:
    """
    Read "sources" file from the dist-git repo and return a list of dicts
    with path and url to sources stored in the lookaside cache
    """
    pkg_tool = self.pkg_tool or self.config.pkg_tool
    try:
        config = LookasideCacheHelper._read_config(pkg_tool)
        base_url = config["lookaside"]
    except (configparser.Error, KeyError) as e:
        raise LookasideCacheError("Failed to read rpkg configuration") from e

    package = self.pkg_name
    basepath = self.dist_git.working_dir

    sources = []
    for source in LookasideCacheHelper._read_sources(basepath):
        url = "{0}/rpms/{1}/{2}/{3}/{4}/{2}".format(
            base_url,
            package,
            source["filename"],
            source["hashtype"],
            source["hash"],
        )
        path = source["filename"]
        sources.append({"path": path, "url": url})

    return sources

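# A minimal standalone sketch (not the project's _read_sources) of parsing one
# BSD-style "sources" entry such as "SHA512 (foo-1.0.tar.gz) = abc123"; a real
# helper would also need to cope with the old "<md5hash>  <filename>" format.
import re

def parse_sources_line(line):
    m = re.match(r'^(\w+) \((\S+)\) = (\w+)$', line.strip())
    if not m:
        raise ValueError('Unrecognized sources line: {!r}'.format(line))
    hashtype, filename, hsh = m.groups()
    # keys mirror the dicts consumed by _get_lookaside_sources above
    return {'hashtype': hashtype.lower(), 'filename': filename, 'hash': hsh}

# parse_sources_line('SHA512 (foo-1.0.tar.gz) = abc123')
# -> {'hashtype': 'sha512', 'filename': 'foo-1.0.tar.gz', 'hash': 'abc123'}
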
def update_sources(cls, tool, basepath, package, old_sources, new_sources, upload=True):
    try:
        config = cls._read_config(tool)
        url = config['lookaside_cgi']
        hashtype = config['lookasidehash']
    except (configparser.Error, KeyError):
        raise LookasideCacheError('Failed to read rpkg configuration')
    uploaded = []
    sources = cls._read_sources(basepath)
    for idx, src in enumerate(old_sources):
        indexes = [i for i, s in enumerate(sources) if s['filename'] == src]
        if indexes:
            filename = new_sources[idx]
            if filename == src:
                # no change
                continue
            hsh = cls._hash(filename, hashtype)
            if upload:
                cls._upload_source(url, package, filename, hashtype, hsh)
            uploaded.append(filename)
            sources[indexes[0]] = dict(hash=hsh, filename=filename, hashtype=hashtype)
    cls._write_sources(basepath, sources)
    return uploaded

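# Hypothetical data showing how update_sources pairs old_sources and
# new_sources positionally (all filenames and hashes below are made up):
# replacing foo-1.0.tar.gz with foo-1.1.tar.gz rewrites the matching entry in
# the parsed sources list, uploads the new tarball when upload=True, and
# returns ['foo-1.1.tar.gz'].
old_sources = ['foo-1.0.tar.gz']
new_sources = ['foo-1.1.tar.gz']
sources_before = [{'filename': 'foo-1.0.tar.gz', 'hashtype': 'sha512', 'hash': 'abc123'}]
sources_after = [{'filename': 'foo-1.1.tar.gz', 'hashtype': 'sha512', 'hash': '<sha512 of the new file>'}]
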
def post(check_only=False):
    def _post(url, data, headers):
        try:
            # try to authenticate using opportunistic auth first
            return requests.post(url, data=data, headers=headers,
                                 auth=requests_gssapi.HTTPSPNEGOAuth(opportunistic_auth=True))
        except requests_gssapi.exceptions.SPNEGOExchangeError:
            return requests.post(url, data=data, headers=headers,
                                 auth=requests_gssapi.HTTPSPNEGOAuth())

    cd = ChunkedData(check_only)
    if 'devel.redhat.com' in url:
        # the only server that properly handles chunked POST requests
        r = _post(url, cd, cd.headers)
    else:
        fp = FakeProgress(check_only)
        fp.start()
        try:
            r = _post(url, cd.data, cd.headers)
        finally:
            fp.stop()
    if not 200 <= r.status_code < 300:
        raise LookasideCacheError('{0}: {1}'.format(r.reason, r.text.strip()))
    return r.content

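# Background sketch for the chunked/non-chunked split above: requests uses
# "Transfer-Encoding: chunked" when `data` is an iterator or file-like object
# of unknown length, and a plain Content-Length body when given bytes.
# ChunkedData and FakeProgress come from the surrounding module; this
# standalone example only shows the two call shapes with a made-up payload.
import requests

def _upload_example(url, payload, headers, chunked=False):
    if chunked:
        # a generator makes requests stream the body in chunks
        return requests.post(url, data=iter([payload]), headers=headers)
    # plain bytes are sent in one piece with a Content-Length header
    return requests.post(url, data=payload, headers=headers)
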
def download(cls, tool, basepath, package):
    try:
        config = cls._read_config(tool)
        url = config['lookaside']
    except (configparser.Error, KeyError):
        raise LookasideCacheError('Failed to read rpkg configuration')
    for source in cls._read_sources(basepath):
        cls._download_source(tool, url, package, source['filename'],
                             source['hashtype'], source['hash'])

def download(cls, tool, basepath, package, target_dir=None):
    try:
        config = cls._read_config(tool)
        url = config['lookaside']
    except (configparser.Error, KeyError) as e:
        raise LookasideCacheError('Failed to read rpkg configuration') from e
    for source in cls._read_sources(basepath):
        # fall back to the current directory when no target_dir is given,
        # otherwise os.path.join() would fail on the None default
        target = os.path.join(target_dir or '', source['filename'])
        cls._download_source(tool, url, package, source['filename'],
                             source['hashtype'], source['hash'], target)

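# Hypothetical usage of the target_dir variant above, assuming download lives
# on LookasideCacheHelper as the other helpers here suggest (tool, package
# name and directory are made up): fetch every file listed in ./sources into
# a dedicated directory instead of the current working directory.
#
# LookasideCacheHelper.download('fedpkg', '.', 'foo', target_dir='lookaside-sources')
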
def _hash(cls, filename, hashtype):
    try:
        chksum = hashlib.new(hashtype)
    except ValueError:
        raise LookasideCacheError('Unsupported hash type \'{}\''.format(hashtype))
    with open(filename, 'rb') as f:
        chunk = f.read(8192)
        while chunk:
            chksum.update(chunk)
            chunk = f.read(8192)
    return chksum.hexdigest()

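# Equivalent standalone sketch of the chunked hashing done by _hash, using
# hashlib directly; the filename and hash type are placeholders.
import hashlib

def file_digest(filename, hashtype='sha512', blocksize=8192):
    chksum = hashlib.new(hashtype)
    with open(filename, 'rb') as f:
        # read in fixed-size blocks so large tarballs are never held in memory at once
        for chunk in iter(lambda: f.read(blocksize), b''):
            chksum.update(chunk)
    return chksum.hexdigest()
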
def post(check_only=False):
    cd = ChunkedData(check_only)
    if 'src.fedoraproject.org' in url:
        # src.fedoraproject.org seems to have trouble with chunked requests, don't even try
        fp = FakeProgress(check_only)
        fp.start()
        try:
            r = requests.post(url, data=cd.data, headers=cd.headers, auth=auth)
        finally:
            fp.stop()
    else:
        r = requests.post(url, data=cd, headers=cd.headers, auth=auth)
    if not 200 <= r.status_code < 300:
        raise LookasideCacheError('{0}: {1}'.format(r.reason, r.text.strip()))
    return r.content

def post(check_only=False):
    cd = ChunkedData(check_only)
    r = requests.post(url, data=cd, headers=cd.headers, auth=auth)
    if not 200 <= r.status_code < 300:
        raise LookasideCacheError(r.reason)
    return r.content