def add(self, artifact):
    if not set(SUPPORTED_HASH_ALGORITHMS).intersection(artifact):
        raise ValueError('Cannot cache artifact without checksum')

    if self.is_cached(artifact):
        raise CekitError('Artifact is already cached')

    artifact_id = str(uuid.uuid4())

    artifact_file = os.path.expanduser(
        os.path.join(self._cache_dir, artifact_id))

    if not os.path.exists(artifact_file):
        artifact.guarded_copy(artifact_file)

    cache_entry = {
        'names': [artifact['name']],
        'cached_path': artifact_file
    }

    for alg in SUPPORTED_HASH_ALGORITHMS:
        if alg in artifact:
            # Verify the checksum supplied with the artifact against the cached file
            if not check_sum(artifact_file, alg, artifact[alg]):
                raise CekitError('Artifact contains invalid checksum!')
            chksum = artifact[alg]
        else:
            # No checksum supplied for this algorithm, compute it from the cached file
            chksum = get_sum(artifact_file, alg)

        cache_entry.update({alg: chksum})

    self._update_cache(cache_entry, artifact_id)

    return artifact_id
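# The add() method above relies on get_sum() and check_sum() helpers. As an
# illustration only, they can be assumed to behave like the hashlib-based
# sketch below; this is not CEKit's actual implementation.
import hashlib

def get_sum(target, algorithm):
    # Stream the file in chunks so large artifacts do not exhaust memory
    hash_function = hashlib.new(algorithm)
    with open(target, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            hash_function.update(chunk)
    return hash_function.hexdigest()

def check_sum(target, algorithm, expected):
    # The artifact is valid when the computed digest equals the declared one
    return get_sum(target, algorithm) == expected.lower()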
def add(self, artifact):
    if not set(SUPPORTED_HASH_ALGORITHMS).intersection(artifact):
        raise ValueError('Cannot cache artifact without checksum')

    if self.cached(artifact):
        raise CekitError('Artifact is already cached!')

    artifact_id = str(uuid.uuid4())

    artifact_file = os.path.expanduser(
        os.path.join(self.cache_dir, artifact_id))

    if not os.path.exists(artifact_file):
        artifact.guarded_copy(artifact_file)

    cache_entry = {
        'names': [artifact['name']],
        'cached_path': artifact_file
    }

    # We should populate the cache entry with checksums for all supported algorithms
    for alg in SUPPORTED_HASH_ALGORITHMS:
        cache_entry.update({alg: get_sum(artifact_file, alg)})

    self._update_cache(cache_entry, artifact_id)

    return artifact_id
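# A minimal usage sketch for add(), assuming an artifact mapping that carries a
# 'name', at least one checksum key, and a guarded_copy() method. The names
# below (ArtifactCache, my_artifact) are hypothetical stand-ins for CEKit's
# actual resource and cache objects:
#
#   cache = ArtifactCache()
#   my_artifact['md5'] = 'd31c6b1525e6d2d24062ef26a9f639a8'
#   artifact_id = cache.add(my_artifact)
#
# The resulting cache entry then looks roughly like:
#
#   {'names': ['jolokia.jar'],
#    'cached_path': '~/.cekit/cache/<uuid>',
#    'md5': '...', 'sha1': '...', 'sha256': '...'}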
def prepare_artifacts(self):
    """Goes through the artifacts section of the image descriptor
    and fetches all of them.
    """
    logger.info("Handling artifacts for OSBS...")

    target_dir = os.path.join(self.target, 'image')
    fetch_artifacts_url = []
    url_description = {}

    for image in self.images:
        for artifact in image.all_artifacts:
            logger.info("Preparing artifact '{}' (of type {})".format(
                artifact['name'], type(artifact)))

            if isinstance(artifact, _UrlResource):
                intersected_hash = [
                    x for x in crypto.SUPPORTED_HASH_ALGORITHMS if x in artifact]
                logger.debug("Found checksum markers of {}".format(intersected_hash))

                if not intersected_hash:
                    logger.warning(
                        "No checksum supplied for {}, calculating md5 from the remote artifact".format(
                            artifact['url']))
                    intersected_hash = ["md5"]
                    # Download the artifact to a temporary file just to compute its md5
                    tmpfile = tempfile.NamedTemporaryFile()
                    try:
                        artifact.download_file(artifact['url'], tmpfile.name)
                        artifact["md5"] = crypto.get_sum(tmpfile.name, "md5")
                    finally:
                        tmpfile.close()

                fetch_artifacts_url.append({'url': artifact['url'],
                                            'target': artifact['target']})
                # Attach all known checksums to the entry we just appended
                for c in intersected_hash:
                    fetch_artifacts_url[-1].update({c: artifact[c]})

                if 'description' in artifact:
                    url_description[artifact['url']] = artifact['description']

                logger.debug(
                    "Artifact '{}' (as URL) added to fetch-artifacts-url.yaml".format(
                        artifact['target']))

                # OSBS by default downloads all artifacts to artifacts/<target_path>
                artifact['target'] = os.path.join('artifacts', artifact['target'])

            elif isinstance(artifact, _PlainResource) and config.get('common', 'redhat'):
                try:
                    fetch_artifacts_url.append({'md5': artifact['md5'],
                                                'url': get_brew_url(artifact['md5']),
                                                'target': artifact['target']})
                    logger.debug(
                        "Artifact '{}' (as plain) added to fetch-artifacts-url.yaml".format(
                            artifact['target']))
                    # OSBS by default downloads all artifacts to artifacts/<target_path>
                    artifact['target'] = os.path.join('artifacts', artifact['target'])
                except Exception:
                    logger.warning(
                        "Plain artifact {} could not be found in Brew, trying to handle it using lookaside cache".format(
                            artifact['name']))
                    artifact.copy(target_dir)
                    # TODO: This is ugly, rewrite this!
                    artifact['lookaside'] = True

            else:
                artifact.copy(target_dir)

    fetch_artifacts_file = os.path.join(self.target, 'image', 'fetch-artifacts-url.yaml')

    if fetch_artifacts_url:
        with open(fetch_artifacts_file, 'w') as _file:
            yaml.safe_dump(fetch_artifacts_url, _file, default_flow_style=False)

        if config.get('common', 'redhat'):
            # Append each artifact's description as an inline comment after its URL
            # so build references survive in the generated YAML
            for key, value in url_description.items():
                logger.debug("Adding build reference for {} -> {}".format(key, value))
                for line in fileinput.input(fetch_artifacts_file, inplace=True):
                    line = line.replace(key, key + ' # ' + value)
                    sys.stdout.write(line)

    logger.debug("Artifacts handled")
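# For illustration, the generated fetch-artifacts-url.yaml is a plain list of
# url/target/checksum entries. A file produced by prepare_artifacts() could
# look roughly like this (all values made up):
#
#   - md5: d31c6b1525e6d2d24062ef26a9f639a8
#     target: jolokia.jar
#     url: https://example.com/jolokia.jar # Jolokia JMX agent
#
# The trailing '# Jolokia JMX agent' comment is what the fileinput rewrite
# appends for artifacts that carry a 'description'.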