def prepare_artifacts(self):
    """Fetch all artifacts listed in the image descriptor.

    Goes through the 'artifacts' section (if present) and copies every
    artifact into the target image directory. A plain (md5-only)
    resource that is not yet cached is resolved via a Brew URL on
    Red Hat configurations; otherwise fetching it is impossible and a
    CekitError is raised asking the user to cache it manually.

    Raises:
        CekitError: when a plain artifact is neither cached nor
            resolvable via Brew.
    """
    if 'artifacts' not in self.image:
        logger.debug("No artifacts to fetch")
        return

    logger.info("Handling artifacts...")
    target_dir = os.path.join(self.target, 'image')

    # The cache handle is loop-invariant: create it once, not per artifact.
    artifact_cache = ArtifactCache()

    for artifact in self.image['artifacts']:
        if isinstance(artifact, _PlainResource) and \
                not artifact_cache.is_cached(artifact):
            if config.get('common', 'redhat'):
                # On Red Hat setups a plain artifact can be fetched from
                # Brew, addressed by its md5 checksum.
                artifact.url = get_brew_url(artifact['md5'])
            else:
                if 'description' in artifact:
                    logger.error(
                        "Cannot fetch Artifact: '%s', %s" % (artifact['name'],
                                                             artifact['description']))
                raise CekitError(
                    "Cannot fetch Artifact: '%s', please cache it via cekit-cache."
                    % artifact['name'])

        artifact.copy(target_dir)

    logger.debug("Artifacts handled")
def run(self):
    """Entry point of the cekit-cache CLI.

    Loads configuration, sets verbosity, then dispatches on the
    sub-command: 'add' caches a new artifact, 'ls' lists cached
    artifacts, 'rm' removes one by uuid. Exits the process with 0 on
    success and 1 on failure.
    """
    tools.cfg = tools.get_cfg(self.args.config)

    # Verbosity is controlled by the -v/--verbose switch.
    if self.args.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if self.args.work_dir:
        tools.cfg['common']['work_dir'] = self.args.work_dir

    # The sub-commands are mutually exclusive, so dispatch with elif
    # instead of a chain of independent if statements.
    if self.args.cmd == 'add':
        artifact_cache = ArtifactCache()

        # Build a resource descriptor from the URL plus any checksums
        # supplied on the command line.
        resource = {'url': self.args.url}
        for alg in SUPPORTED_HASH_ALGORITHMS:
            val = getattr(self.args, alg)
            if val:
                resource[alg] = val

        artifact = Resource(resource)

        if artifact_cache.is_cached(artifact):
            print('Artifact is already cached!')
            sys.exit(0)

        try:
            artifact_id = artifact_cache.add(artifact)
            if self.args.verbose:
                print(artifact_id)
        except Exception as ex:
            if self.args.verbose:
                traceback.print_exc()
            else:
                print(ex)
            sys.exit(1)

    elif self.args.cmd == 'ls':
        self.list()

    elif self.args.cmd == 'rm':
        try:
            artifact_cache = ArtifactCache()
            artifact_cache.delete(self.args.uuid)
            print("Artifact removed")
        except Exception:
            # Deleting an unknown uuid is reported, not traced.
            print("Artifact doesn't exist")
            sys.exit(1)

    sys.exit(0)
class Resource(Descriptor):
    """Base class for artifacts referenced by an image descriptor.

    Instantiating ``Resource`` acts as a factory: the concrete subclass
    is selected from the keys present in the descriptor dictionary
    ('path', 'url', 'git' or 'md5'). Subclasses implement the actual
    fetch in :meth:`_copy_impl`.
    """

    # Global switch: when False, checksum verification of fetched
    # artifacts is skipped entirely.
    CHECK_INTEGRITY = True

    def __new__(cls, resource, **kwargs):
        if cls is Resource:
            # Factory dispatch on descriptor keys; order matters
            # ('path' wins over 'url', etc.).
            if 'path' in resource:
                return super(Resource, cls).__new__(_PathResource)
            elif 'url' in resource:
                return super(Resource, cls).__new__(_UrlResource)
            elif 'git' in resource:
                return super(Resource, cls).__new__(_GitResource)
            elif 'md5' in resource:
                return super(Resource, cls).__new__(_PlainResource)
            raise CekitError("Resource type is not supported: %s" % resource)
        # Direct instantiation of a concrete subclass: without this the
        # method fell through and returned None, so the "instance" was
        # None and __init__ was never called.
        return super(Resource, cls).__new__(cls)

    def __init__(self, descriptor):
        """Validate and initialize the resource from its descriptor dict.

        At least one of 'git', 'path', 'url' or 'md5' must be present
        (enforced by the schema assert below).
        """
        # Schema used by the Descriptor base class for validation.
        # NOTE: the assert previously read "val['url]" (missing closing
        # quote), which made the assertion expression invalid.
        self.schemas = [
            yaml.safe_load("""
map:
  name: {type: str}
  git:
    map:
      url: {type: str, required: True}
      ref: {type: str}
  path: {type: str, required: False}
  url: {type: str, required: False}
  md5: {type: str}
  sha1: {type: str}
  sha256: {type: str}
  description: {type: str}
  target: {type: str}
assert: "val['git'] is not None or val['path'] is not None or val['url'] is not None or val['md5'] is not None"
""")]

        super(Resource, self).__init__(descriptor)

        # Checksums identify the artifact, so they must never be merged
        # with values coming from another descriptor.
        self.skip_merging = ['md5', 'sha1', 'sha256']

        # Forwarded import to prevent circular imports.
        from cekit.cache.artifact import ArtifactCache
        self.cache = ArtifactCache()

        self.name = descriptor['name']
        self.description = None
        if 'description' in descriptor:
            self.description = descriptor['description']

    def __eq__(self, other):
        # All subclasses of Resource are considered the same object type;
        # equality is defined by the resource name only.
        if isinstance(other, Resource):
            return self['name'] == other['name']
        return NotImplemented

    def __ne__(self, other):
        # All subclasses of Resource are considered the same object type.
        if isinstance(other, Resource):
            return not self['name'] == other['name']
        return NotImplemented

    def _copy_impl(self, target):
        """Subclass hook that performs the actual fetch/copy to target."""
        raise NotImplementedError("Implement _copy_impl() for Resource: "
                                  + self.__module__ + "."
                                  + type(self).__name__)

    def target_file_name(self):
        """Return the file name the resource is stored under, deriving
        it from the resource name when no explicit 'target' is set."""
        if 'target' not in self:
            self['target'] = os.path.basename(self.name)
        return self['target']

    def copy(self, target=None):
        """Copy the resource into *target* (current directory by default).

        Prefers a valid local copy, then the artifact cache; only when
        caching fails with a ValueError does it fall back to a direct,
        verified fetch via guarded_copy().

        Returns the path of the copied resource.
        """
        if target is None:
            # Resolved at call time: a signature default of os.getcwd()
            # would be frozen once, at import time.
            target = os.getcwd()

        if os.path.isdir(target):
            target = os.path.join(target, self.target_file_name())

        logger.debug("Preparing resource '%s'" % (self.name))

        if os.path.exists(target) and self.__verify(target):
            logger.debug("Local resource '%s' exists and is valid, skipping" % self.name)
            return target

        if self.cache.is_cached(self):
            cached_resource = self.cache.get(self)
            shutil.copy(cached_resource['cached_path'], target)
            logger.info("Using cached artifact '%s'." % self.name)
        else:
            try:
                self.cache.add(self)
                cached_resource = self.cache.get(self)
                shutil.copy(cached_resource['cached_path'], target)
                logger.info("Using cached artifact '%s'." % self.name)
            except ValueError:
                return self.guarded_copy(target)

        # Return the path consistently; previously the cached paths
        # implicitly returned None.
        return target

    def guarded_copy(self, target):
        """Copy via _copy_impl(), converting any failure into a
        CekitError, then verify checksums when any are defined."""
        try:
            self._copy_impl(target)
        except Exception as ex:
            # logger.warn() is deprecated in favour of warning().
            logger.warning(
                "Cekit is not able to fetch resource '%s' automatically. "
                "You can manually place required artifact as '%s'" % (self.name, target))

            if self.description:
                logger.info(self.description)

            # The exception is fatal; it will be logged before CEKit dies.
            raise CekitError(
                "Error copying resource: '%s'. See logs for more info."
                % self.name, ex)

        if set(SUPPORTED_HASH_ALGORITHMS).intersection(self) and \
                not self.__verify(target):
            raise CekitError('Artifact verification failed!')

        return target

    def __verify(self, target):
        """Check all defined checksums for an artifact.

        Returns True when every defined checksum matches, when integrity
        checking is disabled, or when the target is a directory; False
        when no checksum is defined at all or any checksum mismatches.
        """
        if not set(SUPPORTED_HASH_ALGORITHMS).intersection(self):
            logger.debug("Artifact '%s' lacks any checksum definition."
                         % self.name)
            return False
        if not Resource.CHECK_INTEGRITY:
            logger.info("Integrity checking disabled, skipping verification.")
            return True
        if os.path.isdir(target):
            logger.info("Target is directory, cannot verify checksum.")
            return True
        for algorithm in SUPPORTED_HASH_ALGORITHMS:
            if algorithm in self:
                if not check_sum(target, algorithm, self[algorithm], self['name']):
                    return False
        return True

    def __substitute_cache_url(self, url):
        """Rewrite *url* to point at the configured cacher, substituting
        the #filename#, #algorithm# and #hash# placeholders. Returns the
        original url when no cacher is configured or no checksum is
        available."""
        cache = config.get('common', 'cache_url')
        if not cache:
            return url
        for algorithm in SUPPORTED_HASH_ALGORITHMS:
            if algorithm in self:
                logger.debug("Using %s to fetch artifacts from cacher." % algorithm)
                return (cache.replace('#filename#', self.name)
                             .replace('#algorithm#', algorithm)
                             .replace('#hash#', self[algorithm]))
        return url

    def _download_file(self, url, destination, use_cache=True):
        """Download a file from *url* and save it as *destination*.

        Supports file:// URLs (and bare paths) by plain copy, and
        http(s) with optional SSL verification controlled by the
        'common.ssl_verify' setting.

        Raises:
            CekitError: on a non-200 response or an unsupported scheme.
        """
        if use_cache:
            url = self.__substitute_cache_url(url)

        logger.debug("Downloading from '%s' as %s" % (url, destination))

        parsed_url = urlparse(url)

        if parsed_url.scheme == 'file' or not parsed_url.scheme:
            if os.path.isdir(parsed_url.path):
                shutil.copytree(parsed_url.path, destination)
            else:
                shutil.copy(parsed_url.path, destination)
        elif parsed_url.scheme in ['http', 'https']:
            verify = config.get('common', 'ssl_verify')
            if str(verify).lower() == 'false':
                verify = False

            ctx = ssl.create_default_context()
            if not verify:
                ctx.check_hostname = False
                ctx.verify_mode = ssl.CERT_NONE

            res = urlopen(url, context=ctx)
            try:
                if res.getcode() != 200:
                    raise CekitError("Could not download file from %s" % url)
                with open(destination, 'wb') as f:
                    while True:
                        chunk = res.read(1024)
                        if not chunk:
                            break
                        f.write(chunk)
            finally:
                # Always release the connection, even when the status
                # check raises (previously leaked on that path).
                res.close()
        else:
            raise CekitError("Unsupported URL scheme: %s" % (url))