def run(self):
    tools.cfg = tools.get_cfg(self.args.config)

    if self.args.verbose:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if self.args.work_dir:
        tools.cfg['common']['work_dir'] = self.args.work_dir

    if self.args.cmd == 'add':
        artifact_cache = ArtifactCache()

        resource = {}
        resource['url'] = self.args.url

        for alg in SUPPORTED_HASH_ALGORITHMS:
            val = getattr(self.args, alg)
            if val:
                resource[alg] = val

        artifact = Resource(resource)

        if artifact_cache.is_cached(artifact):
            print('Artifact is already cached!')
            sys.exit(0)

        try:
            artifact_id = artifact_cache.add(artifact)
            if self.args.verbose:
                print(artifact_id)
        except Exception as ex:
            if self.args.verbose:
                traceback.print_exc()
            else:
                print(ex)
            sys.exit(1)

    if self.args.cmd == 'ls':
        self.list()

    if self.args.cmd == 'rm':
        try:
            artifact_cache = ArtifactCache()
            artifact_cache.delete(self.args.uuid)
            print("Artifact removed")
        except Exception:
            print("Artifact doesn't exist")
            sys.exit(1)

    sys.exit(0)
def __init__(self, descriptor):
    self.schemas = [yaml.safe_load("""
        map:
          name: {type: str}
          git:
            map:
              url: {type: str, required: True}
              ref: {type: str}
          path: {type: str, required: False}
          url: {type: str, required: False}
          md5: {type: str}
          sha1: {type: str}
          sha256: {type: str}
          sha512: {type: str}
          description: {type: str}
          target: {type: str}
        assert: \"val['git'] is not None or val['path'] is not None or val['url'] is not None or val['md5'] is not None\"""")]

    super(Resource, self).__init__(descriptor)
    self.skip_merging = ['md5', 'sha1', 'sha256', 'sha512']

    # forwarded import to prevent circular imports
    from cekit.cache.artifact import ArtifactCache
    self.cache = ArtifactCache()

    self.name = descriptor['name']

    self.description = None
    if 'description' in descriptor:
        self.description = descriptor['description']
def add(self, location, md5, sha1, sha256, sha512):
    artifact_cache = ArtifactCache()

    resource = {}
    resource['url'] = location

    if md5:
        resource['md5'] = md5
    if sha1:
        resource['sha1'] = sha1
    if sha256:
        resource['sha256'] = sha256
    if sha512:
        resource['sha512'] = sha512

    artifact = create_resource(resource)

    cached = artifact_cache.cached(artifact)

    if cached:
        click.echo("Artifact {} is already cached!".format(location))
        sys.exit(0)

    try:
        artifact_id = artifact_cache.add(artifact)
        click.echo("Artifact {} cached with UUID '{}'".format(location, artifact_id))
    except Exception as ex:  # pylint: disable=broad-except
        click.secho("Cannot cache artifact {}: {}".format(location, str(ex)), fg='red')
        sys.exit(1)
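# Illustrative sketch only (not part of the CLI module above): the same cache
# flow can be driven programmatically. This assumes the `ArtifactCache` and
# `create_resource` helpers already used by the `add` command above; the URL
# and MD5 checksum below are placeholders.
def cache_example():
    resource = create_resource({'url': 'https://example.com/artifact.jar',
                                'md5': 'd41d8cd98f00b204e9800998ecf8427e'})
    artifact_cache = ArtifactCache()
    # Only add the artifact when it is not cached yet; return its UUID if added
    if not artifact_cache.cached(resource):
        return artifact_cache.add(resource)
    return None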
def prepare_artifacts(self):
    """Goes through the artifacts section of the image descriptor
    and fetches all of them
    """
    if 'artifacts' not in self.image:
        logger.debug("No artifacts to fetch")
        return

    logger.info("Handling artifacts...")
    target_dir = os.path.join(self.target, 'image')

    for artifact in self.image['artifacts']:
        artifact_cache = ArtifactCache()

        if isinstance(artifact, _PlainResource):
            if artifact_cache.is_cached(artifact):
                pass
            elif config.get('common', 'redhat'):
                # Not cached, but on Red Hat infrastructure we can fall back
                # to fetching the artifact from Brew by its MD5 checksum
                artifact.url = get_brew_url(artifact['md5'])
            else:
                if 'description' in artifact:
                    logger.error("Cannot fetch Artifact: '%s', %s"
                                 % (artifact['name'], artifact['description']))
                raise CekitError(
                    "Cannot fetch Artifact: '%s', please cache it via cekit-cache."
                    % artifact['name'])

        artifact.copy(target_dir)

    logger.debug("Artifacts handled")
def rm(self, uuid):
    artifact_cache = ArtifactCache()

    try:
        artifact_cache.delete(uuid)
        click.echo("Artifact with UUID '{}' removed".format(uuid))
    except Exception:  # pylint: disable=broad-except
        click.secho("Artifact with UUID '{}' doesn't exist in the cache".format(uuid),
                    fg='yellow')
        sys.exit(1)
def list(self):
    artifact_cache = ArtifactCache()
    artifacts = artifact_cache.list()

    if artifacts:
        print("Cached artifacts:")
        for artifact_id, artifact in artifacts.items():
            print("%s:" % artifact_id)
            for alg in SUPPORTED_HASH_ALGORITHMS:
                print("  %s: %s" % (alg, artifact[alg]))
            if artifact['names']:
                print("  names:")
                for name in artifact['names']:
                    print("    %s" % name)
    else:
        print('No artifacts cached!')
def ls(self):
    artifact_cache = ArtifactCache()
    artifacts = artifact_cache.list()

    if artifacts:
        for artifact_filename, artifact in artifacts.items():
            click.echo("\n{}:".format(click.style(
                artifact_filename.split('.')[0], fg='green', bold=True)))

            for alg in SUPPORTED_HASH_ALGORITHMS:
                if alg in artifact and artifact[alg]:
                    click.echo("  {}: {}".format(click.style(alg, bold=True), artifact[alg]))

            if artifact['names']:
                click.echo("  {}:".format(click.style("names", bold=True)))

                for name in artifact['names']:
                    click.echo("    - %s" % name)
    else:
        click.echo('No artifacts cached!')
def clear(self):
    """
    Removes the artifact cache directory with all artifacts.

    Use with caution!
    """
    artifact_cache = ArtifactCache()

    if not click.confirm("Are you sure you want to remove all artifacts from the cache?",
                         show_default=True):
        return

    try:
        shutil.rmtree(artifact_cache.cache_dir)
        click.echo("Artifact cache cleared!")
    except Exception:  # pylint: disable=broad-except
        click.secho("An error occurred while removing the artifact cache directory '{}'".format(
            artifact_cache.cache_dir), fg='red')
        sys.exit(1)
def __init__(self, descriptor):
    # Schema must be provided by the implementing class
    if not self.schema:
        raise CekitError("Resource '{}' has no schema defined".format(type(self).__name__))

    # Includes validation
    super(Resource, self).__init__(descriptor)

    # Make sure we have 'name' set
    self._ensure_name(descriptor)
    # Make sure we have 'target' set
    self._ensure_target(descriptor)
    # Convert the dictionary into a Map object for easier access
    self._descriptor = self.__to_map(descriptor)

    self.skip_merging = ['md5', 'sha1', 'sha256', 'sha512']

    # forwarded import to prevent circular imports
    from cekit.cache.artifact import ArtifactCache
    self.cache = ArtifactCache()