def add(self, location, md5, sha1, sha256, sha512): artifact_cache = ArtifactCache() resource = {} resource['url'] = location if md5: resource['md5'] = md5 if sha1: resource['sha1'] = sha1 if sha256: resource['sha256'] = sha256 if sha512: resource['sha512'] = sha512 artifact = create_resource(resource) cached = artifact_cache.cached(artifact) if cached: click.echo("Artifact {} is already cached!".format(location)) sys.exit(0) try: artifact_id = artifact_cache.add(artifact) click.echo("Artifact {} cached with UUID '{}'".format( location, artifact_id)) except Exception as ex: # pylint: disable=broad-except click.secho("Cannot cache artifact {}: {}".format( location, str(ex)), fg='red') sys.exit(1)
def run(self): tools.cfg = tools.get_cfg(self.args.config) if self.args.verbose: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) if self.args.work_dir: tools.cfg['common']['work_dir'] = self.args.work_dir if self.args.cmd == 'add': artifact_cache = ArtifactCache() resource = {} resource['url'] = self.args.url for alg in SUPPORTED_HASH_ALGORITHMS: val = getattr(self.args, alg) if val: resource[alg] = val artifact = Resource(resource) if artifact_cache.is_cached(artifact): print('Artifact is already cached!') sys.exit(0) try: artifact_id = artifact_cache.add(artifact) if self.args.verbose: print(artifact_id) except Exception as ex: if self.args.verbose: traceback.print_exc() else: print(ex) sys.exit(1) if self.args.cmd == 'ls': self.list() if self.args.cmd == 'rm': try: artifact_cache = ArtifactCache() artifact_cache.delete(self.args.uuid) print("Artifact removed") except Exception: print("Artifact doesn't exists") sys.exit(1) sys.exit(0)
class Resource(Descriptor): CHECK_INTEGRITY = True def __new__(cls, resource, **kwargs): if cls is Resource: if 'path' in resource: return super(Resource, cls).__new__(_PathResource) elif 'url' in resource: return super(Resource, cls).__new__(_UrlResource) elif 'git' in resource: return super(Resource, cls).__new__(_GitResource) elif 'md5' in resource: return super(Resource, cls).__new__(_PlainResource) raise CekitError("Resource type is not supported: %s" % resource) def __init__(self, descriptor): self.schemas = [ yaml.safe_load(""" map: name: {type: str} git: map: url: {type: str, required: True} ref: {type: str} path: {type: str, required: False} url: {type: str, required: False} md5: {type: str} sha1: {type: str} sha256: {type: str} sha512: {type: str} description: {type: str} target: {type: str} assert: \"val['git'] is not None or val['path'] is not None or val['url] is not None or val['md5'] is not None\"""" ) ] super(Resource, self).__init__(descriptor) self.skip_merging = ['md5', 'sha1', 'sha256', 'sha512'] # forwarded import to prevent circural imports from cekit.cache.artifact import ArtifactCache self.cache = ArtifactCache() self.name = descriptor['name'] self.description = None if 'description' in descriptor: self.description = descriptor['description'] def __eq__(self, other): # All subclasses of Resource are considered same object type if isinstance(other, Resource): return self['name'] == other['name'] return NotImplemented def __ne__(self, other): # All subclasses of Resource are considered same object type if isinstance(other, Resource): return not self['name'] == other['name'] return NotImplemented def _copy_impl(self, target): raise NotImplementedError("Implement _copy_impl() for Resource: " + self.__module__ + "." + type(self).__name__) def target_file_name(self): if 'target' not in self: self['target'] = os.path.basename(self.name) return self['target'] def copy(self, target=os.getcwd()): if os.path.isdir(target): target = os.path.join(target, self.target_file_name()) logger.info("Preparing resource '%s'" % (self.name)) if os.path.exists(target) and self.__verify(target): logger.debug("Local resource '%s' exists and is valid" % self.name) return target cached_resource = self.cache.cached(self) if cached_resource: shutil.copy(cached_resource['cached_path'], target) logger.info("Using cached artifact '%s'." % self.name) else: try: self.cache.add(self) cached_resource = self.cache.get(self) shutil.copy(cached_resource['cached_path'], target) logger.info("Using cached artifact '%s'." % self.name) except ValueError: return self.guarded_copy(target) def guarded_copy(self, target): try: self._copy_impl(target) except Exception as ex: logger.warning( "Cekit is not able to fetch resource '%s' automatically. " "Please use cekit-cache command to add this artifact manually." % self.name) if self.description: logger.info(self.description) # exception is fatal we be logged before Cekit dies raise CekitError( "Error copying resource: '%s'. See logs for more info." % self.name, ex) if set(SUPPORTED_HASH_ALGORITHMS).intersection(self) and \ not self.__verify(target): raise CekitError('Artifact checksum verification failed!') return target def __verify(self, target): """ Checks all defined check_sums for an aritfact """ if not set(SUPPORTED_HASH_ALGORITHMS).intersection(self): logger.debug("Artifact '%s' lacks any checksum definition." % self.name) return False if not Resource.CHECK_INTEGRITY: logger.info("Integrity checking disabled, skipping verification.") return True if os.path.isdir(target): logger.info("Target is directory, cannot verify checksum.") return True for algorithm in SUPPORTED_HASH_ALGORITHMS: if algorithm in self and self[algorithm]: if not check_sum(target, algorithm, self[algorithm], self['name']): return False return True def __substitute_cache_url(self, url): cache = config.get('common', 'cache_url') if not cache: return url for algorithm in SUPPORTED_HASH_ALGORITHMS: if algorithm in self: logger.debug( "Using {} checksum to fetch artifacts from cacher".format( algorithm)) url = cache.replace('#filename#', self.name).replace( '#algorithm#', algorithm).replace('#hash#', self[algorithm]) logger.debug("Using cache url '{}'".format(url)) return url def _download_file(self, url, destination, use_cache=True): """ Downloads a file from url and save it as destination """ if use_cache: url = self.__substitute_cache_url(url) if not url: raise CekitError( "Artifact %s cannot be downloaded, no URL provided" % self.name) logger.debug("Downloading from '%s' as %s" % (url, destination)) parsedUrl = urlparse(url) if parsedUrl.scheme == 'file' or not parsedUrl.scheme: if os.path.isdir(parsedUrl.path): shutil.copytree(parsedUrl.path, destination) else: shutil.copy(parsedUrl.path, destination) elif parsedUrl.scheme in ['http', 'https']: verify = config.get('common', 'ssl_verify') if str(verify).lower() == 'false': verify = False ctx = ssl.create_default_context() if not verify: ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE res = urlopen(url, context=ctx) if res.getcode() != 200: raise CekitError("Could not download file from %s" % url) try: with open(destination, 'wb') as f: while True: chunk = res.read(1048576) # 1 MB if not chunk: break f.write(chunk) except Exception: try: logger.debug( "Removing incompletely downloaded '{}' file".format( destination)) os.remove(destination) except OSError: logger.warning( "An error occurred while removing file '{}'".format( destination)) raise else: raise CekitError("Unsupported URL scheme: %s" % (url))
class Resource(Descriptor): """ Base class for handling resources. In most cases resources are synonym to artifacts. """ CHECK_INTEGRITY = True def __init__(self, descriptor): # Schema must be provided by the implementing class if not self.schema: raise CekitError("Resource '{}' has no schema defined".format( type(self).__name__)) # Includes validation super(Resource, self).__init__(descriptor) # Make sure the we have 'name' set self._ensure_name(descriptor) # Make sure the we have 'target' set self._ensure_target(descriptor) # Add a single slash at the end of the 'dest' value self._normalize_dest(descriptor) # Convert the dictionary into a Map object for easier access self._descriptor = self.__to_map(descriptor) self.skip_merging = ['md5', 'sha1', 'sha256', 'sha512'] # forwarded import to prevent circular imports from cekit.cache.artifact import ArtifactCache self.cache = ArtifactCache() def __to_map(self, dictionary): """ Convert provided dictionary, recursively, into a Map object. This will make it possible to access nested elements via properties: res.git.url instead of: res.git['url] """ if not isinstance(dictionary, dict): return dictionary converted = Map() for key in dictionary: converted[key] = self.__to_map(dictionary[key]) return converted def __eq__(self, other): # All subclasses of Resource are considered same object type if isinstance(other, Resource): return self['name'] == other['name'] return NotImplemented def __ne__(self, other): # All subclasses of Resource are considered same object type if isinstance(other, Resource): return not self['name'] == other['name'] return NotImplemented def _ensure_name(self, descriptor): """ Makes sure the 'name' attribute exists. If it does not, a default value will be computed based on the implementation type of the resource class. """ # If the 'name' key is present and there is a value, we have nothing to do if descriptor.get('name') is not None: return # Get the default value set for particular resource type default = self._get_default_name_value(descriptor) # pylint: disable=assignment-from-none # If there is still no default, we need to fail, because 'name' is required. # If we ever get here, it is a bug and should be reported. if not default: raise CekitError(( "Internal error: no value found for 'name' in '{}' artifact; unable to generate default value, " + "please report it: https://github.com/cekit/cekit/issues" ).format(descriptor)) logger.warning( "No value found for 'name' in '{}' artifact; using auto-generated value of '{}'" .format(json.dumps(descriptor, sort_keys=True), default)) descriptor['name'] = default def _ensure_target(self, descriptor): if descriptor.get('target') is not None: return descriptor['target'] = self._get_default_target_value(descriptor) def _normalize_dest(self, descriptor): """ Make sure that the 'dest' value, if provided, does end with a single slash. """ if descriptor.get('dest') is not None: descriptor['dest'] = os.path.normpath(descriptor.get('dest')) + '/' def _get_default_name_value(self, descriptor): # pylint: disable=unused-argument """ Returns default identifier value for particular class. This method must be overridden in classes extending Resource. Returned should be a string that will be be a unique identifier of the resource across thw whole image. """ return None def _get_default_target_value(self, descriptor): # pylint: disable=unused-argument return os.path.basename(descriptor.get('name')) def _copy_impl(self, target): raise NotImplementedError("Implement _copy_impl() for Resource: " + self.__module__ + "." + type(self).__name__) def copy(self, target=os.getcwd()): if os.path.isdir(target): target = os.path.join(target, self.target) logger.info("Copying resource '{}'...".format(self.name)) if os.path.exists(target) and self.__verify(target): logger.debug("Local resource '{}' exists and is valid".format( self.name)) return target cached_resource = self.cache.cached(self) if cached_resource: shutil.copy(cached_resource['cached_path'], target) logger.info("Using cached artifact '{}'.".format(self.name)) else: try: self.cache.add(self) cached_resource = self.cache.get(self) shutil.copy(cached_resource['cached_path'], target) logger.info("Using cached artifact '{}'.".format(self.name)) except ValueError: return self.guarded_copy(target) def guarded_copy(self, target): try: self._copy_impl(target) except Exception as ex: logger.warning( "Cekit is not able to fetch resource '{}' automatically. " "Please use cekit-cache command to add this artifact manually." .format(self.name)) if self.description: logger.info(self.description) # exception is fatal we be logged before Cekit dies raise CekitError( "Error copying resource: '%s'. See logs for more info." % self.name, ex) if set(SUPPORTED_HASH_ALGORITHMS).intersection(self) and \ not self.__verify(target): raise CekitError('Artifact checksum verification failed!') return target def __verify(self, target): """ Checks all defined check_sums for an aritfact """ if not set(SUPPORTED_HASH_ALGORITHMS).intersection(self): logger.debug("Artifact '{}' lacks any checksum definition.".format( self.name)) return False if not Resource.CHECK_INTEGRITY: logger.info("Integrity checking disabled, skipping verification.") return True if os.path.isdir(target): logger.info("Target is directory, cannot verify checksum.") return True for algorithm in SUPPORTED_HASH_ALGORITHMS: if algorithm in self and self[algorithm]: if not check_sum(target, algorithm, self[algorithm], self['name']): return False return True def __substitute_cache_url(self, url): cache = config.get('common', 'cache_url') if not cache: return url for algorithm in SUPPORTED_HASH_ALGORITHMS: if algorithm in self: logger.debug( "Using {} checksum to fetch artifacts from cacher".format( algorithm)) url = cache.replace('#filename#', self.name).replace( '#algorithm#', algorithm).replace('#hash#', self[algorithm]) logger.debug("Using cache url '{}'".format(url)) return url def _download_file(self, url, destination, use_cache=True): """ Downloads a file from url and save it as destination """ if use_cache: url = self.__substitute_cache_url(url) if not url: raise CekitError( "Artifact %s cannot be downloaded, no URL provided" % self.name) logger.debug("Downloading from '{}' as {}".format(url, destination)) parsed_url = urlparse(url) if parsed_url.scheme == 'file' or not parsed_url.scheme: if os.path.isdir(parsed_url.path): shutil.copytree(parsed_url.path, destination) else: shutil.copy(parsed_url.path, destination) elif parsed_url.scheme in ['http', 'https']: verify = config.get('common', 'ssl_verify') if str(verify).lower() == 'false': verify = False ctx = ssl.create_default_context() if not verify: ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE res = urlopen(url, context=ctx) if res.getcode() != 200: raise CekitError("Could not download file from %s" % url) try: with open(destination, 'wb') as f: while True: chunk = res.read(1048576) # 1 MB if not chunk: break f.write(chunk) except Exception: try: logger.debug( "Removing incompletely downloaded '{}' file".format( destination)) os.remove(destination) except OSError: logger.warning( "An error occurred while removing file '{}'".format( destination)) raise else: raise CekitError("Unsupported URL scheme: {}".format(url))