def check_config(toml_config_pre, toml_config_post):
    """Compare configuration validation results before and after a change.

    Raises DCOSException when the change introduced validation errors
    that were not already present beforehand.

    :param toml_config_pre: dictionary for the value before change
    :type toml_config_pre: dcos.api.config.Toml
    :param toml_config_post: dictionary for the value with change
    :type toml_config_post: dcos.api.config.Toml
    :returns: process status
    :rtype: int
    """
    pre_errors = util.validate_json(toml_config_pre._dictionary,
                                    generate_root_schema(toml_config_pre))
    post_errors = util.validate_json(toml_config_post._dictionary,
                                     generate_root_schema(toml_config_post))

    logger.info('Comparing changes in the configuration...')
    logger.info('Errors before the config command: %r', pre_errors)
    logger.info('Errors after the config command: %r', post_errors)

    if not post_errors:
        # Config is valid after the change; nothing to report.
        return

    if not pre_errors:
        # Config was clean before, so every error is new: blame the change.
        raise DCOSException(util.list_to_err(post_errors))

    def _first_lines(errs):
        # Errors are compared by their first line only.
        return {err.split('\n')[0] for err in errs}

    introduced = _first_lines(post_errors) - _first_lines(pre_errors)
    if introduced:
        raise DCOSException(util.list_to_err(post_errors))
def check_config(toml_config_pre, toml_config_post, section):
    """Compare validation results for one config section before and after
    a change, raising if the change introduced new errors.

    :param toml_config_pre: dictionary for the value before change
    :type toml_config_pre: dcos.api.config.Toml
    :param toml_config_post: dictionary for the value with change
    :type toml_config_post: dcos.api.config.Toml
    :param section: section of the config to check
    :type section: str
    :returns: process status
    :rtype: int
    """
    pre_errors = util.validate_json(toml_config_pre._dictionary[section],
                                    get_config_schema(section))
    post_errors = util.validate_json(toml_config_post._dictionary[section],
                                     get_config_schema(section))

    logger.info('Comparing changes in the configuration...')
    logger.info('Errors before the config command: %r', pre_errors)
    logger.info('Errors after the config command: %r', post_errors)

    if not post_errors:
        # Section validates after the change; nothing to report.
        return

    if not pre_errors:
        # Section was clean before, so every error is new: blame the change.
        raise DCOSException(util.list_to_err(post_errors))

    def _first_lines(errs):
        # Errors are compared by their first line only.
        return {err.split('\n')[0] for err in errs}

    introduced = _first_lines(post_errors) - _first_lines(pre_errors)
    if introduced:
        raise DCOSException(util.list_to_err(post_errors))
def _add(app_resource):
    """Create a new application on Marathon from a resource definition.

    :param app_resource: optional filename for the application resource
    :type app_resource: str
    :returns: process return code
    :rtype: int
    """
    app_json = _get_resource(app_resource)

    client = marathon.create_client()

    # Prefer the schema served by Marathon itself; fall back to the
    # bundled application schema when the server does not provide one.
    schema = client.get_app_schema()
    if schema is None:
        schema = _app_schema()

    validation_errors = util.validate_json(app_json, schema)
    if validation_errors:
        raise DCOSException(util.list_to_err(validation_errors))

    app_id = client.normalize_app_id(app_json['id'])

    # A failed lookup (which raises DCOSException) means the ID is free;
    # a successful one means the application already exists.
    try:
        client.get_app(app_id)
    except DCOSException as e:
        logger.exception(e)
    else:
        raise DCOSException("Application '{}' already exists".format(app_id))

    client.add_app(app_json)

    return 0
def _group_add(group_resource):
    """Create a new group on Marathon from a resource definition.

    :param group_resource: optional filename for the group resource
    :type group_resource: str
    :returns: process return code
    :rtype: int
    """
    group_json = _get_resource(group_resource)

    validation_errors = util.validate_json(group_json, _data_schema())
    if validation_errors:
        raise DCOSException(util.list_to_err(validation_errors))

    client = marathon.create_client()

    group_id = client.normalize_app_id(group_json['id'])

    # A failed lookup (which raises DCOSException) means the ID is free;
    # a successful one means the group already exists.
    try:
        client.get_group(group_id)
    except DCOSException as e:
        logger.exception(e)
    else:
        raise DCOSException("Group '{}' already exists".format(group_id))

    client.create_group(group_json)

    return 0
def _validate_json_file(fullpath):
    """Validates the content of the file against its schema. Throws an
    exception if the file is not valid.

    :param fullpath: full path to the file.
    :type fullpath: str
    :return: json object if it is a special file
    :rtype: dict
    """
    filename = os.path.basename(fullpath)

    # Only the three well-known package files have bundled schemas.
    if filename not in ('command.json', 'config.json', 'package.json'):
        raise DCOSException(
            ('Error bundling package. Unknown file in package '
             'directory [{}]').format(fullpath))

    schema_path = 'data/universe-schema/{}'.format(filename)
    special_schema = util.load_jsons(
        pkg_resources.resource_string('dcoscli',
                                      schema_path).decode('utf-8'))

    with util.open_file(fullpath) as special_file:
        special_json = util.load_json(special_file)

    validation_errors = util.validate_json(special_json, special_schema)
    if validation_errors:
        emitter.publish(
            errors.DefaultError(
                'Error validating JSON file [{}]'.format(fullpath)))
        raise DCOSException(util.list_to_err(validation_errors))

    return special_json
def _validate_json_file(fullpath):
    """Validates the content of the file against its schema. Throws an
    exception if the file is not valid.

    :param fullpath: full path to the file.
    :type fullpath: str
    :return: json object if it is a special file
    :rtype: dict
    """
    filename = os.path.basename(fullpath)

    # Only the three well-known package files have bundled schemas.
    if filename not in ('command.json', 'config.json', 'package.json'):
        raise DCOSException(('Error bundling package. Unknown file in package '
                             'directory [{}]').format(fullpath))

    schema_path = 'data/universe-schema/{}'.format(filename)
    special_schema = util.load_jsons(
        pkg_resources.resource_string('dcoscli',
                                      schema_path).decode('utf-8'))

    with util.open_file(fullpath) as special_file:
        special_json = util.load_json(special_file)

    validation_errors = util.validate_json(special_json, special_schema)
    if validation_errors:
        emitter.publish(
            errors.DefaultError(
                'Error validating JSON file [{}]'.format(fullpath)))
        raise DCOSException(util.list_to_err(validation_errors))

    return special_json
def options(self, revision, user_options):
    """Merges package options with user supplied options, validates, and
    returns the result.

    :param revision: the package revision to install
    :type revision: str
    :param user_options: package parameters
    :type user_options: dict
    :returns: a dictionary with the user supplied options
    :rtype: dict
    """
    if user_options is None:
        user_options = {}

    config_schema = self.config_json(revision)
    default_options = _extract_default_values(config_schema)
    # Guard against a malformed config.json (an object without the
    # 'properties' feature): without this check, None would be handed
    # to _merge_options and fail with an unhelpful error.
    if default_options is None:
        pkg = self.package_json(revision)
        msg = ("An object in the package's config.json is missing the "
               "required 'properties' feature:\n {}".format(config_schema))
        if 'maintainer' in pkg:
            msg += "\nPlease contact the project maintainer: {}".format(
                pkg['maintainer'])
        raise DCOSException(msg)

    logger.info('Generated default options: %r', default_options)

    # Merge option overrides; user-supplied options take precedence
    options = _merge_options(default_options, user_options)

    logger.info('Merged options: %r', options)

    # Validate options with the config schema
    errs = util.validate_json(options, config_schema)
    if len(errs) != 0:
        raise DCOSException(
            "{}\n\n{}".format(
                util.list_to_err(errs),
                'Please create a JSON file with the appropriate options, '
                'and pass the /path/to/file as an --options argument.'))

    return options
def _validate():
    """Validate the loaded configuration against the root schema.

    :returns: process status
    :rtype: int
    """
    _, toml_config = _load_config()

    validation_errors = util.validate_json(
        toml_config._dictionary,
        _generate_root_schema(toml_config))

    if validation_errors:
        emitter.publish(util.list_to_err(validation_errors))
        return 1

    return 0
def _validate():
    """Validate the current configuration against the root schema.

    :returns: process status
    :rtype: int
    """
    toml_config = util.get_config(True)

    validation_errors = util.validate_json(
        toml_config._dictionary,
        config.generate_root_schema(toml_config))

    if validation_errors:
        emitter.publish(util.list_to_err(validation_errors))
        return 1

    emitter.publish("Congratulations, your configuration is valid!")
    return 0
def _validate_update(current_resource, properties, schema):
    """Validate resource ("app" or "group") update

    :param current_resource: Marathon app definition
    :type current_resource: dict
    :param properties: resource JSON
    :type properties: dict
    :param schema: JSON schema used to verify properties
    :type schema: dict
    :rtype: None
    """
    # Apply the update to a cleaned copy so the caller's dict is untouched.
    candidate = _clean_up_resource_definition(current_resource.copy())
    candidate.update(properties)

    validation_errors = util.validate_json(candidate, schema)
    if validation_errors:
        raise DCOSException(util.list_to_err(validation_errors))
def options(self, revision, user_options):
    """Merges package options with user supplied options, validates, and
    returns the result.

    :param revision: the package revision to install
    :type revision: str
    :param user_options: package parameters
    :type user_options: dict
    :returns: a dictionary with the user supplied options
    :rtype: dict
    """
    if user_options is None:
        user_options = {}

    config_schema = self.config_json(revision)
    defaults = _extract_default_values(config_schema)
    # A None result means an object in config.json lacks 'properties';
    # fail with a message that points at the package maintainer.
    if defaults is None:
        pkg = self.package_json(revision)
        msg = ("An object in the package's config.json is missing the "
               "required 'properties' feature:\n {}".format(config_schema))
        if 'maintainer' in pkg:
            msg += "\nPlease contact the project maintainer: {}".format(
                pkg['maintainer'])
        raise DCOSException(msg)

    logger.info('Generated default options: %r', defaults)

    # Merge option overrides, second argument takes precedence
    merged = _merge_options(defaults, user_options)

    logger.info('Merged options: %r', merged)

    # Validate options with the config schema
    validation_errors = util.validate_json(merged, config_schema)
    if validation_errors:
        raise DCOSException(
            "{}\n\n{}".format(
                util.list_to_err(validation_errors),
                'Please create a JSON file with the appropriate options, '
                'and pass the /path/to/file as an --options argument.'))

    return merged
def options(self, revision, user_options):
    """Merges package options with user supplied options, validates, and
    returns the result.

    :param revision: the package revision to install
    :type revision: str
    :param user_options: package parameters
    :type user_options: dict
    :returns: a dictionary with the user supplied options
    :rtype: dict
    """
    if user_options is None:
        user_options = {}

    config_schema = self.config_json(revision)
    defaults = _extract_default_values(config_schema)
    # A None result means an object in config.json lacks 'properties';
    # fail with a message that points at the package maintainer.
    if defaults is None:
        pkg = self.package_json(revision)
        msg = ("An object in the package's config.json is missing the "
               "required 'properties' feature:\n {}".format(config_schema))
        if 'maintainer' in pkg:
            msg += "\nPlease contact the project maintainer: {}".format(
                pkg['maintainer'])
        raise DCOSException(msg)

    logger.info('Generated default options: %r', defaults)

    # Merge option overrides
    merged = _merge_options(defaults, user_options)

    logger.info('Merged options: %r', merged)

    # Validate options with the config schema
    validation_errors = util.validate_json(merged, config_schema)
    if validation_errors:
        raise DCOSException(
            "{}\n\n{}".format(
                util.list_to_err(validation_errors),
                'Please create a JSON file with the appropriate options, '
                'and pass the /path/to/file as an --options argument.'))

    return merged
def update_sources(config, validate=False):
    """Overwrites the local package cache with the latest source data.

    Failures for individual sources are collected and reported together
    at the end, so one bad source does not block the others.

    :param config: Configuration dictionary
    :type config: dcos.config.Toml
    :param validate: whether to validate source content before installing
    :type validate: bool
    :rtype: None
    """
    errors = []

    # ensure the cache directory is properly configured
    cache_dir = os.path.expanduser(
        util.get_config_vals(['package.cache'], config)[0])

    # ensure the cache directory exists
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    if not os.path.isdir(cache_dir):
        raise DCOSException(
            'Cache directory does not exist! [{}]'.format(cache_dir))

    # obtain an exclusive file lock on $CACHE/.lock so concurrent CLI
    # invocations cannot corrupt the cache
    lock_path = os.path.join(cache_dir, '.lock')

    with _acquire_file_lock(lock_path):

        # list sources
        sources = list_sources(config)

        for source in sources:

            emitter.publish('Updating source [{}]'.format(source))

            # create a temporary staging directory; the download is staged
            # first so the live cache is only touched once it succeeds
            with util.tempdir() as tmp_dir:

                stage_dir = os.path.join(tmp_dir, source.hash())

                # copy to the staging directory
                try:
                    source.copy_to_cache(stage_dir)
                except DCOSException as e:
                    logger.exception(
                        'Failed to copy universe source %s to cache %s',
                        source.url,
                        stage_dir)

                    # record the failure and move on to the next source
                    errors.append(str(e))
                    continue

                # check version
                # TODO(jsancio): move this to the validation when it is forced
                Registry(source, stage_dir).check_version(
                    LooseVersion('1.0'),
                    LooseVersion('3.0'))

                # validate content
                if validate:
                    validation_errors = Registry(source, stage_dir).validate()
                    if len(validation_errors) > 0:
                        errors += validation_errors
                        continue  # keep updating the other sources

                # remove the $CACHE/source.hash() directory
                target_dir = os.path.join(cache_dir, source.hash())
                try:
                    if os.path.exists(target_dir):
                        shutil.rmtree(target_dir,
                                      onerror=_rmtree_on_error,
                                      ignore_errors=False)
                except OSError:
                    logger.exception(
                        'Error removing target directory before move: %s',
                        target_dir)

                    err = "Could not remove directory [{}]".format(target_dir)
                    errors.append(err)
                    continue  # keep updating the other sources

                # move the staging directory to $CACHE/source.hash()
                shutil.move(stage_dir, target_dir)

    # surface all accumulated per-source failures as one exception
    if errors:
        raise DCOSException(util.list_to_err(errors))
def update_sources(config, validate=False):
    """Overwrites the local package cache with the latest source data.

    Failures for individual sources are collected and reported together
    at the end, so one bad source does not block the others.

    :param config: Configuration dictionary
    :type config: dcos.config.Toml
    :param validate: whether to validate source content before installing
    :type validate: bool
    :rtype: None
    """
    errors = []

    # ensure the cache directory is properly configured
    cache_dir = os.path.expanduser(
        util.get_config_vals(['package.cache'], config)[0])

    # ensure the cache directory exists
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)

    if not os.path.isdir(cache_dir):
        raise DCOSException(
            'Cache directory does not exist! [{}]'.format(cache_dir))

    # obtain an exclusive file lock on $CACHE/.lock so concurrent CLI
    # invocations cannot corrupt the cache
    lock_path = os.path.join(cache_dir, '.lock')

    with _acquire_file_lock(lock_path):

        # list sources
        sources = list_sources(config)

        for source in sources:

            emitter.publish('Updating source [{}]'.format(source))

            # create a temporary staging directory; the download is staged
            # first so the live cache is only touched once it succeeds
            with util.tempdir() as tmp_dir:

                stage_dir = os.path.join(tmp_dir, source.hash())

                # copy to the staging directory
                try:
                    source.copy_to_cache(stage_dir)
                except DCOSException as e:
                    logger.exception(
                        'Failed to copy universe source %s to cache %s',
                        source.url,
                        stage_dir)

                    # BUG FIX: exceptions have no `.message` attribute in
                    # Python 3; use str(e) (as the other version of this
                    # function does) so the handler itself cannot raise
                    # AttributeError.
                    errors.append(str(e))
                    continue

                # check version
                # TODO(jsancio): move this to the validation when it is forced
                Registry(source, stage_dir).check_version(
                    LooseVersion('1.0'),
                    LooseVersion('2.0'))

                # validate content
                if validate:
                    validation_errors = Registry(source, stage_dir).validate()
                    if len(validation_errors) > 0:
                        errors += validation_errors
                        continue  # keep updating the other sources

                # remove the $CACHE/source.hash() directory
                target_dir = os.path.join(cache_dir, source.hash())
                try:
                    if os.path.exists(target_dir):
                        shutil.rmtree(target_dir,
                                      onerror=_rmtree_on_error,
                                      ignore_errors=False)
                except OSError:
                    logger.exception(
                        'Error removing target directory before move: %s',
                        target_dir)

                    err = "Could not remove directory [{}]".format(target_dir)
                    errors.append(err)
                    continue  # keep updating the other sources

                # move the staging directory to $CACHE/source.hash()
                shutil.move(stage_dir, target_dir)

    # surface all accumulated per-source failures as one exception
    if errors:
        raise DCOSException(util.list_to_err(errors))