def check_config(toml_config_pre, toml_config_post):
    """
    :param toml_config_pre: dictionary for the value before change
    :type toml_config_pre: dcos.api.config.Toml
    :param toml_config_post: dictionary for the value with change
    :type toml_config_post: dcos.api.config.Toml
    :returns: process status
    :rtype: int
    """

    errors_pre = util.validate_json(toml_config_pre._dictionary,
                                    generate_root_schema(toml_config_pre))
    errors_post = util.validate_json(toml_config_post._dictionary,
                                     generate_root_schema(toml_config_post))

    logger.info('Comparing changes in the configuration...')
    logger.info('Errors before the config command: %r', errors_pre)
    logger.info('Errors after the config command: %r', errors_post)

    if len(errors_post) != 0:
        if len(errors_pre) == 0:
            raise DCOSException(util.list_to_err(errors_post))

        def _errs(errs):
            return set([e.split('\n')[0] for e in errs])

        diff_errors = _errs(errors_post) - _errs(errors_pre)
        if len(diff_errors) != 0:
            raise DCOSException(util.list_to_err(errors_post))
def check_config(toml_config_pre, toml_config_post, section):
    """
    :param toml_config_pre: dictionary for the value before change
    :type toml_config_pre: dcos.api.config.Toml
    :param toml_config_post: dictionary for the value with change
    :type toml_config_post: dcos.api.config.Toml
    :param section: section of the config to check
    :type section: str
    :returns: process status
    :rtype: int
    """

    errors_pre = util.validate_json(toml_config_pre._dictionary[section],
                                    get_config_schema(section))
    errors_post = util.validate_json(toml_config_post._dictionary[section],
                                     get_config_schema(section))

    logger.info('Comparing changes in the configuration...')
    logger.info('Errors before the config command: %r', errors_pre)
    logger.info('Errors after the config command: %r', errors_post)

    if len(errors_post) != 0:
        if len(errors_pre) == 0:
            raise DCOSException(util.list_to_err(errors_post))

        def _errs(errs):
            return set([e.split('\n')[0] for e in errs])

        diff_errors = _errs(errors_post) - _errs(errors_pre)
        if len(diff_errors) != 0:
            raise DCOSException(util.list_to_err(errors_post))
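# Illustrative sketch (not from the source): check_config only raises for
# errors introduced by the change, comparing error sets keyed on the first line
# of each validation message. A standalone version of that comparison:


def _new_errors(errors_pre, errors_post):
    """Return first lines of errors present after a change but not before."""
    def _errs(errs):
        return set(e.split('\n')[0] for e in errs)

    return _errs(errors_post) - _errs(errors_pre)


# Example: a pre-existing error is ignored, a newly introduced one is kept.
assert _new_errors(['a: missing\ndetail'],
                   ['a: missing\ndetail', 'b: bad type']) == {'b: bad type'}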
def _replace_directly(build_definition, build_schema,
                      build_definition_directory, ref):
    """ Replaces the local reference ref with the contents of the
    file pointed to by ref

    :param build_definition: The DC/OS Package Build Definition that may
     contain local references
    :type build_definition: dict
    :param build_definition_directory: The directory of the Build Definition
    :type build_definition_directory: str
    :param build_schema: The schema for the Build Definition
    :type build_schema: dict
    :param ref: The key in build_definition that will be replaced
    :type ref: str
    """
    if ref in build_definition and _is_local_reference(build_definition[ref]):
        location = build_definition[ref][1:]
        if not os.path.isabs(location):
            location = os.path.join(build_definition_directory, location)

        with util.open_file(location) as f:
            contents = util.load_json(f, True)

        build_definition[ref] = contents

        errs = util.validate_json(build_definition, build_schema)
        if errs:
            logger.debug("Failed during resolution of {}: \n"
                         "\tbuild definition: {}"
                         "".format(ref, build_definition))
            raise DCOSException(_validation_error(location))
def _group_add(group_resource):
    """
    :param group_resource: optional filename for the group resource
    :type group_resource: str
    :returns: process return code
    :rtype: int
    """

    group_resource = _get_resource(group_resource)
    schema = _data_schema()
    errs = util.validate_json(group_resource, schema)
    if errs:
        raise DCOSException(util.list_to_err(errs))

    client = marathon.create_client()

    # Check that the group doesn't exist
    group_id = client.normalize_app_id(group_resource['id'])

    try:
        client.get_group(group_id)
    except DCOSException as e:
        logger.exception(e)
    else:
        raise DCOSException("Group '{}' already exists".format(group_id))

    client.create_group(group_resource)

    return 0
def _add(app_resource):
    """
    :param app_resource: optional filename for the application resource
    :type app_resource: str
    :returns: process return code
    :rtype: int
    """
    application_resource = _get_resource(app_resource)

    # Add application to marathon
    client = marathon.create_client()

    schema = client.get_app_schema()
    if schema is None:
        schema = _app_schema()

    errs = util.validate_json(application_resource, schema)
    if errs:
        raise DCOSException(util.list_to_err(errs))

    # Check that the application doesn't exist
    app_id = client.normalize_app_id(application_resource['id'])

    try:
        client.get_app(app_id)
    except DCOSException as e:
        logger.exception(e)
    else:
        raise DCOSException("Application '{}' already exists".format(app_id))

    client.add_app(application_resource)

    return 0
def _validate_json_file(fullpath):
    """Validates the content of the file against its schema. Throws an
    exception if the file is not valid.

    :param fullpath: full path to the file.
    :type fullpath: str
    :return: json object if it is a special file
    :rtype: dict
    """

    filename = os.path.basename(fullpath)
    if filename in ['command.json', 'config.json', 'package.json']:
        schema_path = 'data/universe-schema/{}'.format(filename)
    else:
        raise DCOSException(
            ('Error bundling package. Unknown file in package '
             'directory [{}]').format(fullpath))

    special_schema = util.load_jsons(
        pkg_resources.resource_string('dcoscli', schema_path).decode('utf-8'))

    with util.open_file(fullpath) as special_file:
        special_json = util.load_json(special_file)

    errs = util.validate_json(special_json, special_schema)
    if errs:
        emitter.publish(
            errors.DefaultError(
                'Error validating JSON file [{}]'.format(fullpath)))
        raise DCOSException(util.list_to_err(errs))

    return special_json
def _replace_marathon(build_definition, build_schema,
                      build_definition_directory):
    """ Replaces the marathon v2AppMustacheTemplate ref with the base64
    encoding of the file pointed to by the reference

    :param build_definition: The DC/OS Package Build Definition that may
     contain local references
    :type build_definition: dict
    :param build_definition_directory: The directory of the Build Definition
    :type build_definition_directory: str
    :param build_schema: The schema for the Build Definition
    :type build_schema: dict
    """
    ref = "marathon"
    template = "v2AppMustacheTemplate"
    if ref in build_definition and \
            _is_local_reference(build_definition[ref][template]):
        location = (build_definition[ref])[template][1:]
        if not os.path.isabs(location):
            location = os.path.join(build_definition_directory, location)

        # convert the contents of the marathon file into base64
        with util.open_file(location) as f:
            contents = base64.b64encode(f.read().encode()).decode()

        build_definition[ref][template] = contents

        errs = util.validate_json(build_definition, build_schema)
        if errs:
            logger.debug("Failed during resolution of marathon: \n"
                         "\tbuild definition: {}"
                         "".format(build_definition))
            raise DCOSException(_validation_error(location))
def _validate_json_file(fullpath):
    """Validates the content of the file against its schema. Throws an
    exception if the file is not valid.

    :param fullpath: full path to the file.
    :type fullpath: str
    :return: json object if it is a special file
    :rtype: dict
    """

    filename = os.path.basename(fullpath)
    if filename in ['command.json', 'config.json', 'package.json']:
        schema_path = 'data/universe-schema/{}'.format(filename)
    else:
        raise DCOSException(('Error bundling package. Unknown file in package '
                             'directory [{}]').format(fullpath))

    special_schema = util.load_jsons(
        pkg_resources.resource_string('dcoscli', schema_path).decode('utf-8'))

    with util.open_file(fullpath) as special_file:
        special_json = util.load_json(special_file)

    errs = util.validate_json(special_json, special_schema)
    if errs:
        emitter.publish(
            errors.DefaultError(
                'Error validating JSON file [{}]'.format(fullpath)))
        raise DCOSException(util.list_to_err(errs))

    return special_json
def _replace_marathon(build_definition, build_schema,
                      build_definition_directory):
    """ Replaces the marathon v2AppMustacheTemplate ref with the base64
    encoding of the file pointed to by the reference

    :param build_definition: The DC/OS Package Build Definition that may
     contain local references
    :type build_definition: dict
    :param build_definition_directory: The directory of the Build Definition
    :type build_definition_directory: str
    :param build_schema: The schema for the Build Definition
    :type build_schema: dict
    """
    ref = "marathon"
    template = "v2AppMustacheTemplate"
    if ref in build_definition and \
            _is_local_reference(build_definition[ref][template]):
        location = (build_definition[ref])[template][1:]
        if not os.path.isabs(location):
            location = os.path.join(build_definition_directory, location)

        # convert the contents of the marathon file into base64
        with util.open_file(location) as f:
            contents = base64.b64encode(
                f.read().encode()).decode()

        build_definition[ref][template] = contents

        errs = util.validate_json(build_definition, build_schema)
        if errs:
            logger.debug("Failed during resolution of marathon: \n"
                         "\tbuild definition: {}"
                         "".format(build_definition))
            raise DCOSException(_validation_error(location))
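# Illustrative note (not from the source): after the replacement above, the
# v2AppMustacheTemplate entry holds the marathon template as a base64 string,
# so it can be recovered for inspection with the standard library alone:

import base64

template_b64 = base64.b64encode(b'{"id": "{{name}}"}').decode()
assert base64.b64decode(template_b64).decode() == '{"id": "{{name}}"}'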
def options(self, revision, user_options):
    """Merges package options with user supplied options, validates, and
    returns the result.

    :param revision: the package revision to install
    :type revision: str
    :param user_options: package parameters
    :type user_options: dict
    :returns: a dictionary with the user supplied options
    :rtype: dict
    """
    if user_options is None:
        user_options = {}

    config_schema = self.config_json(revision)
    default_options = _extract_default_values(config_schema)

    logger.info('Generated default options: %r', default_options)

    # Merge option overrides
    options = _merge_options(default_options, user_options)

    logger.info('Merged options: %r', options)

    # Validate options with the config schema
    errs = util.validate_json(options, config_schema)
    if len(errs) != 0:
        raise DCOSException(
            "{}\n\n{}".format(
                util.list_to_err(errs),
                'Please create a JSON file with the appropriate options, '
                'and pass the /path/to/file as an --options argument.'))

    return options
def test_service():
    returncode, stdout, stderr = exec_command(['dcos', 'service', '--json'])

    services = get_services(1)

    schema = _get_schema(framework_fixture())
    for srv in services:
        assert not util.validate_json(srv, schema)
def test_node():
    returncode, stdout, stderr = exec_command(['dcos', 'node', '--json'])

    assert returncode == 0
    assert stderr == b''

    nodes = json.loads(stdout.decode('utf-8'))
    schema = _get_schema(slave_fixture())
    for node in nodes:
        assert not util.validate_json(node, schema)
def test_node():
    returncode, stdout, stderr = exec_command(["dcos", "node", "--json"])

    assert returncode == 0
    assert stderr == b""

    nodes = json.loads(stdout.decode("utf-8"))
    schema = _get_schema(slave_fixture())
    for node in nodes:
        assert not util.validate_json(node, schema)
def test_node():
    returncode, stdout, stderr = exec_command(
        ['dcos', 'node', 'list', '--json'])

    assert returncode == 0
    assert stderr == b''

    nodes = json.loads(stdout.decode('utf-8'))
    assert len(nodes) > 0

    slave_nodes = [node for node in nodes if node['type'] == 'agent']
    schema = _get_schema(slave_fixture())
    for node in slave_nodes:
        assert util.validate_json(node, schema)
def test_task():
    # test `dcos task` output
    returncode, stdout, stderr = exec_command(['dcos', 'task', '--json'])

    assert returncode == 0
    assert stderr == b''

    tasks = json.loads(stdout.decode('utf-8'))
    assert isinstance(tasks, collections.Sequence)
    assert len(tasks) == NUM_TASKS

    schema = create_schema(task_fixture().dict())
    for task in tasks:
        assert not util.validate_json(task, schema)
def test_task():
    # test `dcos task` output
    returncode, stdout, stderr = exec_command(["dcos", "task", "--json"])

    assert returncode == 0
    assert stderr == b""

    tasks = json.loads(stdout.decode("utf-8"))
    assert isinstance(tasks, collections.Sequence)
    assert len(tasks) == NUM_TASKS

    schema = create_schema(task_fixture().dict())
    for task in tasks:
        assert not util.validate_json(task, schema)
def _validate():
    """
    :returns: process status
    :rtype: int
    """
    _, toml_config = _load_config()

    errs = util.validate_json(toml_config._dictionary,
                              _generate_root_schema(toml_config))
    if len(errs) != 0:
        emitter.publish(util.list_to_err(errs))
        return 1

    return 0
def _validate():
    """
    :returns: process status
    :rtype: int
    """
    toml_config = util.get_config(True)

    errs = util.validate_json(toml_config._dictionary,
                              config.generate_root_schema(toml_config))
    if len(errs) != 0:
        emitter.publish(util.list_to_err(errs))
        return 1

    emitter.publish("Congratulations, your configuration is valid!")
    return 0
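# Illustrative sketch (an assumption, not the actual dcos.util implementation):
# the snippets in this collection treat util.validate_json as "validate
# `instance` against `schema` and return a list of human-readable error
# strings, empty when valid". A minimal version of that contract, using the
# jsonschema package, could look like:

import jsonschema


def validate_json_sketch(instance, schema):
    """Return a list of validation error messages; an empty list means valid."""
    validator = jsonschema.Draft4Validator(schema)
    return [error.message for error in validator.iter_errors(instance)]


# Example: a missing required property yields exactly one error string.
_schema = {'type': 'object',
           'required': ['id'],
           'properties': {'id': {'type': 'string'}}}
assert validate_json_sketch({'id': 'app'}, _schema) == []
assert len(validate_json_sketch({}, _schema)) == 1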
def test_task():
    # test `dcos task` output
    returncode, stdout, stderr = exec_command(['dcos', 'task', '--json'])

    assert returncode == 0
    assert stderr == b''

    tasks = json.loads(stdout.decode('utf-8'))
    assert isinstance(tasks, collections.Sequence)
    assert len(tasks) == NUM_TASKS

    schema = create_schema(task_fixture().dict(), True)
    schema['required'].remove('labels')
    for task in tasks:
        assert not util.validate_json(task, schema)
def test_task():
    # test `dcos task` output
    returncode, stdout, stderr = exec_command(
        ['dcos', 'task', 'list', '--json'])

    assert returncode == 0
    assert stderr == b''

    tasks = json.loads(stdout.decode('utf-8'))
    assert isinstance(tasks, Sequence)
    assert len(tasks) == NUM_TASKS

    schema = create_schema(task_fixture().dict(), True)
    schema['required'].remove('labels')
    for task in tasks:
        assert not util.validate_json(task, schema)
def test_task():
    _install_sleep_task()

    # test `dcos task` output
    returncode, stdout, stderr = exec_command(['dcos', 'task', '--json'])

    assert returncode == 0
    assert stderr == b''

    tasks = json.loads(stdout.decode('utf-8'))
    assert isinstance(tasks, collections.Sequence)
    assert len(tasks) == 1

    schema = create_schema(task_fixture().dict())
    for task in tasks:
        assert not util.validate_json(task, schema)

    _uninstall_sleep()
def _validate_update(current_resource, properties, schema):
    """
    Validate resource ("app" or "group") update

    :param current_resource: Marathon app definition
    :type current_resource: dict
    :param properties: resource JSON
    :type properties: dict
    :param schema: JSON schema used to verify properties
    :type schema: dict
    :rtype: None
    """

    updated_resource = _clean_up_resource_definition(current_resource.copy())
    updated_resource.update(properties)

    errs = util.validate_json(updated_resource, schema)
    if errs:
        raise DCOSException(util.list_to_err(errs))
def options(self, revision, user_options):
    """Merges package options with user supplied options, validates, and
    returns the result.

    :param revision: the package revision to install
    :type revision: str
    :param user_options: package parameters
    :type user_options: dict
    :returns: a dictionary with the user supplied options
    :rtype: dict
    """
    if user_options is None:
        user_options = {}

    config_schema = self.config_json(revision)
    default_options = _extract_default_values(config_schema)
    if default_options is None:
        pkg = self.package_json(revision)
        msg = ("An object in the package's config.json is missing the "
               "required 'properties' feature:\n {}".format(config_schema))
        if 'maintainer' in pkg:
            msg += "\nPlease contact the project maintainer: {}".format(
                pkg['maintainer'])
        raise DCOSException(msg)

    logger.info('Generated default options: %r', default_options)

    # Merge option overrides, second argument takes precedence
    options = _merge_options(default_options, user_options)

    logger.info('Merged options: %r', options)

    # Validate options with the config schema
    errs = util.validate_json(options, config_schema)
    if len(errs) != 0:
        raise DCOSException(
            "{}\n\n{}".format(
                util.list_to_err(errs),
                'Please create a JSON file with the appropriate options, '
                'and pass the /path/to/file as an --options argument.'))

    return options
def options(self, revision, user_options):
    """Merges package options with user supplied options, validates, and
    returns the result.

    :param revision: the package revision to install
    :type revision: str
    :param user_options: package parameters
    :type user_options: dict
    :returns: a dictionary with the user supplied options
    :rtype: dict
    """
    if user_options is None:
        user_options = {}

    config_schema = self.config_json(revision)
    default_options = _extract_default_values(config_schema)
    if default_options is None:
        pkg = self.package_json(revision)
        msg = ("An object in the package's config.json is missing the "
               "required 'properties' feature:\n {}".format(config_schema))
        if 'maintainer' in pkg:
            msg += "\nPlease contact the project maintainer: {}".format(
                pkg['maintainer'])
        raise DCOSException(msg)

    logger.info('Generated default options: %r', default_options)

    # Merge option overrides
    options = _merge_options(default_options, user_options)

    logger.info('Merged options: %r', options)

    # Validate options with the config schema
    errs = util.validate_json(options, config_schema)
    if len(errs) != 0:
        raise DCOSException(
            "{}\n\n{}".format(
                util.list_to_err(errs),
                'Please create a JSON file with the appropriate options, '
                'and pass the /path/to/file as an --options argument.'))

    return options
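# Illustrative sketch (hypothetical helper, not the actual _merge_options):
# the merge step above combines schema defaults with user-supplied options,
# with the second argument taking precedence, before validating the result.


def merge_options_sketch(default_options, user_options):
    """Shallow merge of two option dicts; keys in user_options win."""
    merged = dict(default_options)
    merged.update(user_options)
    return merged


# Example: the user override replaces the default, unrelated defaults survive.
assert merge_options_sketch({'cpus': 1, 'mem': 512},
                            {'mem': 1024}) == {'cpus': 1, 'mem': 1024}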
def test_validation(validation_check):
    result = util.validate_json(validation_check.properties, schema)
    assert result == validation_check.expected
def test_service():
    services = get_services()

    schema = _get_schema(framework_fixture())
    for srv in services:
        assert not util.validate_json(srv, schema)
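# Illustrative sketch (hypothetical, not the actual create_schema/_get_schema
# helpers): the tests above derive a schema from a fixture object and then
# assert that live CLI output produces no validation errors. A minimal
# "schema from example" generator under that assumption:


def schema_from_example_sketch(example):
    """Build a permissive schema requiring the example's top-level keys."""
    type_names = {str: 'string', bool: 'boolean', int: 'number',
                  float: 'number', list: 'array', dict: 'object'}
    properties = {
        key: {'type': type_names.get(type(value), 'object')}
        for key, value in example.items()
    }
    return {
        'type': 'object',
        'properties': properties,
        'required': list(properties),
        'additionalProperties': True,
    }


# Example: a boolean field in the fixture becomes a boolean property.
assert schema_from_example_sketch(
    {'id': 'x', 'active': True})['properties']['active'] == {'type': 'boolean'}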
def _build(output_json, build_definition, output_directory):
    """ Creates a DC/OS Package from a DC/OS Package Build Definition

    :param output_json: whether to output json
    :type output_json: None | bool
    :param build_definition: The Path to a DC/OS package build definition
    :type build_definition: str
    :param output_directory: The directory where the DC/OS Package
    will be stored
    :type output_directory: str
    :returns: The process status
    :rtype: int
    """
    # get the path of the build definition
    cwd = os.getcwd()
    build_definition_path = build_definition
    if not os.path.isabs(build_definition_path):
        build_definition_path = os.path.join(cwd, build_definition_path)

    build_definition_directory = os.path.dirname(build_definition_path)

    if not os.path.exists(build_definition_path):
        raise DCOSException(
            "The file [{}] does not exist".format(build_definition_path))

    # get the path to the output directory
    if output_directory is None:
        output_directory = cwd

    if not os.path.exists(output_directory):
        raise DCOSException(
            "The output directory [{}]"
            " does not exist".format(output_directory))

    logger.debug("Using [%s] as output directory", output_directory)

    # load raw build definition
    with util.open_file(build_definition_path) as bd:
        build_definition_raw = util.load_json(bd, keep_order=True)

    # validate DC/OS Package Build Definition with local references
    build_definition_schema_path = "data/schemas/build-definition-schema.json"
    build_definition_schema = util.load_jsons(
        pkg_resources.resource_string(
            "dcoscli", build_definition_schema_path).decode())

    errs = util.validate_json(build_definition_raw, build_definition_schema)

    if errs:
        logger.debug("Failed before resolution: \n"
                     "\tbuild definition: {}"
                     "".format(build_definition_raw))
        raise DCOSException(_validation_error(build_definition_path))

    # resolve local references in build definition
    _resolve_local_references(
        build_definition_raw,
        build_definition_schema,
        build_definition_directory
    )

    # at this point all the local references have been resolved
    build_definition_resolved = build_definition_raw

    # validate resolved build definition
    metadata_schema_path = "data/schemas/metadata-schema.json"
    metadata_schema = util.load_jsons(
        pkg_resources.resource_string(
            "dcoscli", metadata_schema_path).decode())

    errs = util.validate_json(build_definition_resolved, metadata_schema)

    if errs:
        logger.debug("Failed after resolution: \n"
                     "\tbuild definition: {}"
                     "".format(build_definition_resolved))
        raise DCOSException('Error validating package: '
                            'there was a problem resolving '
                            'the local references in '
                            '[{}]'.format(build_definition_path))

    # create the manifest
    manifest_json = {
        'built-by': "dcoscli.version={}".format(dcoscli.version)
    }

    # create the metadata
    metadata_json = build_definition_resolved

    # create zip file
    with tempfile.NamedTemporaryFile() as temp_file:
        with zipfile.ZipFile(
                temp_file.file,
                mode='w',
                compression=zipfile.ZIP_DEFLATED,
                allowZip64=True) as zip_file:
            metadata = json.dumps(metadata_json, indent=2).encode()
            zip_file.writestr("metadata.json", metadata)

            manifest = json.dumps(manifest_json, indent=2).encode()
            zip_file.writestr("manifest.json", manifest)

        # name the package appropriately
        temp_file.file.seek(0)
        dcos_package_name = '{}-{}-{}.dcos'.format(
            metadata_json['name'],
            metadata_json['version'],
            md5_hash_file(temp_file.file))

        # get the dcos package path
        dcos_package_path = os.path.join(output_directory, dcos_package_name)

        if os.path.exists(dcos_package_path):
            raise DCOSException(
                'Output file [{}] already exists'.format(
                    dcos_package_path))

        # create a new file to contain the package
        temp_file.file.seek(0)
        with util.open_file(dcos_package_path, 'w+b') as dcos_package:
            shutil.copyfileobj(temp_file.file, dcos_package)

    if output_json:
        message = {'package_path': dcos_package_path}
    else:
        message = 'Created DC/OS Universe Package [{}]'.format(
            dcos_package_path)
    emitter.publish(message)

    return 0
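# Illustrative sketch (an assumption about the helper, not the actual
# md5_hash_file implementation): the package name above embeds a digest of the
# zipped archive, so the helper presumably reads the already-seeked file object
# in chunks and returns a hex digest along these lines:

import hashlib


def md5_hash_file_sketch(file_object, chunk_size=4096):
    """Return the hex MD5 digest of an open binary file object."""
    md5 = hashlib.md5()
    for chunk in iter(lambda: file_object.read(chunk_size), b''):
        md5.update(chunk)
    return md5.hexdigest()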
def _build(output_json, build_definition, output_directory):
    """ Creates a DC/OS Package from a DC/OS Package Build Definition

    :param output_json: whether to output json
    :type output_json: None | bool
    :param build_definition: The path to a DC/OS Package Build Definition
    :type build_definition: str
    :param output_directory: The directory where the DC/OS Package
    will be stored
    :type output_directory: str
    :returns: The process status
    :rtype: int
    """
    # get the path of the build definition
    cwd = os.getcwd()
    build_definition_path = build_definition
    if not os.path.isabs(build_definition_path):
        build_definition_path = os.path.join(cwd, build_definition_path)

    build_definition_directory = os.path.dirname(build_definition_path)

    if not os.path.exists(build_definition_path):
        raise DCOSException(
            "The file [{}] does not exist".format(build_definition_path))

    # get the path to the output directory
    if output_directory is None:
        output_directory = cwd

    if not os.path.exists(output_directory):
        raise DCOSException("The output directory [{}]"
                            " does not exist".format(output_directory))

    logger.debug("Using [%s] as output directory", output_directory)

    # load raw build definition
    with util.open_file(build_definition_path) as bd:
        build_definition_raw = util.load_json(bd, keep_order=True)

    # validate DC/OS Package Build Definition with local references
    build_definition_schema_path = "data/schemas/build-definition-schema.json"
    build_definition_schema = util.load_jsons(
        pkg_resources.resource_string("dcoscli",
                                      build_definition_schema_path).decode())

    errs = util.validate_json(build_definition_raw, build_definition_schema)

    if errs:
        logger.debug("Failed before resolution: \n"
                     "\tbuild definition: {}"
                     "".format(build_definition_raw))
        raise DCOSException(_validation_error(build_definition_path))

    # resolve local references in build definition
    _resolve_local_references(build_definition_raw,
                              build_definition_schema,
                              build_definition_directory)

    # at this point all the local references have been resolved
    build_definition_resolved = build_definition_raw

    # validate resolved build definition
    metadata_schema_path = "data/schemas/metadata-schema.json"
    metadata_schema = util.load_jsons(
        pkg_resources.resource_string("dcoscli",
                                      metadata_schema_path).decode())

    errs = util.validate_json(build_definition_resolved, metadata_schema)

    if errs:
        logger.debug("Failed after resolution: \n"
                     "\tbuild definition: {}"
                     "".format(build_definition_resolved))
        raise DCOSException('Error validating package: '
                            'there was a problem resolving '
                            'the local references in '
                            '[{}]'.format(build_definition_path))

    # create the manifest
    manifest_json = {'built-by': "dcoscli.version={}".format(dcoscli.version)}

    # create the metadata
    metadata_json = build_definition_resolved

    # create zip file
    with tempfile.NamedTemporaryFile() as temp_file:
        with zipfile.ZipFile(temp_file.file,
                             mode='w',
                             compression=zipfile.ZIP_DEFLATED,
                             allowZip64=True) as zip_file:
            metadata = json.dumps(metadata_json, indent=2).encode()
            zip_file.writestr("metadata.json", metadata)

            manifest = json.dumps(manifest_json, indent=2).encode()
            zip_file.writestr("manifest.json", manifest)

        # name the package appropriately
        temp_file.file.seek(0)
        dcos_package_name = '{}-{}-{}.dcos'.format(
            metadata_json['name'],
            metadata_json['version'],
            md5_hash_file(temp_file.file))

        # get the dcos package path
        dcos_package_path = os.path.join(output_directory, dcos_package_name)

        if os.path.exists(dcos_package_path):
            raise DCOSException(
                'Output file [{}] already exists'.format(dcos_package_path))

        # create a new file to contain the package
        temp_file.file.seek(0)
        with util.open_file(dcos_package_path, 'w+b') as dcos_package:
            shutil.copyfileobj(temp_file.file, dcos_package)

    if output_json:
        message = {'package_path': dcos_package_path}
    else:
        message = 'Created DC/OS Universe Package [{}]'.format(
            dcos_package_path)
    emitter.publish(message)

    return 0