def test_parse_yaml(self, yaml_mock, open_mock):
    """parse_yaml must open the given path and hand the handle to yaml.load."""
    yaml_path = '/tmp/path'
    handle = mock.MagicMock()
    open_mock.return_value = handle

    utils.parse_yaml(yaml_path)

    # The file is opened exactly once and its handle is what yaml sees.
    open_mock.assert_called_once_with(yaml_path)
    yaml_mock.load.assert_called_once_with(handle)
def check_deployment_tasks(self):
    """Validate each deployment task against the schema for its type.

    Raises errors.ValidationError when a task declares a type that has
    no associated schema.
    """
    logger.debug(
        'Start deployment tasks checking "%s"', self.deployment_tasks_path)
    tasks = utils.parse_yaml(self.deployment_tasks_path)
    type_to_schema = {
        'puppet': self.schema.puppet_task,
        'shell': self.schema.shell_task,
        'group': self.schema.group_task,
        'skipped': self.schema.skipped_task,
        'copy_files': self.schema.copy_files,
        'sync': self.schema.sync,
        'upload_file': self.schema.upload_file,
        'stage': self.schema.stage,
        'reboot': self.schema.reboot}
    for position, task in enumerate(tasks):
        task_type = task['type']
        if task_type not in type_to_schema:
            raise errors.ValidationError(
                'There is no such task type:'
                '{0}'.format(task_type))
        self.validate_schema(
            task,
            type_to_schema[task_type],
            self.deployment_tasks_path,
            value_path=[position])
def check_deployment_tasks(self):
    """Validate every deployment task against its type-specific schema.

    A task whose type is not one of the supported kinds triggers
    errors.ValidationError.
    """
    logger.debug(
        'Start deployment tasks checking "%s"', self.deployment_tasks_path)
    deployment_tasks = utils.parse_yaml(self.deployment_tasks_path)
    schemas = {
        'puppet': self.schema.puppet_task,
        'shell': self.schema.shell_task,
        'group': self.schema.group_task,
        'skipped': self.schema.skipped_task,
        'copy_files': self.schema.copy_files_task,
        'sync': self.schema.sync_task,
        'upload_file': self.schema.upload_file_task,
        'stage': self.schema.stage_task,
        'reboot': self.schema.reboot_task}
    for idx, task in enumerate(deployment_tasks):
        # Unknown types fail loudly instead of producing a KeyError.
        schema = schemas.get(task['type'])
        if schema is None:
            raise errors.ValidationError(
                'There is no such task type:'
                '{0}'.format(task['type']))
        self.validate_schema(
            task, schema, self.deployment_tasks_path, value_path=[idx])
def __init__(self, plugin_path):
    """Remember the plugin layout paths and load its metadata.yaml."""
    self.plugin_path = plugin_path
    self.pre_build_hook_path = join_path(plugin_path, 'pre_build_hook')
    metadata_path = join_path(plugin_path, 'metadata.yaml')
    self.meta = utils.parse_yaml(metadata_path)
    # All build artifacts live under a hidden .build directory.
    self.build_dir = join_path(plugin_path, '.build')
    self.build_src_dir = join_path(self.build_dir, 'src')
    self.checksums_path = join_path(self.build_src_dir, 'checksums.sha1')
def check_env_config_attrs(self):
    """Check attributes in environment config file.

    'attributes' is not required field, but if it's present it should
    contain UI elements OR metadata structure.
    """
    config = utils.parse_yaml(self.env_conf_path)
    if not config:
        return

    self.validate_schema(
        config, self.schema.attr_root_schema, self.env_conf_path)

    for attr_id, attr in six.iteritems(config.get('attributes', {})):
        # The 'metadata' entry has its own structure, so it needs a
        # dedicated validator; every other entry is a UI element.
        if attr_id == 'metadata':
            element_schema = self.schema.attr_meta_schema
        else:
            element_schema = self.schema.attr_element_schema
        self.validate_schema(
            attr,
            element_schema,
            self.env_conf_path,
            value_path=['attributes', attr_id])
def validate_file_by_schema(self, schema, file_path,
                            allow_not_exists=False, allow_empty=False):
    """Validate file with given JSON schema.

    :param schema: object dict
    :type schema: object
    :param file_path: path to the file
    :type file_path: basestring
    :param allow_not_exists: if true don't raise error on missing file
    :type allow_not_exists: bool
    :param allow_empty: allow file to contain no json
    :type allow_empty: bool
    :return:
    """
    # Guard clauses: missing file, then empty file, then the real check.
    if not utils.exists(file_path):
        if not allow_not_exists:
            raise errors.FileDoesNotExist(file_path)
        logger.debug('No file "%s". Skipping check.', file_path)
        return
    data = utils.parse_yaml(file_path)
    if data is None:
        if not allow_empty:
            raise errors.FileIsEmpty(file_path)
        return
    self.validate_schema(data, schema, file_path)
def check_env_config_attrs(self):
    """Check attributes in environment config file.

    'attributes' is not required field, but if it's present it should
    contain UI elements OR metadata structure.
    """
    config = utils.parse_yaml(self.env_conf_path)
    if not config:
        return
    self.validate_schema(
        config, self.schema.attr_root_schema, self.env_conf_path)
    attrs = config.get('attributes', {})
    for attr_id, attr in six.iteritems(attrs):
        # Metadata is shaped differently from plain UI elements,
        # so it gets its own schema.
        schema = (self.schema.attr_meta_schema if attr_id == 'metadata'
                  else self.schema.attr_element_schema)
        self.validate_schema(
            attr, schema, self.env_conf_path,
            value_path=['attributes', attr_id])
def _parse_tasks(self):
    """Sanity-check the legacy tasks file without returning its contents.

    Always returns None: the parse result is only used to detect an
    empty tasks file.
    NOTE(review): returning None even when tasks.yaml parsed fine looks
    intentional (the parsed tasks are presumably consumed elsewhere, not
    here) — confirm against the callers before changing it.
    """
    if utils.exists(self.tasks_path):
        tasks = utils.parse_yaml(self.tasks_path)
        # Tasks schema is not checked in check_schemas, thus
        # we perform manual check on parsing tasks file
        if tasks is None:
            raise errors.FileIsEmpty(self.tasks_path)
    return None
def __init__(self, plugin_path):
    """Remember build locations and read the plugin's metadata."""
    self.plugin_path = plugin_path
    self.pre_build_hook_cmd = './pre_build_hook'
    metadata_path = join_path(self.plugin_path, 'metadata.yaml')
    self.meta = utils.parse_yaml(metadata_path)
    # Build artifacts are assembled under <plugin>/.build/src.
    self.build_dir = join_path(self.plugin_path, '.build')
    self.build_src_dir = join_path(self.build_dir, 'src')
    self.checksums_path = join_path(self.build_src_dir, 'checksums.sha1')
    self.name = self.meta['name']
def __init__(self, plugin_path):
    """Set up build paths, load metadata and split the plugin version."""
    self.plugin_path = plugin_path
    self.pre_build_hook_cmd = './pre_build_hook'
    metadata_file = join_path(self.plugin_path, 'metadata.yaml')
    self.meta = utils.parse_yaml(metadata_file)
    # Build artifacts are assembled under <plugin>/.build/src.
    self.build_dir = join_path(self.plugin_path, '.build')
    self.build_src_dir = join_path(self.build_dir, 'src')
    self.checksums_path = join_path(self.build_src_dir, 'checksums.sha1')
    self.name = self.meta['name']
    # version_split_name_rpm yields a (plugin_version, full_version) pair.
    self.plugin_version, self.full_version = utils.version_split_name_rpm(
        self.meta['version'])
def validate_file_by_schema(self, schema, file_path, check_file_exists=True):
    """Parse a YAML file and validate it against *schema*.

    :param schema: JSON schema dict
    :param file_path: path to the YAML file
    :param check_file_exists: when False a missing file is silently
        skipped; when True it raises errors.FileDoesNotExist
    """
    if not utils.exists(file_path):
        if check_file_exists:
            raise errors.FileDoesNotExist(file_path)
        logger.debug('No file "%s". Skipping check.', file_path)
        return
    data = utils.parse_yaml(file_path)
    if data is None:
        raise errors.FileIsEmpty(file_path)
    self.validate_schema(data, schema, file_path)
def check_tasks(self):
    """Json schema doesn't have any conditions, so we have to make
    sure here, that puppet task is really puppet and shell task
    is correct too
    """
    logger.debug('Start tasks checking "%s"', self.tasks_path)
    tasks = utils.parse_yaml(self.tasks_path)
    for task in tasks:
        if task['type'] == 'puppet':
            schema = v1.PUPPET_PARAMETERS
        elif task['type'] == 'shell':
            schema = v1.SHELL_PARAMETERS
        else:
            # Bug fix: an unrecognised type used to hit an undefined
            # `schema` (NameError) on the first task, or worse, silently
            # reuse the schema of the previous task. Fail explicitly.
            raise errors.ValidationError(
                'There is no such task type:{0}'.format(task['type']))
        self.validate_schema(task['parameters'], schema, self.tasks_path)
def check_releases_paths(self):
    """Ensure each release references existing script/repository dirs."""
    meta = utils.parse_yaml(self.meta_path)
    for release in meta['releases']:
        candidates = [
            join_path(self.plugin_path, release['deployment_scripts_path']),
            join_path(self.plugin_path, release['repository_path'])]
        # Collect every missing directory so the error names them all.
        missing = [path for path in candidates if not utils.exists(path)]
        if missing:
            raise errors.ReleasesDirectoriesError(
                'Cannot find directories {0} for release "{1}"'.format(
                    ', '.join(missing), release))
def check_deployment_tasks(self):
    """Validate deployment tasks against their per-type schemas.

    Consistency fix: an unknown task type previously raised a bare
    KeyError from the schemas lookup; the sibling validators raise
    errors.ValidationError with a clear message, so do the same here.
    """
    logger.debug(
        'Start deployment tasks checking "%s"', self.deployment_tasks_path)
    deployment_tasks = utils.parse_yaml(self.deployment_tasks_path)
    schemas = {
        'puppet': self.schema.puppet_task,
        'shell': self.schema.shell_task,
        'group': self.schema.group_task}
    for idx, deployment_task in enumerate(deployment_tasks):
        if deployment_task['type'] not in schemas:
            raise errors.ValidationError(
                'There is no such task type:'
                '{0}'.format(deployment_task['type']))
        self.validate_schema(
            deployment_task,
            schemas[deployment_task['type']],
            self.deployment_tasks_path,
            value_path=[idx])
def check_compatibility(self):
    """Json schema doesn't have any conditions, so we have to make
    sure here, that this validation schema can be used for described
    fuel releases
    """
    meta = utils.parse_yaml(self.meta_path)
    # The minimum supported Fuel release never changes inside the loop.
    minimum = StrictVersion(self.basic_version)
    for fuel_release in meta['fuel_version']:
        if StrictVersion(fuel_release) >= minimum:
            continue
        raise errors.ValidationError(
            'Current plugin format {0} is not compatible with {2} Fuel'
            ' release. Fuel version must be {1} or higher.'
            ' Please remove {2} version from metadata.yaml file or'
            ' downgrade package_version.'
            .format(
                meta['package_version'],
                self.basic_version,
                fuel_release))
def check_compatibility(self):
    """Json schema doesn't have any conditions, so we have to make
    sure here, that this validation schema can be used for described
    fuel releases
    """
    meta = utils.parse_yaml(self.meta_path)
    error_template = (
        'Current plugin format {0} is not compatible with {2} Fuel'
        ' release. Fuel version must be {1} or higher.'
        ' Please remove {2} version from metadata.yaml file or'
        ' downgrade package_version.')
    for fuel_release in meta['fuel_version']:
        # Every declared Fuel release must meet the minimum version.
        if StrictVersion(fuel_release) < StrictVersion(self.basic_version):
            raise errors.ValidationError(error_template.format(
                meta['package_version'], self.basic_version, fuel_release))
def check_tasks(self):
    """Json schema doesn't have any conditions, so we have to make
    sure here, that puppet task is really puppet and shell task
    is correct too
    """
    logger.debug('Start tasks checking "%s"', self.tasks_path)
    tasks = utils.parse_yaml(self.tasks_path)
    parameters_schemas = {
        'puppet': self.schema.puppet_parameters,
        'shell': self.schema.shell_parameters}
    for position, task in enumerate(tasks):
        self.validate_schema(
            task['parameters'],
            parameters_schemas[task['type']],
            self.tasks_path,
            value_path=[position, 'parameters'])
def get_version_mapping_from_plugin(plugin_path):
    """Returns mapping for specific version of the plugin

    :param str plugin_path: path to the directory with metadata.yaml file
    :returns: dict which contains
              'version' - package version
              'validator' - validator class
              'templates' - path to templates
              'builder' - builder class
    :raises errors.WrongPluginDirectoryError: if metadata.yaml is missing
    """
    meta_path = join_path(plugin_path, 'metadata.yaml')
    if not utils.exists(meta_path):
        # Bug fix: the exception was constructed but never raised, so a
        # missing metadata.yaml fell through to parse_yaml and crashed
        # with an unrelated error; also interpolate the path into the
        # message instead of leaving a literal "%s".
        raise errors.WrongPluginDirectoryError(
            'Wrong path to the plugin, cannot find "%s" file' % meta_path)
    meta = utils.parse_yaml(meta_path)
    package_version = meta.get('package_version')
    return get_plugin_for_version(package_version)
def check_releases_paths(self):
    """Verify that every release's directories exist on disk."""
    meta = utils.parse_yaml(self.meta_path)
    for release in meta['releases']:
        scripts_path = join_path(
            self.plugin_path, release['deployment_scripts_path'])
        repo_path = join_path(
            self.plugin_path, release['repository_path'])
        # Report all missing directories at once, not just the first.
        wrong_paths = [path for path in (scripts_path, repo_path)
                       if not utils.exists(path)]
        if wrong_paths:
            raise errors.ReleasesDirectoriesError(
                'Cannot find directories {0} for release "{1}"'.format(
                    ', '.join(wrong_paths), release))
def check_tasks(self):
    """Check legacy tasks.yaml."""
    logger.debug('Start tasks checking "%s"', self.tasks_path)
    if not utils.exists(self.tasks_path):
        logger.debug('File "%s" doesn\'t exist', self.tasks_path)
        return
    # todo(ikutukov): remove self._check_tasks
    tasks = utils.parse_yaml(self.tasks_path)
    if tasks is None:
        # An empty legacy file is tolerated here.
        return
    parameters_schemas = {
        'puppet': self.schema.puppet_parameters,
        'shell': self.schema.shell_parameters,
        'reboot': self.schema.reboot_parameters}
    for idx, task in enumerate(tasks):
        self.validate_schema(
            task.get('parameters'),
            parameters_schemas[task['type']],
            self.tasks_path,
            value_path=[idx, 'parameters'])
def check_deployment_tasks(self):
    """Validate deployment tasks and warn about tasks without roles.

    A task whose type requires a role but carries none of the known
    role aliases is logged as a warning; an unknown type raises
    errors.ValidationError.
    """
    logger.debug(
        'Start deployment tasks checking "%s"', self.deployment_tasks_path)
    deployment_tasks = utils.parse_yaml(self.deployment_tasks_path)
    schemas = {
        'puppet': self.schema.puppet_task,
        'shell': self.schema.shell_task,
        'group': self.schema.group_task,
        'skipped': self.schema.skipped_task,
        'copy_files': self.schema.copy_files_task,
        'sync': self.schema.sync_task,
        'upload_file': self.schema.upload_file_task,
        'stage': self.schema.stage_task,
        'reboot': self.schema.reboot_task}
    for idx, deployment_task in enumerate(deployment_tasks):
        task_type = deployment_task['type']
        if task_type not in schemas:
            error_msg = ('There is no such task type:'
                         '{0}'.format(task_type))
            raise errors.ValidationError(error_msg)
        if task_type not in self.schema.roleless_tasks:
            # any() short-circuits on the first truthy alias value,
            # matching the original for/else + break construct.
            has_role = any(
                deployment_task.get(alias)
                for alias in self.schema.role_aliases)
            if not has_role:
                logger.warn(
                    'Task {0} does not contain {1} fields. That '
                    'may lead to tasks being unassigned to nodes.'.format(
                        deployment_task['id'],
                        '/'.join(self.schema.role_aliases)))
        self.validate_schema(
            deployment_task,
            schemas[task_type],
            self.deployment_tasks_path,
            value_path=[idx])
def check_tasks(self):
    """Check legacy tasks.yaml.

    Missing file is tolerated (debug-logged); an empty file is
    silently accepted. Each task's 'parameters' section is validated
    against the schema matching its 'type'.
    """
    logger.debug('Start tasks checking "%s"', self.tasks_path)
    if utils.exists(self.tasks_path):
        # todo(ikutukov): remove self._check_tasks
        tasks = utils.parse_yaml(self.tasks_path)
        if tasks is None:
            # Empty legacy file: nothing to validate.
            return

        schemas = {
            'puppet': self.schema.puppet_parameters,
            'shell': self.schema.shell_parameters,
            'reboot': self.schema.reboot_parameters
        }

        for idx, task in enumerate(tasks):
            # value_path pins schema errors to the task's position.
            self.validate_schema(task.get('parameters'),
                                 schemas[task['type']],
                                 self.tasks_path,
                                 value_path=[idx, 'parameters'])
    else:
        logger.debug('File "%s" doesn\'t exist', self.tasks_path)
def check_deployment_tasks(self):
    """Validate deployment tasks against per-type schemas.

    Unknown task types raise errors.ValidationError. Tasks of types
    that require a role but define none of the known role aliases are
    logged as warnings before validation.
    """
    logger.debug('Start deployment tasks checking "%s"',
                 self.deployment_tasks_path)
    deployment_tasks = utils.parse_yaml(self.deployment_tasks_path)
    schemas = {
        'puppet': self.schema.puppet_task,
        'shell': self.schema.shell_task,
        'group': self.schema.group_task,
        'skipped': self.schema.skipped_task,
        'copy_files': self.schema.copy_files_task,
        'sync': self.schema.sync_task,
        'upload_file': self.schema.upload_file_task,
        'stage': self.schema.stage_task,
        'reboot': self.schema.reboot_task
    }

    for idx, deployment_task in enumerate(deployment_tasks):
        if deployment_task['type'] not in schemas:
            error_msg = 'There is no such task type:' \
                        '{0}'.format(deployment_task['type'])
            raise errors.ValidationError(error_msg)
        if deployment_task['type'] not in self.schema.roleless_tasks:
            # for/else: the else branch runs only when no alias held a
            # truthy value (i.e. the loop finished without `break`).
            for role_alias in self.schema.role_aliases:
                deployment_role = deployment_task.get(role_alias)
                if deployment_role:
                    break
            else:
                logger.warn(
                    'Task {0} does not contain {1} fields. That '
                    'may lead to tasks being unassigned to nodes.'.format(
                        deployment_task['id'],
                        '/'.join(self.schema.role_aliases)))
        self.validate_schema(deployment_task,
                             schemas[deployment_task['type']],
                             self.deployment_tasks_path,
                             value_path=[idx])
def check_deployment_tasks(self):
    """Validate deployment tasks against their per-type schemas.

    Consistency fix: an unknown task type previously raised a bare
    KeyError from the schemas lookup; the sibling validator with the
    same schema attributes raises errors.ValidationError, so match it.
    """
    logger.debug(
        'Start deployment tasks checking "%s"', self.deployment_tasks_path)
    deployment_tasks = utils.parse_yaml(self.deployment_tasks_path)
    schemas = {
        'puppet': self.schema.puppet_task,
        'shell': self.schema.shell_task,
        'group': self.schema.group_task,
        'skipped': self.schema.skipped_task,
        'copy_files': self.schema.copy_files,
        'sync': self.schema.sync,
        'upload_file': self.schema.upload_file,
        'stage': self.schema.stage,
        'reboot': self.schema.reboot}
    for idx, deployment_task in enumerate(deployment_tasks):
        if deployment_task['type'] not in schemas:
            raise errors.ValidationError(
                'There is no such task type:'
                '{0}'.format(deployment_task['type']))
        self.validate_schema(
            deployment_task,
            schemas[deployment_task['type']],
            self.deployment_tasks_path,
            value_path=[idx])
def _parse_tasks(self):
    """Load and return the parsed contents of the tasks file."""
    parsed = utils.parse_yaml(self.tasks_path)
    return parsed
def validate_file_by_schema(self, schema, path):
    """Parse the YAML file at *path* and validate it against *schema*."""
    parsed_data = utils.parse_yaml(path)
    self.validate_schema(parsed_data, schema, path)
def __init__(self, plugin_path):
    """Load metadata.yaml and remember the declared package version."""
    self.plugin_path = plugin_path
    metadata_path = join_path(plugin_path, 'metadata.yaml')
    self.meta = utils.parse_yaml(metadata_path)
    # package_version may be absent; .get keeps it None in that case.
    self.package_version = self.meta.get('package_version')