def validate_file_by_schema(self, schema, file_path, check_file_exists=True):
    if not check_file_exists and not utils.exists(file_path):
        logger.debug('No file "%s". Skipping check.', file_path)
        return
    if not utils.exists(file_path):
        raise errors.FileDoesNotExist(file_path)
    data = utils.parse_yaml(file_path)
    if data is None:
        raise errors.FileIsEmpty(file_path)
    self.validate_schema(data, schema, file_path)
def validate_file_by_schema(self, schema, file_path,
                            allow_not_exists=False, allow_empty=False):
    """Validate file with given JSON schema.

    :param schema: schema as a dict
    :type schema: object
    :param file_path: path to the file
    :type file_path: basestring
    :param allow_not_exists: if true, don't raise an error on a missing file
    :type allow_not_exists: bool
    :param allow_empty: allow the file to contain no data
    :type allow_empty: bool
    :return:
    """
    if not utils.exists(file_path):
        if allow_not_exists:
            logger.debug('No file "%s". Skipping check.', file_path)
            return
        else:
            raise errors.FileDoesNotExist(file_path)
    data = utils.parse_yaml(file_path)
    if data is not None:
        self.validate_schema(data, schema, file_path)
    else:
        if not allow_empty:
            raise errors.FileIsEmpty(file_path)
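# A minimal usage sketch for validate_file_by_schema, assuming a validator
# instance exposing this method. The schema dict and the file path below are
# hypothetical examples, not taken from the original code.
example_schema = {
    'type': 'object',
    'properties': {'name': {'type': 'string'}},
    'required': ['name'],
}
# With allow_not_exists=True a missing optional file is skipped with a debug
# message instead of raising FileDoesNotExist.
validator.validate_file_by_schema(
    example_schema, 'plugin_dir/metadata.yaml', allow_not_exists=True)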
def _parse_tasks(self):
    if utils.exists(self.tasks_path):
        tasks = utils.parse_yaml(self.tasks_path)
        # Tasks schema is not checked in check_schemas, thus
        # we perform a manual check when parsing the tasks file
        if tasks is None:
            raise errors.FileIsEmpty(self.tasks_path)
        return tasks
    return None
def check(self):
    if utils.exists(self.plugin_path):
        raise errors.PluginDirectoryExistsError(
            'Plugins directory {0} already exists, '
            'choose another name'.format(self.plugin_path))

    if not self.plugin_name_pattern.match(self.plugin_name):
        raise errors.ValidationError(
            messages.PLUGIN_WRONG_NAME_EXCEPTION_MESSAGE)
def make_tarball(self):
    full_name = '{0}-{1}'.format(self.meta['name'], self.meta['version'])
    tar_name = '{0}.tar'.format(full_name)
    tar_path = os.path.join(self.plugin_path, tar_name)

    if utils.exists(tar_path):
        utils.exec_cmd('rm -f {0}'.format(tar_path))

    tar = tarfile.open(tar_path, 'w')
    tar.add(self.build_dir, arcname=full_name)
    tar.close()
def make_tarball(self):
    full_name = '{0}-{1}'.format(self.meta['name'], self.meta['version'])
    tar_name = '{0}.tar'.format(full_name)
    tar_path = os.path.join(
        self.plugin_path, tar_name)

    if utils.exists(tar_path):
        utils.exec_cmd('rm -f {0}'.format(tar_path))

    tar = tarfile.open(tar_path, 'w')
    tar.add(self.build_dir, arcname=full_name)
    tar.close()
def check_releases_paths(self):
    meta = utils.parse_yaml(self.meta_path)
    for release in meta['releases']:
        scripts_path = join_path(
            self.plugin_path, release['deployment_scripts_path'])
        repo_path = join_path(
            self.plugin_path, release['repository_path'])

        wrong_paths = []
        for path in [scripts_path, repo_path]:
            if not utils.exists(path):
                wrong_paths.append(path)

        if wrong_paths:
            raise errors.ReleasesDirectoriesError(
                'Cannot find directories {0} for release "{1}"'.format(
                    ', '.join(wrong_paths), release))
def get_version_mapping_from_plugin(plugin_path):
    """Returns the mapping for a specific version of the plugin.

    :param str plugin_path: path to the directory with the metadata.yaml file
    :returns: dict which contains
              'version' - package version
              'validator' - validator class
              'templates' - path to templates
              'builder' - builder class
    """
    meta_path = join_path(plugin_path, 'metadata.yaml')
    if not utils.exists(meta_path):
        raise errors.WrongPluginDirectoryError(
            'Wrong path to the plugin, cannot find "{0}" file'.format(
                meta_path))

    meta = utils.parse_yaml(meta_path)
    package_version = meta.get('package_version')

    return get_plugin_for_version(package_version)
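# A hedged usage sketch of the version mapping: the returned dict is used to
# pick the validator and builder classes that match the plugin's
# package_version. The plugin directory name is a hypothetical example, and
# the constructor/validate() wiring is assumed rather than confirmed by the
# code above.
mapping = get_version_mapping_from_plugin('fuel_plugin_example')
ValidatorClass = mapping['validator']
validator = ValidatorClass('fuel_plugin_example')
validator.validate()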
def check_releases_paths(self):
    meta = utils.parse_yaml(self.meta_path)
    for release in meta['releases']:
        scripts_path = join_path(
            self.plugin_path,
            release['deployment_scripts_path'])
        repo_path = join_path(
            self.plugin_path,
            release['repository_path'])

        wrong_paths = []
        for path in [scripts_path, repo_path]:
            if not utils.exists(path):
                wrong_paths.append(path)

        if wrong_paths:
            raise errors.ReleasesDirectoriesError(
                'Cannot find directories {0} for release "{1}"'.format(
                    ', '.join(wrong_paths), release))
def check_tasks(self):
    """Check legacy tasks.yaml."""
    logger.debug('Start tasks checking "%s"', self.tasks_path)
    if utils.exists(self.tasks_path):
        # todo(ikutukov): remove self._check_tasks
        tasks = utils.parse_yaml(self.tasks_path)
        if tasks is None:
            return

        schemas = {
            'puppet': self.schema.puppet_parameters,
            'shell': self.schema.shell_parameters,
            'reboot': self.schema.reboot_parameters}

        for idx, task in enumerate(tasks):
            self.validate_schema(
                task.get('parameters'),
                schemas[task['type']],
                self.tasks_path,
                value_path=[idx, 'parameters'])
    else:
        logger.debug('File "%s" doesn\'t exist', self.tasks_path)
def check_tasks(self):
    """Check legacy tasks.yaml."""
    logger.debug('Start tasks checking "%s"', self.tasks_path)
    if utils.exists(self.tasks_path):
        # todo(ikutukov): remove self._check_tasks
        tasks = utils.parse_yaml(self.tasks_path)
        if tasks is None:
            return

        schemas = {
            'puppet': self.schema.puppet_parameters,
            'shell': self.schema.shell_parameters,
            'reboot': self.schema.reboot_parameters
        }

        for idx, task in enumerate(tasks):
            self.validate_schema(task.get('parameters'),
                                 schemas[task['type']],
                                 self.tasks_path,
                                 value_path=[idx, 'parameters'])
    else:
        logger.debug('File "%s" doesn\'t exist', self.tasks_path)
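# A sketch of the legacy tasks.yaml content that check_tasks validates: each
# entry carries a "type" ('puppet', 'shell' or 'reboot') whose "parameters"
# are checked against the matching schema. The concrete fields inside
# "parameters" are illustrative assumptions, not taken from the code above.
EXAMPLE_TASKS_YAML = """
- role: ['controller']
  stage: post_deployment
  type: shell
  parameters:
    cmd: ./deploy.sh
    timeout: 42
"""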
def check(self):
    if utils.exists(self.plugin_path):
        raise errors.PluginDirectoryExistsError(
            'Plugins directory {0} already exists, '
            'choose another name'.format(self.plugin_path))
def test_exists(self, os_exists):
    file_path = '/dir/path'
    self.assertTrue(utils.exists(file_path))
    os_exists.assert_called_once_with(file_path)
def test_exists_returns_false(self, os_exists):
    file_path = '/dir/path'
    self.assertFalse(utils.exists(file_path))
    os_exists.assert_called_once_with(file_path)