def _check_tags(self, attribute, value):
    """Check tags field for
    - presence of at least one required tag based on config
    - maximum count of tags
    - tag regular expression
    - tag max length
    """
    no_req_tag_err = f'At least one tag required from tag list: {", ".join(REQUIRED_TAG_LIST)}'

    config_data = config.ConfigFile.load()
    cfg = config.Config(config_data=config_data)
    if cfg.check_required_tags and not value:
        self.value_error(no_req_tag_err)
    if cfg.check_required_tags and (not any(tag in REQUIRED_TAG_LIST for tag in value)):
        self.value_error(no_req_tag_err)

    if not value:
        return

    if len(value) > constants.MAX_TAGS_COUNT:
        self.value_error(
            f"Expecting no more than {constants.MAX_TAGS_COUNT} tags in metadata"
        )

    for tag in value:
        if not re.match(constants.NAME_REGEXP, tag):
            self.value_error(f"'tag' has invalid format: {tag}")
        if len(tag) > MAX_LENGTH_TAG:
            self.value_error(
                f"Each tag in 'tags' list must not be greater than {MAX_LENGTH_TAG} characters"
            )
def test_no_config_data():
    cfg = config.Config(config_data={})
    assert cfg.log_level_main == 'INFO'
    assert cfg.run_ansible_test is False
    assert cfg.ansible_test_local_image is False
    assert cfg.local_image_docker is False
    assert cfg.infra_osd is False
def doc_string_loader():
    cfg = config.Config(config_data=config.ConfigFile.load())
    return loaders.DocStringLoader(
        path="/tmp_dir/tmp123/ansible_collections/my_namespace/my_collection",
        fq_collection_name="my_namespace.my_collection",
        cfg=cfg,
    )
def test_get_runner_ansible_test_local(temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nRUN_ANSIBLE_TEST = True')
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert runners.get_runner(cfg) == runners.LocalAnsibleTestRunner
def test_get_runner_local_image(temp_config_file):
    with open(temp_config_file, "w") as f:
        f.write("[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n"
                "ANSIBLE_TEST_LOCAL_IMAGE = True")
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert runners.get_runner(cfg) == runners.LocalImageTestRunner
def test_get_runner_pulp_and_osd(temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n'
                'INFRA_PULP = True\nINFRA_OSD = True')
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert runners.get_runner(cfg) == runners.OpenshiftJobTestRunner
def test_osd_config_wins_over_local_image(temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n'
                'ANSIBLE_TEST_LOCAL_IMAGE = True\nINFRA_OSD = True')
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert runners.get_runner(cfg) == runners.OpenshiftJobTestRunner
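Read together, the runner tests in this section imply a selection precedence among the ansible-test runners. The sketch below is only an illustration of the behavior those tests assert, not the library's actual runners.get_runner() source; the helper name pick_runner is hypothetical.

from galaxy_importer.ansible_test import runners


def pick_runner(cfg):
    # Illustrative precedence inferred from the tests above; hypothetical helper.
    if not cfg.run_ansible_test:
        return None  # ansible-test disabled -> no runner
    if cfg.infra_osd:
        return runners.OpenshiftJobTestRunner  # OSD config wins over local image
    if cfg.ansible_test_local_image:
        return runners.LocalImageTestRunner  # containerized local run
    return runners.LocalAnsibleTestRunner  # plain local ansible-test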
def test_required_tag_enabled(collection_info, temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nCHECK_REQUIRED_TAGS = True')
        f.flush()

        config_data = config.ConfigFile.load()
        config.Config(config_data=config_data)

        collection_info['tags'] = ['application']
        res = CollectionInfo(**collection_info)
        assert ['application'] == res.tags
def test_no_config_data():
    cfg = config.Config(config_data={})
    assert cfg.log_level_main == "INFO"
    assert cfg.run_ansible_test is False
    assert cfg.ansible_test_local_image is False
    assert cfg.local_image_docker is False
    assert cfg.infra_osd is False
    assert cfg.tmp_root_dir is None
    assert cfg.ansible_local_tmp == "~/.ansible/tmp"
def import_collection(file, filename=None, logger=None, cfg=None):
    """Process import on collection artifact file object.

    :raises exc.ImporterError: On errors that fail the import process.
    """
    if not cfg:
        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
    logger = logger or default_logger
    return _import_collection(file, filename, logger, cfg)
def test_config_bad_ini_section(temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[bad-section]\nRUN_ANSIBLE_TEST = True')
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert cfg.log_level_main == 'INFO'
        assert cfg.run_ansible_test is False
        assert cfg.infra_pulp is False
        assert cfg.infra_osd is False
def test_config_set_from_env(temp_config_file_b, monkeypatch):
    with open(temp_config_file_b, 'w') as f:
        f.write('[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n'
                'INFRA_PULP = True')
        f.flush()

        monkeypatch.setenv('GALAXY_IMPORTER_CONFIG', temp_config_file_b)
        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert cfg.run_ansible_test is True
        assert cfg.infra_pulp is True
def test_required_tag_enabled_exception(collection_info, temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nCHECK_REQUIRED_TAGS = True')
        f.flush()

        config_data = config.ConfigFile.load()
        config.Config(config_data=config_data)

        collection_info['tags'] = ['fail']
        with pytest.raises(ValueError, match=r'At least one tag required from tag list: '):
            CollectionInfo(**collection_info)
def test_config_set_from_file(temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n'
                'INFRA_PULP = True')
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert cfg.log_level_main == 'INFO'
        assert cfg.run_ansible_test is True
        assert cfg.infra_pulp is True
        assert cfg.infra_osd is False
def test_config_with_non_boolean(temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n'
                'LOG_LEVEL_MAIN = DEBUG')
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert cfg.log_level_main == 'DEBUG'
        assert cfg.run_ansible_test is True
        assert cfg.infra_pulp is False
        assert cfg.infra_osd is False
def test_config_bad_ini_section(temp_config_file):
    with open(temp_config_file, "w") as f:
        f.write("[bad-section]\nRUN_ANSIBLE_TEST = True")
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert cfg.log_level_main == "INFO"
        assert cfg.run_ansible_test is False
        assert cfg.ansible_test_local_image is False
        assert cfg.local_image_docker is False
        assert cfg.infra_osd is False
def test_config_with_non_boolean(temp_config_file):
    with open(temp_config_file, "w") as f:
        f.write("[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n"
                "LOG_LEVEL_MAIN = DEBUG")
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert cfg.log_level_main == "DEBUG"
        assert cfg.run_ansible_test is True
        assert cfg.ansible_test_local_image is False
        assert cfg.local_image_docker is False
        assert cfg.infra_osd is False
def test_config_set_from_file(temp_config_file):
    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n'
                'ANSIBLE_TEST_LOCAL_IMAGE = True\n'
                'LOCAL_IMAGE_DOCKER = True')
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert cfg.log_level_main == 'INFO'
        assert cfg.run_ansible_test is True
        assert cfg.ansible_test_local_image is True
        assert cfg.local_image_docker is True
        assert cfg.infra_osd is False
def main(args=None):
    config_data = config.ConfigFile.load()
    cfg = config.Config(config_data=config_data)
    setup_logger(cfg)
    args = parse_args(args)

    data = call_importer(filepath=args.file, cfg=cfg)
    if not data:
        return 1

    if args.print_result:
        print(json.dumps(data, indent=4))

    write_output_file(data)
def test_ansible_test_runner_run(mocker, temp_config_file):
    mocker.patch.object(runners, 'LocalAnsibleTestRunner')
    mocker.patch.object(runners, 'OpenshiftJobTestRunner')

    with open(temp_config_file, 'w') as f:
        f.write('[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n'
                'ANSIBLE_TEST_LOCAL_IMAGE = True\nINFRA_OSD = True')
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        ansible_test_runner = runners.get_runner(cfg)
        ansible_test_runner().run()
        assert not runners.LocalAnsibleTestRunner.called
        assert runners.OpenshiftJobTestRunner.called
def _check_tags(self, attribute, value):
    """Check max tags and check against both tag regular expression and required tag list."""
    if value is not None and len(value) > constants.MAX_TAGS_COUNT:
        self.value_error(
            f"Expecting no more than {constants.MAX_TAGS_COUNT} tags in metadata"
        )

    for tag in value:
        if not re.match(constants.NAME_REGEXP, tag):
            self.value_error(f"'tag' has invalid format: {tag}")

    config_data = config.ConfigFile.load()
    cfg = config.Config(config_data=config_data)
    if cfg.check_required_tags and (not any(tag in REQUIRED_TAG_LIST for tag in value)):
        self.value_error(
            f'At least one tag required from tag list: {", ".join(REQUIRED_TAG_LIST)}'
        )
def test_config_set_from_file(temp_config_file):
    with open(temp_config_file, "w") as f:
        f.write(
            "[galaxy-importer]\nRUN_ANSIBLE_TEST = True\n"
            "ANSIBLE_TEST_LOCAL_IMAGE = True\n"
            "LOCAL_IMAGE_DOCKER = True\n"
            "TMP_ROOT_DIR = /tmp\n"
            "ANSIBLE_LOCAL_TMP = /tmp/ansible"
        )
        f.flush()

        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
        assert cfg.log_level_main == "INFO"
        assert cfg.run_ansible_test is True
        assert cfg.ansible_test_local_image is True
        assert cfg.local_image_docker is True
        assert cfg.infra_osd is False
        assert cfg.tmp_root_dir == "/tmp"
        assert cfg.ansible_local_tmp == "/tmp/ansible"
def import_collection(
    file=None,
    filename=None,
    file_url=None,
    git_clone_path=None,
    output_path=None,
    logger=None,
    cfg=None,
):
    """Process import on collection artifact file object.

    :param file: file handle of collection artifact.
    :param filename: namedtuple of CollectionFilename.
    :param file_url: storage url of collection artifact.
    :param git_clone_path: path to git repo directory of collection pre artifact build.
    :param output_path: path where collection build tarball file will be written.
    :param logger: Optional logger instance.
    :param cfg: Optional config.
    :raises exc.ImporterError: On errors that fail the import process.
    :return: metadata if `file` provided, (metadata, filepath) if `git_clone_path` provided
    """
    # Fall back to the default logger before first use to avoid calling .info on None.
    logger = logger or default_logger
    logger.info(f"Importing with galaxy-importer {__version__}")

    if not cfg:
        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)

    if (file and git_clone_path) or not (file or git_clone_path):
        raise exc.ImporterError("Expected either 'file' or 'git_clone_path' to be populated")

    if git_clone_path:
        filepath = _build_collection(git_clone_path, output_path, logger)
        with open(filepath, "rb") as fh:
            metadata = _import_collection(fh, filename=None, file_url=None, logger=logger, cfg=cfg)
        return (metadata, filepath)

    return _import_collection(file, filename, file_url, logger, cfg)
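A minimal usage sketch of the two call modes the signature above supports. The artifact filename and checkout paths are hypothetical, and the module path galaxy_importer.collection is assumed from the surrounding snippets.

from galaxy_importer import collection, config

cfg = config.Config(config_data=config.ConfigFile.load())

# Artifact mode: pass an open file handle, get metadata back.
with open("my_namespace-my_collection-1.0.0.tar.gz", "rb") as fh:  # hypothetical artifact
    metadata = collection.import_collection(file=fh, cfg=cfg)

# Git checkout mode: pass a clone path, get (metadata, built tarball path) back.
metadata, filepath = collection.import_collection(
    git_clone_path="/tmp/my_collection_checkout",  # hypothetical path
    output_path="/tmp/build_output",
    cfg=cfg,
)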
def main():
    filepath = sys.argv[1]
    savepath = sys.argv[2]

    logging.basicConfig(
        stream=sys.stdout,
        format='%(levelname)s: %(message)s',
        level=logging.INFO)

    config_data = config.ConfigFile.load()
    cfg = config.Config(config_data=config_data)

    # Modified importer to import a directory instead of a tarball
    data = collection.CollectionLoader(
        filepath, filepath, cfg=cfg, logger=logging).load()

    json_data = json.dumps(attr.asdict(data), indent=2)
    with open(os.path.join(savepath), 'w+') as output_file:
        output_file.write(json_data)
def _check_version_format(self, attribute, value):
    """Check that version is in semantic version format, and max length."""
    if not semantic_version.validate(value):
        self.value_error(
            f"Expecting 'version' to be in semantic version format, instead found '{value}'."
        )
    if len(value) > MAX_LENGTH_VERSION:
        self.value_error(
            f"'version' must not be greater than {MAX_LENGTH_VERSION} characters"
        )

    config_data = config.ConfigFile.load()
    cfg = config.Config(config_data=config_data)
    if cfg.require_v1_or_greater:
        if semantic_version.Version(value) < semantic_version.Version("1.0.0"):
            self.value_error(
                "Config is enabled that requires version to be 1.0.0 or greater."
            )
def run(self):
    cfg = config.Config(config_data=config.ConfigFile.load())
    build = Build(
        self.filepath,
        f'{self.metadata.namespace}-{self.metadata.name}-{self.metadata.version}',
        cfg,
        self.log)

    container_engine = build.get_container_engine(cfg)
    if not shutil.which(container_engine):
        self.log.warning(
            f'"{container_engine}" not found, skipping ansible-test sanity')
        return

    image_id = build.build_image()

    self.log.info('Running image...')
    self._run_image(image_id=image_id, container_engine=container_engine)

    build.cleanup()
def sync_collection(git_clone_path, output_path, logger=None, cfg=None):
    """Process collection metadata without linting to support pulp-ansible sync.

    Call _import_collection() with an overridden config to process metadata
    without linting and without running ansible-test.
    """
    logger = logger or default_logger

    if not cfg:
        config_data = config.ConfigFile.load()
        cfg = config.Config(config_data=config_data)
    cfg.run_ansible_test = False
    cfg.run_ansible_lint = False
    cfg.run_flake8 = False

    filepath = _build_collection(git_clone_path, output_path, logger)
    with open(filepath, "rb") as fh:
        metadata = _import_collection(fh, filename=None, file_url=None, logger=logger, cfg=cfg)
    return (metadata, filepath)
import logging
import os
import pkg_resources
import requests
import time
import uuid

import yaml

from galaxy_importer import config
from galaxy_importer import exceptions
from galaxy_importer.ansible_test.runners.base import BaseTestRunner

default_logger = logging.getLogger(__name__)

cfg = config.Config()

POD_CHECK_RETRIES = 200  # TODO: try to shorten once not pulling image from quay
POD_CHECK_DELAY_SECONDS = 1
OCP_SERVICEACCOUNT_PATH = '/var/run/secrets/kubernetes.io/serviceaccount/'
TEMP_IMG_WITH_ARCHIVE = 'quay.io/awcrosby/ans-test-with-archive'


class OpenshiftJobTestRunner(BaseTestRunner):
    """Run image as an openshift job."""

    def run(self):
        # TODO: change from temp image to build image with pulp-container
        # image = container_build.build_image_with_artifact()
        image = TEMP_IMG_WITH_ARCHIVE

        job = Job(
            ocp_domain=os.environ['IMPORTER_API_DOMAIN'],
def test_no_config_data():
    cfg = config.Config(config_data={})
    assert cfg.log_level_main == 'INFO'
    assert cfg.run_ansible_test is False
    assert cfg.infra_pulp is False
    assert cfg.infra_osd is False
def test_get_runner_no_config_file():
    config_data = config.ConfigFile.load()
    cfg = config.Config(config_data=config_data)
    assert runners.get_runner(cfg) is None