def main():
    parser, args = cli()
    os_conf = Configuration(conf_file=args.config,
                            conf_section=args.config_section,
                            cli_args=args)
    build_conf = Configuration(conf_file=args.config,
                               conf_section=args.config_section,
                               cli_args=args)
    if os_conf.get_verbosity():
        set_logging(level=logging.DEBUG)
        logger.debug("Logging level set to debug")
    elif args.quiet:
        set_logging(level=logging.WARNING)
    else:
        set_logging(level=logging.INFO)

    osbs = OSBS(os_conf, build_conf)

    try:
        args.func(args, osbs)
    except AttributeError as ex:
        if hasattr(args, 'func'):
            raise
        else:
            parser.print_help()
    except KeyboardInterrupt:
        print("Quitting on user request.")
    except HTTPError as ex:
        logger.error("HTTP error: %d", ex.getcode())
    except Exception as ex:
        if args.verbose:
            raise
        else:
            logger.error("Exception caught: %s", repr(ex))
def test_oauth2_token(self, config, kwargs, cli_args, expected):
    if 'token_file' in kwargs:
        tmpf = self.tmpfile_with_content(kwargs['token_file'])
        kwargs['token_file'] = tmpf.name

    with self.build_cli_args(cli_args) as args:
        with self.config_file(config) as config_file:
            conf = Configuration(conf_file=config_file, cli_args=args, **kwargs)
            assert conf.get_oauth2_token() == expected
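The tmpfile_with_content and config_file helpers this test relies on are not shown in this section. A minimal sketch of what they might look like, assuming they simply wrap NamedTemporaryFile (names and behavior here are assumptions, not the project's actual fixtures):

from contextlib import contextmanager
from tempfile import NamedTemporaryFile


def tmpfile_with_content(content):
    # Hypothetical helper: persist `content` to a temp file and return the
    # file object; the caller reads .name and keeps a reference so the file
    # is not deleted while the test still needs it.
    tmpf = NamedTemporaryFile(mode='wt')
    tmpf.write(content)
    tmpf.flush()
    return tmpf


@contextmanager
def config_file(config):
    # Hypothetical helper: write an ini-style config and yield its path.
    with NamedTemporaryFile(mode='wt') as fp:
        fp.write(config)
        fp.flush()
        yield fp.name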
def test_no_build_type():
    with NamedTemporaryFile(mode='w+') as f:
        f.write("""
[default]
build_host=localhost
""")
        f.flush()
        f.seek(0)
        os_conf = Configuration(conf_file=f.name, conf_section="default")
        assert os_conf.get_build_type() is None
def test_no_build_type():
    with NamedTemporaryFile(mode='w+') as f:
        f.write("""
[default]
build_host=localhost
""")
        f.flush()
        f.seek(0)
        with pytest.raises(OsbsException):
            os_conf = Configuration(conf_file=f.name, conf_section="default")
            os_conf.get_build_type()
def main():
    parser, args = cli()
    try:
        os_conf = Configuration(conf_file=args.config,
                                conf_section=args.instance,
                                cli_args=args)
        build_conf = Configuration(conf_file=args.config,
                                   conf_section=args.instance,
                                   cli_args=args)
    except OsbsException as ex:
        logger.error("Configuration error: %s", ex.message)
        return -1

    is_verbose = os_conf.get_verbosity()
    if is_verbose:
        set_logging(level=logging.DEBUG)
        logger.debug("Logging level set to debug")
    elif args.quiet:
        set_logging(level=logging.WARNING)
    else:
        set_logging(level=logging.INFO)

    osbs = OSBS(os_conf, build_conf)

    try:
        args.func(args, osbs)
    except AttributeError as ex:
        if hasattr(args, 'func'):
            raise
        else:
            parser.print_help()
    except KeyboardInterrupt:
        print("Quitting on user request.")
        return -1
    except OsbsNetworkException as ex:
        if is_verbose:
            raise
        else:
            logger.error("Network error at %s (%d): %s",
                         ex.url, ex.status_code, ex.message)
            return -1
    except Exception as ex:  # pylint: disable=broad-except
        if is_verbose:
            raise
        else:
            logger.error("Exception caught: %s", repr(ex))
            return -1
def test_build_image(self):
    build_image = 'registry.example.com/buildroot:2.0'
    with NamedTemporaryFile(mode='wt') as fp:
        fp.write("""
[general]
build_json_dir = {build_json_dir}

[default]
openshift_url = /
sources_command = /bin/true
vendor = Example, Inc
registry_uri = registry.example.com
build_host = localhost
authoritative_registry = localhost
distribution_scope = private
build_type = prod
build_image = {build_image}
""".format(build_json_dir='inputs', build_image=build_image))
        fp.flush()
        config = Configuration(fp.name)
        osbs = OSBS(config, config)

    assert config.get_build_image() == build_image

    class MockParser(object):
        labels = {'Name': 'fedora23/something'}
        baseimage = 'fedora23/python'

    (flexmock(utils)
        .should_receive('get_df_parser')
        .with_args(TEST_GIT_URI, TEST_GIT_REF, git_branch=TEST_GIT_BRANCH)
        .and_return(MockParser()))

    # Return the request as the response so we can check it
    def request_as_response(request):
        request.json = request.render()
        return request

    flexmock(OSBS, _create_build_config_and_build=request_as_response)

    req = osbs.create_prod_build(TEST_GIT_URI, TEST_GIT_REF, TEST_GIT_BRANCH,
                                 TEST_USER, TEST_COMPONENT, TEST_TARGET,
                                 TEST_ARCH)
    img = req.json['spec']['strategy']['customStrategy']['from']['name']
    assert img == build_image
def test_builder_build_json_dir(self, config, expected):
    with self.config_file(config) as config_file:
        conf = Configuration(conf_file=config_file)
        assert conf.get_builder_build_json_store() == expected
def test_get_node_selector_platform(self, platform, kwargs, config, expected):
    with self.config_file(config) as config_file:
        conf = Configuration(conf_file=config_file, **kwargs)
        assert conf.get_platform_node_selector(platform) == expected
def test_no_config():
    os_conf = Configuration(conf_file=None, openshift_uri='https://example:8443')
    assert os_conf.get_openshift_oauth_api_uri() == \
        'https://example:8443/oauth/authorize'
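Configuration here is built purely from keyword arguments. A minimal sketch of the file-based equivalent, assuming the ini keys mirror the keyword-argument names, as the other tests in this section suggest (section name and key are assumptions):

from tempfile import NamedTemporaryFile

with NamedTemporaryFile(mode='wt') as fp:
    # Same setting as the kwargs form above, supplied via a config file.
    fp.write("""
[default]
openshift_uri = https://example:8443
""")
    fp.flush()
    os_conf = Configuration(conf_file=fp.name, conf_section="default")
    assert os_conf.get_openshift_oauth_api_uri() == \
        'https://example:8443/oauth/authorize'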
def __init__(self, tasker, workflow, kojihub, url,
             build_json_dir, koji_upload_dir,
             verify_ssl=True, use_auth=True,
             koji_ssl_certs_dir=None, koji_proxy_user=None,
             koji_principal=None, koji_keytab=None,
             blocksize=None, prefer_schema1_digest=True,
             platform='x86_64', report_multiple_digests=False):
    """
    constructor

    :param tasker: DockerTasker instance
    :param workflow: DockerBuildWorkflow instance
    :param kojihub: string, koji hub (xmlrpc)
    :param url: string, URL for OSv3 instance
    :param build_json_dir: str, path to directory with input json
    :param koji_upload_dir: str, path to use when uploading to hub
    :param verify_ssl: bool, verify OSv3 SSL certificate?
    :param use_auth: bool, initiate authentication with OSv3?
    :param koji_ssl_certs_dir: str, path to 'cert', 'ca', 'serverca'
    :param koji_proxy_user: str, user to log in as (requires hub config)
    :param koji_principal: str, Kerberos principal (must specify keytab)
    :param koji_keytab: str, keytab name (must specify principal)
    :param blocksize: int, blocksize to use for uploading files
    :param prefer_schema1_digest: bool, when True, v2 schema 1 digest will
           be preferred as the built image digest
    :param platform: str, platform name for this build
    :param report_multiple_digests: bool, whether to report both schema 1
           and schema 2 digests; if truthy, prefer_schema1_digest is ignored
    """
    super(KojiUploadPlugin, self).__init__(tasker, workflow)

    self.kojihub = kojihub
    self.koji_ssl_certs_dir = koji_ssl_certs_dir
    self.koji_proxy_user = koji_proxy_user
    self.koji_principal = koji_principal
    self.koji_keytab = koji_keytab
    self.blocksize = blocksize
    self.build_json_dir = build_json_dir
    self.koji_upload_dir = koji_upload_dir
    self.prefer_schema1_digest = prefer_schema1_digest
    self.report_multiple_digests = report_multiple_digests

    self.namespace = get_build_json().get('metadata', {}).get('namespace', None)
    osbs_conf = Configuration(conf_file=None, openshift_uri=url,
                              use_auth=use_auth, verify_ssl=verify_ssl,
                              build_json_dir=self.build_json_dir,
                              namespace=self.namespace)
    self.osbs = OSBS(osbs_conf, osbs_conf)
    self.build_id = None
    self.pullspec_image = None
    self.platform = platform
def main():
    parser, args = cli()
    try:
        os_conf = Configuration(conf_file=args.config,
                                conf_section=args.instance,
                                cli_args=args)
        build_conf = Configuration(conf_file=args.config,
                                   conf_section=args.instance,
                                   cli_args=args)
    except OsbsException as ex:
        logger.error("Configuration error: %s", ex.message)
        return -1

    is_verbose = os_conf.get_verbosity()
    if args.quiet:
        set_logging(level=logging.WARNING)
    elif is_verbose:
        set_logging(level=logging.DEBUG)
        logger.debug("Logging level set to debug")
    else:
        set_logging(level=logging.INFO)

    osbs = OSBS(os_conf, build_conf)

    if args.capture_dir is not None:
        setup_json_capture(osbs, os_conf, args.capture_dir)

    try:
        args.func(args, osbs)
    except AttributeError as ex:
        if hasattr(args, 'func'):
            raise
        else:
            parser.print_help()
    except KeyboardInterrupt:
        print("Quitting on user request.")
        return -1
    except OsbsNetworkException as ex:
        if is_verbose:
            raise
        else:
            logger.error("Network error at %s (%d): %s",
                         ex.url, ex.status_code, ex.message)
            return -1
    except OsbsAuthException as ex:
        if is_verbose:
            raise
        else:
            logger.error("Authentication failure: %s", ex.message)
            return -1
    except OsbsResponseException as ex:
        if is_verbose:
            raise
        else:
            if isinstance(ex.json, dict) and 'message' in ex.json:
                msg = ex.json['message']
            else:
                msg = ex.message
            logger.error("Server returned error %s: %s", ex.status_code, msg)
            return -1
    except Exception as ex:  # pylint: disable=broad-except
        if is_verbose:
            raise
        else:
            logger.error("Exception caught: %s", repr(ex))
            return -1
def _get_openshift_session(self, kwargs):
    conf = Configuration(**kwargs)
    return OSBS(conf, conf)
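A minimal sketch of the direct construction this helper performs, with concrete values filled in; the keys are assumed to be valid Configuration keywords, as used elsewhere in this section (the URL and namespace are illustrative):

# Hypothetical kwargs; mirrors the Configuration keywords seen in other
# snippets here (conf_file, openshift_url, use_auth, verify_ssl, namespace).
kwargs = {
    'conf_file': None,
    'openshift_url': 'https://openshift.example.com:8443',
    'use_auth': False,
    'verify_ssl': True,
    'namespace': 'worker',
}
conf = Configuration(**kwargs)
osbs = OSBS(conf, conf)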
from tests.constants import (TEST_BUILD, TEST_COMPONENT, TEST_GIT_REF,
                             TEST_GIT_BRANCH, TEST_BUILD_CONFIG)
from tempfile import NamedTemporaryFile

try:
    # py2
    import httplib
    import urlparse
except ImportError:
    # py3
    import http.client as httplib
    import urllib.parse as urlparse

logger = logging.getLogger("osbs.tests")

API_VER = Configuration.get_openshift_api_version()
OAPI_PREFIX = "/oapi/{v}/".format(v=API_VER)
API_PREFIX = "/api/{v}/".format(v=API_VER)


class StreamingResponse(object):
    def __init__(self, status_code=200, content=b'', headers=None):
        self.status_code = status_code
        self.content = content
        self.headers = headers or {}

    def iter_lines(self):
        yield self.content.decode("utf-8")

    def __enter__(self):
        return self
def test_reactor_config(self, config, expected, expected_scratch):
    with self.config_file(config) as config_file:
        conf = Configuration(conf_file=config_file, conf_section='default')
        assert conf.get_reactor_config_map() == expected
        assert conf.get_reactor_config_map_scratch() == expected_scratch
def test_missing_section():
    with NamedTemporaryFile() as f:
        os_conf = Configuration(conf_file=f.name, conf_section="missing")
def test_missing_config():
    os_conf = Configuration(conf_file="/nonexistent/path", conf_section="default")
def test_render_orchestrate_build(self, tmpdir, platforms, build_from,
                                  build_image, build_imagestream,
                                  worker_build_image, additional_kwargs,
                                  koji_parent_build, valid):
    phase = 'buildstep_plugins'
    plugin = 'orchestrate_build'

    kwargs = {
        'git_uri': TEST_GIT_URI,
        'git_ref': TEST_GIT_REF,
        'git_branch': TEST_GIT_BRANCH,
        'user': "******",
        'component': TEST_COMPONENT,
        'base_image': 'fedora:latest',
        'name_label': 'fedora/resultingimage',
        'platforms': platforms,
        'build_type': BUILD_TYPE_ORCHESTRATOR,
        'reactor_config_map': 'reactor-config-map',
        'reactor_config_override': 'reactor-config-override',
    }
    if build_image:
        kwargs['build_image'] = build_image
    if build_imagestream:
        kwargs['build_imagestream'] = build_imagestream
    if build_from:
        kwargs['build_from'] = build_from
    if koji_parent_build:
        kwargs['koji_parent_build'] = koji_parent_build
    kwargs.update(additional_kwargs)

    self.mock_repo_info()
    user_params = BuildUserParams(INPUTS_PATH)

    if valid:
        user_params.set_params(**kwargs)
        build_json = PluginsConfiguration(user_params).render()
    else:
        with pytest.raises(OsbsValidationException):
            user_params.set_params(**kwargs)
            build_json = PluginsConfiguration(user_params).render()
        return

    plugins = get_plugins_from_build_json(build_json)

    if platforms is None:
        platforms = {}
    assert plugin_value_get(plugins, phase, plugin, 'args',
                            'platforms') == (platforms or {})

    build_kwargs = plugin_value_get(plugins, phase, plugin, 'args', 'build_kwargs')
    assert build_kwargs['arrangement_version'] == REACTOR_CONFIG_ARRANGEMENT_VERSION
    assert build_kwargs.get('koji_parent_build') == koji_parent_build
    assert build_kwargs.get('reactor_config_map') == 'reactor-config-map'
    assert build_kwargs.get('reactor_config_override') == 'reactor-config-override'

    worker_config_kwargs = plugin_value_get(plugins, phase, plugin, 'args',
                                            'config_kwargs')
    worker_config = Configuration(conf_file=None, **worker_config_kwargs)

    if isinstance(worker_build_image, type):
        with pytest.raises(worker_build_image):
            worker_config_kwargs['build_image']  # missing key raises
        assert not worker_config.get_build_image()
    else:
        assert worker_config_kwargs['build_image'] == worker_build_image
        assert worker_config.get_build_image() == worker_build_image

    if kwargs.get('flatpak', False):
        assert kwargs.get('flatpak') is True
        assert kwargs.get('flatpak_base_image') == worker_config.get_flatpak_base_image()
def test_pipeline_run_path(self, config, expected):
    with self.config_file(config) as config_file:
        conf = Configuration(conf_file=config_file, conf_section='default')
        assert conf.get_pipeline_run_path() == expected
from osbs.api import OSBS
from osbs.constants import ANNOTATION_SOURCE_REPO
from tests.constants import (TEST_BUILD, TEST_CANCELLED_BUILD,
                             TEST_ORCHESTRATOR_BUILD, TEST_GIT_BRANCH,
                             TEST_BUILD_CONFIG, TEST_GIT_URI_HUMAN_NAME,
                             TEST_KOJI_TASK_ID, TEST_IMAGESTREAM,
                             TEST_IMAGESTREAM_NO_TAGS,
                             TEST_IMAGESTREAM_WITH_ANNOTATION)
from tempfile import NamedTemporaryFile
from textwrap import dedent
from six.moves import http_client
from six.moves.urllib.parse import urlparse

logger = logging.getLogger("osbs.tests")

API_VER = Configuration.get_openshift_api_version()
OAPI_PREFIX = "/oapi/{v}/".format(v=API_VER)
API_PREFIX = "/api/{v}/".format(v=API_VER)


class StreamingResponse(object):
    def __init__(self, status_code=200, content=b'', headers=None):
        self.status_code = status_code
        self.content = content
        self.headers = headers or {}

    def iter_lines(self):
        yield self.content

    def __enter__(self):
        return self
from osbs.api import OSBS
from osbs.constants import ANNOTATION_SOURCE_REPO, ANNOTATION_INSECURE_REPO
from tests.constants import (TEST_BUILD, TEST_CANCELLED_BUILD,
                             TEST_ORCHESTRATOR_BUILD, TEST_GIT_BRANCH,
                             TEST_BUILD_CONFIG, TEST_GIT_URI_HUMAN_NAME,
                             TEST_KOJI_TASK_ID, TEST_IMAGESTREAM,
                             TEST_IMAGESTREAM_NO_TAGS,
                             TEST_IMAGESTREAM_WITH_ANNOTATION,
                             TEST_IMAGESTREAM_WITHOUT_IMAGEREPOSITORY,
                             TEST_GIT_URI_SANITIZED)
from tempfile import NamedTemporaryFile
from textwrap import dedent
from six.moves import http_client
from six.moves.urllib.parse import urlparse

logger = logging.getLogger("osbs.tests")

API_VER = Configuration.get_k8s_api_version()
APIS_PREFIX = "/apis/"
API_PREFIX = "/api/{v}/".format(v=API_VER)
API_BUILD_V1 = APIS_PREFIX + "build.openshift.io/v1/"
API_IMAGE_V1 = APIS_PREFIX + "image.openshift.io/v1/"
API_USER_V1 = APIS_PREFIX + "user.openshift.io/v1/"


class StreamingResponse(object):
    def __init__(self, status_code=200, content=b'', headers=None):
        self.status_code = status_code
        self.content = content
        self.headers = headers or {}

    def iter_lines(self):
def test_get_smtp_error_addresses(self, config, expected):
    with self.config_file(config) as config_file:
        conf = Configuration(conf_file=config_file)
        assert conf.get_smtp_error_addresses() == expected
def run(self):
    try:
        build_json = json.loads(os.environ["BUILD"])
    except KeyError:
        self.log.error("No $BUILD env variable. Probably not running in build container.")
        return

    kwargs = {}
    metadata = build_json.get("metadata", {})
    if 'namespace' in metadata:
        kwargs['namespace'] = metadata['namespace']

    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept everything
    # from specific IP and will set specific X-Remote-User for such requests
    osbs_conf = Configuration(conf_file=None, openshift_uri=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl)
    osbs = OSBS(osbs_conf, osbs_conf)

    # usually repositories formed from NVR labels
    # these should be used for pulling and layering
    primary_repositories = []
    for registry in self.workflow.push_conf.all_registries:
        for image in self.workflow.tag_conf.primary_images:
            registry_image = image.copy()
            registry_image.registry = registry.uri
            primary_repositories.append(registry_image.to_str())

    # unique unpredictable repositories
    unique_repositories = []
    for registry in self.workflow.push_conf.all_registries:
        for image in self.workflow.tag_conf.unique_images:
            registry_image = image.copy()
            registry_image.registry = registry.uri
            unique_repositories.append(registry_image.to_str())

    repositories = {
        "primary": primary_repositories,
        "unique": unique_repositories,
    }

    try:
        commit_id = self.workflow.source.lg.commit_id
    except AttributeError:
        commit_id = ""

    labels = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        "logs": "\n".join(self.workflow.build_logs),
        "rpm-packages": "\n".join(self.get_post_result(PostBuildRPMqaPlugin.key)),
        "repositories": json.dumps(repositories),
        "commit_id": commit_id,
    }

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(self.workflow.exported_image_sequence) > 0:
        tar_path = self.workflow.exported_image_sequence[-1].get("path")
        tar_size = self.workflow.exported_image_sequence[-1].get("size")
        tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
        tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        labels["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    osbs.set_annotations_on_build(build_id, labels, **kwargs)
    return labels
def test_cleanup_used_resources(self, config, expected):
    with self.config_file(config) as config_file:
        conf = Configuration(conf_file=config_file, conf_section='default')
        assert conf.get_cleanup_used_resources() == expected
def test_get_allowed_artifacts_domain(self, config, expected):
    with self.config_file(config) as config_file:
        conf = Configuration(conf_file=config_file)
        assert conf.get_artifacts_allowed_domains() == expected
def test_render_orchestrate_build(self, tmpdir, platforms, build_from,
                                  worker_build_image, is_flatpak,
                                  koji_parent_build, valid, image_only):
    phase = 'buildstep_plugins'
    plugin = 'orchestrate_build'

    conf_args = {
        'build_from': build_from,
        'reactor_config_map': 'reactor-config-map',
    }
    extra_args = {
        'base_image': 'fedora:latest',
        'flatpak': is_flatpak,
        'name_label': 'fedora/resultingimage',
        'platforms': platforms,
        'reactor_config_override': 'reactor-config-override',
        'user': "******",
    }
    if koji_parent_build:
        extra_args['koji_parent_build'] = koji_parent_build

    if valid:
        user_params = get_sample_user_params(conf_args=conf_args,
                                             extra_args=extra_args)
        build_json = PluginsConfiguration(user_params).render()
    else:
        with pytest.raises(OsbsValidationException):
            user_params = get_sample_user_params(conf_args=conf_args,
                                                 extra_args=extra_args)
            build_json = PluginsConfiguration(user_params).render()
        return

    plugins = get_plugins_from_build_json(build_json)

    if platforms is None:
        platforms = {}
    assert plugin_value_get(plugins, phase, plugin, 'args',
                            'platforms') == (platforms or {})

    build_kwargs = plugin_value_get(plugins, phase, plugin, 'args', 'build_kwargs')
    assert build_kwargs['arrangement_version'] == REACTOR_CONFIG_ARRANGEMENT_VERSION
    assert build_kwargs.get('koji_parent_build') == koji_parent_build
    assert build_kwargs.get('reactor_config_map') == 'reactor-config-map'
    assert build_kwargs.get('reactor_config_override') == 'reactor-config-override'

    worker_config_kwargs = plugin_value_get(plugins, phase, plugin, 'args',
                                            'config_kwargs')
    worker_config = Configuration(conf_file=None, **worker_config_kwargs)

    if worker_build_image is KeyError:
        assert not worker_config.get_build_from()
    else:
        if image_only:
            assert worker_config_kwargs['build_from'] == worker_build_image
            assert worker_config.get_build_from() == worker_build_image
        else:
            assert 'build_from' not in worker_config_kwargs
            assert not worker_config.get_build_from()

    if is_flatpak:
        assert user_params.flatpak.value
def test_create_binary_container_pipeline_run(self, koji_task_id, isolated,
                                              scratch, release):
    rcm = 'rcm'
    rcm_scratch = 'rcm_scratch'
    with NamedTemporaryFile(mode="wt") as fp:
        fp.write("""
[default_binary]
openshift_url = /
namespace = {namespace}
use_auth = false
pipeline_run_path = {pipeline_run_path}
reactor_config_map = {rcm}
reactor_config_map_scratch = {rcm_scratch}
""".format(namespace=TEST_OCP_NAMESPACE,
           pipeline_run_path=TEST_PIPELINE_RUN_TEMPLATE,
           rcm=rcm,
           rcm_scratch=rcm_scratch))
        fp.flush()
        dummy_config = Configuration(fp.name, conf_section='default_binary')
        osbs = OSBS(dummy_config)

    random_postfix = 'sha-timestamp'
    (flexmock(utils)
        .should_receive('generate_random_postfix')
        .and_return(random_postfix))

    name = utils.make_name_from_git(TEST_GIT_URI, TEST_GIT_BRANCH)
    pipeline_run_name = utils.make_name_from_git(TEST_GIT_URI, TEST_GIT_BRANCH)
    if isolated:
        pipeline_run_name = f'isolated-{random_postfix}'
    if scratch:
        pipeline_run_name = f'scratch-{random_postfix}'

    (flexmock(utils)
        .should_receive('get_repo_info')
        .with_args(TEST_GIT_URI, TEST_GIT_REF, git_branch=TEST_GIT_BRANCH, depth=None)
        .and_return(self.mock_repo_info()))

    rand = '67890'
    timestr = '20170731111111'
    (flexmock(sys.modules['osbs.build.user_params'])
        .should_receive('utcnow')
        .once()
        .and_return(datetime.datetime.strptime(timestr, '%Y%m%d%H%M%S')))
    (flexmock(random)
        .should_receive('randrange')
        .with_args(10**(len(rand) - 1), 10**len(rand))
        .and_return(int(rand)))

    image_tag = f'{TEST_USER}/{TEST_COMPONENT}:{TEST_TARGET}-{rand}-{timestr}'

    self.mock_start_pipeline()
    signing_intent = 'signing_intent'
    pipeline_run = osbs.create_binary_container_pipeline_run(
        target=TEST_TARGET,
        signing_intent=signing_intent,
        koji_task_id=koji_task_id,
        isolated=isolated,
        scratch=scratch,
        release=release,
        **REQUIRED_BUILD_ARGS)

    assert isinstance(pipeline_run, PipelineRun)
    assert pipeline_run.input_data['metadata']['name'] == pipeline_run_name

    for ws in pipeline_run.input_data['spec']['workspaces']:
        if ws['name'] == PRUN_TEMPLATE_REACTOR_CONFIG_WS:
            if scratch:
                assert ws['configmap']['name'] == rcm_scratch
            else:
                assert ws['configmap']['name'] == rcm

        if ws['name'] in [PRUN_TEMPLATE_BUILD_DIR_WS, PRUN_TEMPLATE_CONTEXT_DIR_WS]:
            assert ws['volumeClaimTemplate']['metadata']['namespace'] == TEST_OCP_NAMESPACE

    for param in pipeline_run.input_data['spec']['params']:
        if param['name'] == PRUN_TEMPLATE_USER_PARAMS:
            assert param['value'] != {}

            up = json.loads(param['value'])
            expect_up = {}
            if scratch:
                expect_up['reactor_config_map'] = rcm_scratch
                expect_up['scratch'] = True
            else:
                expect_up['reactor_config_map'] = rcm
            expect_up['base_image'] = MockDfParser.baseimage
            expect_up['component'] = TEST_COMPONENT
            expect_up['git_branch'] = TEST_GIT_BRANCH
            expect_up['git_ref'] = TEST_GIT_REF
            expect_up['git_uri'] = TEST_GIT_URI
            expect_up['kind'] = BuildUserParams.KIND
            if koji_task_id:
                expect_up['koji_task_id'] = koji_task_id
            expect_up['name'] = name
            expect_up['koji_target'] = TEST_TARGET
            expect_up['user'] = TEST_USER
            expect_up['signing_intent'] = signing_intent
            if isolated:
                expect_up['isolated'] = True
            if release:
                expect_up['release'] = release
            expect_up['image_tag'] = image_tag

            assert up == expect_up
def main():
    parser, args = cli()

    # OSBS2 TBD if we remove setup_json_capture, we can just create configuration without instance
    # as verbosity is read from general section
    # also we could even just read verbosity from args and create configurations only in
    # cmd functions
    try:
        if args.instance:
            os_conf = Configuration(conf_file=args.config,
                                    conf_section=args.instance,
                                    cli_args=args)
        else:
            os_conf = Configuration(conf_file=args.config, cli_args=args)
    except OsbsException as ex:
        logger.error("Configuration error: %s", ex.message)
        return -1

    is_verbose = os_conf.get_verbosity()
    if args.quiet:
        set_logging(level=logging.WARNING)
    elif is_verbose:
        set_logging(level=logging.DEBUG)
        logger.debug("Logging level set to debug")
    else:
        set_logging(level=logging.INFO)

    # required just for setup_json_capture, if we don't need it anymore we could just remove it
    # OSBS2 TBD
    osbs = OSBS(os_conf)
    if args.capture_dir is not None:
        setup_json_capture(osbs, os_conf, args.capture_dir)

    return_value = -1
    try:
        # OSBS2 TBD
        # this breaks all other commands which require 2nd osbs arg, which have to be cleaned,
        # also if we will still use some, like login/token, we would have to require
        # instance name, as we can't choose default since we have now 2 defaults
        # one for binary and another for source
        return_value = args.func(args)
    except AttributeError:
        if hasattr(args, 'func'):
            raise
        else:
            parser.print_help()
    except KeyboardInterrupt:
        print("Quitting on user request.")
        return -1
    except OsbsNetworkException as ex:
        if is_verbose:
            raise
        else:
            logger.error("Network error at %s (%d): %s", ex.url, ex.status_code, ex)
            return -1
    except OsbsAuthException as ex:
        if is_verbose:
            raise
        else:
            logger.error("Authentication failure: %s", ex)
            return -1
    except OsbsResponseException as ex:
        if is_verbose:
            raise
        else:
            if isinstance(ex.json, dict) and 'message' in ex.json:
                msg = ex.json['message']
            else:
                msg = str(ex)
            logger.error("Server returned error %s: %s", ex.status_code, msg)
            return -1
    except Exception as ex:  # pylint: disable=broad-except
        if is_verbose:
            raise
        else:
            logger.error("Exception caught: %s", repr(ex))
            return -1

    return return_value
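This main() signals failure via -1 return values instead of letting exceptions escape. A minimal sketch of the console-script wiring that turns those returns into process exit codes (the placement of this stanza is an assumption; the project may register an entry point instead):

# Hypothetical module footer; assumes main() is defined in this module.
import sys

if __name__ == '__main__':
    sys.exit(main())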
def run(self):
    metadata = get_build_json().get("metadata", {})
    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept everything
    # from specific IP and will set specific X-Remote-User for such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None,
                              openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl,
                              namespace=metadata.get('namespace', None))
    osbs = OSBS(osbs_conf, osbs_conf)

    try:
        commit_id = self.workflow.source.commit_id
    except AttributeError:
        commit_id = ""

    base_image = self.workflow.builder.base_image
    if base_image is not None:
        base_image_name = base_image.to_str()
        try:
            base_image_id = self.workflow.base_image_inspect['Id']
        except KeyError:
            base_image_id = ""
    else:
        base_image_name = ""
        base_image_id = ""

    try:
        with open(self.workflow.builder.df_path) as f:
            dockerfile_contents = f.read()
    except AttributeError:
        dockerfile_contents = ""

    annotations = {
        "dockerfile": dockerfile_contents,
        # We no longer store the 'docker build' logs as an annotation
        "logs": '',
        # We no longer store the rpm packages as an annotation
        "rpm-packages": '',
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": base_image_id,
        "base-image-name": base_image_name,
        "image-id": self.workflow.builder.image_id or '',
        "digests": json.dumps(self.get_pullspecs(self.get_digests())),
        "plugins-metadata": json.dumps(self.get_plugin_metadata())
    }

    help_result = self.workflow.prebuild_results.get(AddHelpPlugin.key)
    if isinstance(help_result, dict) and 'help_file' in help_result and 'status' in help_result:
        if help_result['status'] == AddHelpPlugin.NO_HELP_FILE_FOUND:
            annotations['help_file'] = json.dumps(None)
        elif help_result['status'] == AddHelpPlugin.HELP_GENERATED:
            annotations['help_file'] = json.dumps(help_result['help_file'])
        else:
            self.log.error("Unknown result from add_help plugin: %s", help_result)

    pulp_push_results = self.workflow.postbuild_results.get(PLUGIN_PULP_PUSH_KEY)
    if pulp_push_results:
        top_layer, _ = pulp_push_results
        annotations['v1-image-id'] = top_layer

    media_types = []
    if pulp_push_results:
        media_types += [MEDIA_TYPE_DOCKER_V1]

    # pulp_pull may run on worker as a postbuild plugin or on orchestrator as an exit plugin
    pulp_pull_results = (self.workflow.postbuild_results.get(PulpPullPlugin.key) or
                         self.workflow.exit_results.get(PulpPullPlugin.key))
    if isinstance(pulp_pull_results, Exception):
        pulp_pull_results = None

    if pulp_pull_results:
        media_types += pulp_pull_results

    if media_types:
        annotations['media-types'] = json.dumps(sorted(list(set(media_types))))

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(self.workflow.exported_image_sequence) > 0:
        tar_path = self.workflow.exported_image_sequence[-1].get("path")
        tar_size = self.workflow.exported_image_sequence[-1].get("size")
        tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
        tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        annotations["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    annotations.update(self.get_config_map())
    self.apply_build_result_annotations(annotations)

    # For arrangement version 4 onwards (where group_manifests
    # runs in the orchestrator build), restore the repositories
    # metadata which orchestrate_build adjusted.
    if PLUGIN_GROUP_MANIFESTS_KEY in self.workflow.postbuild_results:
        annotations['repositories'] = json.dumps(self.get_repositories())

    try:
        osbs.set_annotations_on_build(build_id, annotations)
    except OsbsResponseException:
        self.log.debug("annotations: %r", annotations)
        raise

    labels = self.make_labels()
    if labels:
        try:
            osbs.update_labels_on_build(build_id, labels)
        except OsbsResponseException:
            self.log.debug("labels: %r", labels)
            raise

    return {"annotations": annotations, "labels": labels}
def main():
    parser, args = cli()
    try:
        os_conf = Configuration(conf_file=args.config,
                                conf_section=args.instance,
                                cli_args=args)
        build_conf = Configuration(conf_file=args.config,
                                   conf_section=args.instance,
                                   cli_args=args)
    except OsbsException as ex:
        logger.error("Configuration error: %s", ex.message)
        return -1

    is_verbose = os_conf.get_verbosity()
    if is_verbose:
        set_logging(level=logging.DEBUG)
        logger.debug("Logging level set to debug")
    elif args.quiet:
        set_logging(level=logging.WARNING)
    else:
        set_logging(level=logging.INFO)

    osbs = OSBS(os_conf, build_conf)

    if args.capture_dir is not None:
        setup_json_capture(osbs, os_conf, args.capture_dir)

    try:
        args.func(args, osbs)
    except AttributeError as ex:
        if hasattr(args, 'func'):
            raise
        else:
            parser.print_help()
    except KeyboardInterrupt:
        print("Quitting on user request.")
        return -1
    except OsbsNetworkException as ex:
        if is_verbose:
            raise
        else:
            logger.error("Network error at %s (%d): %s",
                         ex.url, ex.status_code, ex.message)
            return -1
    except OsbsAuthException as ex:
        if is_verbose:
            raise
        else:
            logger.error("Authentication failure: %s", ex.message)
            return -1
    except OsbsResponseException as ex:
        if is_verbose:
            raise
        else:
            if isinstance(ex.json, dict) and 'message' in ex.json:
                msg = ex.json['message']
            else:
                msg = ex.message
            logger.error("Server returned error %s: %s", ex.status_code, msg)
            return -1
    except Exception as ex:  # pylint: disable=broad-except
        if is_verbose:
            raise
        else:
            logger.error("Exception caught: %s", repr(ex))
            return -1
def run(self):
    try:
        build_json = json.loads(os.environ["BUILD"])
    except KeyError:
        self.log.error("No $BUILD env variable. Probably not running in build container.")
        return

    kwargs = {}
    metadata = build_json.get("metadata", {})
    if 'namespace' in metadata:
        kwargs['namespace'] = metadata['namespace']

    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept everything
    # from specific IP and will set specific X-Remote-User for such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None,
                              openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl)
    osbs = OSBS(osbs_conf, osbs_conf)

    try:
        commit_id = self.workflow.source.commit_id
    except AttributeError:
        commit_id = ""

    labels = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        "logs": "\n".join(self.workflow.build_logs),
        "rpm-packages": "\n".join(self.get_post_result(PostBuildRPMqaPlugin.key)),
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": self.workflow.base_image_inspect['Id'],
        "base-image-name": self.workflow.builder.base_image.to_str(),
        "image-id": self.workflow.builder.image_id,
        "digests": json.dumps(self.get_digests()),
    }

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(self.workflow.exported_image_sequence) > 0:
        tar_path = self.workflow.exported_image_sequence[-1].get("path")
        tar_size = self.workflow.exported_image_sequence[-1].get("size")
        tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
        tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        labels["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    osbs.set_annotations_on_build(build_id, labels, **kwargs)
    return labels
def test_create_source_container_pipeline_run(self, koji_task_id, scratch):
    rcm = 'rcm'
    rcm_scratch = 'rcm_scratch'
    with NamedTemporaryFile(mode="wt") as fp:
        fp.write("""
[default_source]
openshift_url = /
namespace = {namespace}
use_auth = false
pipeline_run_path = {pipeline_run_path}
reactor_config_map = {rcm}
reactor_config_map_scratch = {rcm_scratch}
""".format(namespace=TEST_OCP_NAMESPACE,
           pipeline_run_path=TEST_PIPELINE_RUN_TEMPLATE,
           rcm=rcm,
           rcm_scratch=rcm_scratch))
        fp.flush()
        dummy_config = Configuration(fp.name, conf_section='default_source')
        osbs = OSBS(dummy_config)

    random_postfix = 'sha-timestamp'
    (flexmock(utils)
        .should_receive('generate_random_postfix')
        .and_return(random_postfix))

    pipeline_run_name = f'source-{random_postfix}'
    sources_for_koji_build_id = 123456
    signing_intent = 'signing_intent'

    rand = '67890'
    timestr = '20170731111111'
    (flexmock(sys.modules['osbs.build.user_params'])
        .should_receive('utcnow')
        .once()
        .and_return(datetime.datetime.strptime(timestr, '%Y%m%d%H%M%S')))
    (flexmock(random)
        .should_receive('randrange')
        .with_args(10**(len(rand) - 1), 10**len(rand))
        .and_return(int(rand)))

    image_tag = f'{TEST_USER}/{TEST_COMPONENT}:{TEST_TARGET}-{rand}-{timestr}'

    self.mock_start_pipeline()
    pipeline_run = osbs.create_source_container_build(
        target=TEST_TARGET,
        signing_intent=signing_intent,
        koji_task_id=koji_task_id,
        scratch=scratch,
        sources_for_koji_build_id=sources_for_koji_build_id,
        **REQUIRED_SOURCE_CONTAINER_BUILD_ARGS)

    assert isinstance(pipeline_run, PipelineRun)
    assert pipeline_run.input_data['metadata']['name'] == pipeline_run_name

    for ws in pipeline_run.input_data['spec']['workspaces']:
        if ws['name'] == PRUN_TEMPLATE_REACTOR_CONFIG_WS:
            if scratch:
                assert ws['configmap']['name'] == rcm_scratch
            else:
                assert ws['configmap']['name'] == rcm

        if ws['name'] in [PRUN_TEMPLATE_BUILD_DIR_WS, PRUN_TEMPLATE_CONTEXT_DIR_WS]:
            assert ws['volumeClaimTemplate']['metadata']['namespace'] == TEST_OCP_NAMESPACE

    for param in pipeline_run.input_data['spec']['params']:
        if param['name'] == PRUN_TEMPLATE_USER_PARAMS:
            assert param['value'] != {}

            up = json.loads(param['value'])
            expect_up = {}
            if scratch:
                expect_up['reactor_config_map'] = rcm_scratch
                expect_up['scratch'] = True
            else:
                expect_up['reactor_config_map'] = rcm
            expect_up['component'] = TEST_COMPONENT
            expect_up['kind'] = SourceContainerUserParams.KIND
            if koji_task_id:
                expect_up['koji_task_id'] = koji_task_id
            expect_up['koji_target'] = TEST_TARGET
            expect_up['user'] = TEST_USER
            expect_up['image_tag'] = image_tag
            expect_up['sources_for_koji_build_id'] = sources_for_koji_build_id
            expect_up['sources_for_koji_build_nvr'] = 'test-1-123'
            expect_up['signing_intent'] = signing_intent

            assert up == expect_up
def run(self):
    metadata = get_build_json().get("metadata", {})
    kwargs = {}

    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None,
                              openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl,
                              build_json_dir=self.build_json_dir,
                              namespace=metadata.get('namespace', None))
    osbs = OSBS(osbs_conf, osbs_conf)

    imagestream = None
    try:
        imagestream = osbs.get_image_stream(self.imagestream)
    except OsbsResponseException:
        if self.insecure_registry is not None:
            kwargs['insecure_registry'] = self.insecure_registry

        self.log.info("Creating ImageStream %s for %s", self.imagestream,
                      self.docker_image_repo)

        imagestream = osbs.create_image_stream(self.imagestream,
                                               self.docker_image_repo,
                                               **kwargs)

    self.log.info("Importing new tags for %s", self.imagestream)

    primaries = None
    try:
        primaries = self.workflow.build_result.annotations['repositories']['primary']
    except (TypeError, KeyError):
        self.log.exception('Unable to read primary repositories annotations')

    if not primaries:
        raise RuntimeError('Could not find primary images in workflow')

    failures = False
    for s in primaries:
        tag_image_name = ImageName.parse(s)
        tag = tag_image_name.tag
        try:
            osbs.ensure_image_stream_tag(imagestream.json(), tag)
            self.log.info("Imported ImageStreamTag: (%s)", tag)
        except OsbsResponseException:
            failures = True
            self.log.info("Could not import ImageStreamTag: (%s)", tag)
    if failures:
        raise RuntimeError("Failed to import ImageStreamTag(s). Check logs")

    attempts = 0
    while not osbs.import_image(self.imagestream):
        attempts += 1

        if attempts >= self.import_attempts:
            msg = "Failed to import new tags for %s"
            raise RuntimeError(msg % self.imagestream)

        self.log.info("no new tags, will retry after %d seconds (%d/%d)",
                      self.retry_delay, attempts, self.import_attempts)
        sleep(self.retry_delay)
def run(self):
    metadata = get_build_json().get("metadata", {})
    try:
        build_id = metadata["name"]
    except KeyError:
        self.log.error("malformed build json")
        return
    self.log.info("build id = %s", build_id)

    # initial setup will use host based auth: apache will be set to accept everything
    # from specific IP and will set specific X-Remote-User for such requests
    # FIXME: remove `openshift_uri` once osbs-client is released
    osbs_conf = Configuration(conf_file=None,
                              openshift_uri=self.url,
                              openshift_url=self.url,
                              use_auth=self.use_auth,
                              verify_ssl=self.verify_ssl,
                              namespace=metadata.get('namespace', None))
    osbs = OSBS(osbs_conf, osbs_conf)

    try:
        commit_id = self.workflow.source.commit_id
    except AttributeError:
        commit_id = ""

    try:
        base_image_id = self.workflow.base_image_inspect['Id']
    except docker.errors.NotFound:
        base_image_id = ""

    annotations = {
        "dockerfile": self.get_pre_result(CpDockerfilePlugin.key),
        "artefacts": self.get_pre_result(DistgitFetchArtefactsPlugin.key),
        # We no longer store the 'docker build' logs as an annotation
        "logs": '',
        # We no longer store the rpm packages as an annotation
        "rpm-packages": '',
        "repositories": json.dumps(self.get_repositories()),
        "commit_id": commit_id,
        "base-image-id": base_image_id,
        "base-image-name": self.workflow.builder.base_image.to_str(),
        "image-id": self.workflow.builder.image_id or '',
        "digests": json.dumps(self.get_pullspecs(self.get_digests())),
        "plugins-metadata": json.dumps(self.get_plugin_metadata())
    }

    tar_path = tar_size = tar_md5sum = tar_sha256sum = None
    if len(self.workflow.exported_image_sequence) > 0:
        tar_path = self.workflow.exported_image_sequence[-1].get("path")
        tar_size = self.workflow.exported_image_sequence[-1].get("size")
        tar_md5sum = self.workflow.exported_image_sequence[-1].get("md5sum")
        tar_sha256sum = self.workflow.exported_image_sequence[-1].get("sha256sum")
    # looks like that openshift can't handle value being None (null in json)
    if tar_size is not None and tar_md5sum is not None and tar_sha256sum is not None and \
            tar_path is not None:
        annotations["tar_metadata"] = json.dumps({
            "size": tar_size,
            "md5sum": tar_md5sum,
            "sha256sum": tar_sha256sum,
            "filename": os.path.basename(tar_path),
        })

    try:
        osbs.set_annotations_on_build(build_id, annotations)
    except OsbsResponseException:
        self.log.debug("annotations: %r", annotations)
        raise

    labels = self.make_labels()
    if labels:
        try:
            osbs.update_labels_on_build(build_id, labels)
        except OsbsResponseException:
            self.log.debug("labels: %r", labels)
            raise

    return {"annotations": annotations, "labels": labels}
def test_v2_all_values_and_json(self):
    repo_conf = RepoConfiguration(git_branch=TEST_GIT_BRANCH,
                                  git_ref=TEST_GIT_REF,
                                  git_uri=TEST_GIT_URI)
    repo_info = RepoInfo(configuration=repo_conf)
    build_conf = Configuration(conf_file=None, scratch=False)
    userdata = {'custom': 'userdata'}

    # all values that BuildUserParams stores
    param_kwargs = {
        'base_image': 'buildroot:old',
        'component': TEST_COMPONENT,
        'compose_ids': [1, 2],
        'filesystem_koji_task_id': TEST_FILESYSTEM_KOJI_TASK_ID,
        'flatpak': False,
        # 'flatpak_base_image': self.flatpak_base_image,  # not used with false flatpak
        # 'git_branch': TEST_GIT_BRANCH,
        # 'git_ref': TEST_GIT_REF,
        # 'git_uri': TEST_GIT_URI,
        'image_tag': 'user/None:none-0-0',
        'include_koji_repo': True,
        'isolated': False,
        'koji_parent_build': 'fedora-26-9',
        'koji_target': 'tothepoint',
        'operator_bundle_replacement_pullspecs': {
            'foo/fedora:30': 'bar/fedora@sha256:deadbeef'
        },
        # "orchestrator_deadline": 4,  # set in config
        'parent_images_digests': {
            'registry.fedorahosted.org/fedora:29': {
                'x86_64': 'registry.fedorahosted.org/fedora@sha256:8b96f2f9f88179a065738b2b37'
                          '35e386efb2534438c2a2f45b74358c0f344c81'
            }
        },
        # 'name': self.name,  # calculated value
        'platform': 'x86_64',
        'platforms': ['x86_64', ],
        # 'reactor_config_map': 'reactor-config-map',  # set in config
        'release': '29',
        # 'scratch': True,  # set in config
        'signing_intent': False,
        'task_id': TEST_KOJI_TASK_ID,
        # 'trigger_imagestreamtag': 'base_image:latest',  # generated from base_image
        'user': TEST_USER,
        'userdata': userdata,
        # 'yum_repourls': ,  # not used with compose_ids
        # "worker_deadline": 3,  # set in config
    }
    # additional values that BuildUserParams requires but stores under different names
    param_kwargs.update({
        'build_conf': build_conf,
        'name_label': 'name_label',
        'repo_info': repo_info,
    })

    rand = '12345'
    timestr = '20170731111111'
    (flexmock(sys.modules['osbs.build.user_params'])
        .should_receive('utcnow')
        .once()
        .and_return(datetime.datetime.strptime(timestr, '%Y%m%d%H%M%S')))
    (flexmock(osbs.utils)
        .should_receive('utcnow')
        .once()
        .and_return(datetime.datetime.strptime(timestr, '%Y%m%d%H%M%S')))
    (flexmock(random)
        .should_receive('randrange')
        .times(2)
        .with_args(10**(len(rand) - 1), 10**len(rand))
        .and_return(int(rand)))

    spec = BuildUserParams.make_params(**param_kwargs)

    expected_json = {
        "base_image": "buildroot:old",
        "component": TEST_COMPONENT,
        "compose_ids": [1, 2],
        "filesystem_koji_task_id": TEST_FILESYSTEM_KOJI_TASK_ID,
        "include_koji_repo": True,
        "git_branch": TEST_GIT_BRANCH,
        "git_ref": TEST_GIT_REF,
        "git_uri": TEST_GIT_URI,
        "image_tag": "{}/{}:tothepoint-{}-{}-x86_64".format(TEST_USER, TEST_COMPONENT,
                                                            rand, timestr),
        "kind": "build_user_params",
        "koji_parent_build": "fedora-26-9",
        "koji_target": "tothepoint",
        "name": "path-master-cd1e4" + f'{rand}-{timestr}',
        'operator_bundle_replacement_pullspecs': {
            'foo/fedora:30': 'bar/fedora@sha256:deadbeef'
        },
        'parent_images_digests': {
            'registry.fedorahosted.org/fedora:29': {
                'x86_64': 'registry.fedorahosted.org/fedora@sha256:8b96f2f9f88179a065738b2b37'
                          '35e386efb2534438c2a2f45b74358c0f344c81'
            }
        },
        "platform": "x86_64",
        "platforms": ["x86_64"],
        "release": "29",
        "user": TEST_USER,
        "userdata": userdata,
    }
    assert spec.to_json() == json.dumps(expected_json, sort_keys=True)

    spec2 = BuildUserParams.from_json(spec.to_json())
    assert spec2.to_json() == json.dumps(expected_json, sort_keys=True)