def test_parent_image_inspect(insecure, parents_pulled, tmpdir, source_params):
    """Check parent_image_inspect() with and without locally pulled parents."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    builder.tasker.build_method = default_build_method
    builder.parents_pulled = parents_pulled
    parent = ImageName.parse(tag)
    registry_name = "registry.example.com"
    parent.registry = registry_name
    builder.pull_registries = {
        registry_name: {'insecure': insecure, 'dockercfg_path': str(tmpdir)},
    }
    if not parents_pulled:
        # without a local pull, inspection must go through the registry helper
        (flexmock(atomic_reactor.util)
         .should_receive('get_inspect_for_image')
         .with_args(parent, parent.registry, insecure, str(tmpdir))
         .and_return({'Id': 123}))
    inspected = builder.parent_image_inspect(parent)
    assert inspected is not None
    assert inspected["Id"] is not None
def test_base_image_inspect(parents_pulled, base_exist, tmpdir, source_params):
    """Exercise the base_image_inspect property for existing and missing bases."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    builder._parents_pulled = parents_pulled
    if base_exist:
        if not parents_pulled:
            (flexmock(atomic_reactor.util)
             .should_receive('get_inspect_for_image')
             .and_return({'Id': 123}))
        inspected = builder.base_image_inspect
        assert inspected is not None
        assert inspected["Id"] is not None
    elif parents_pulled:
        # docker inspect path: a missing image surfaces as KeyError
        response = flexmock(content="not found", status_code=404)
        (flexmock(docker.APIClient)
         .should_receive('inspect_image')
         .and_raise(docker.errors.NotFound, "xyz", response))
        with pytest.raises(KeyError):
            builder.base_image_inspect
    else:
        (flexmock(atomic_reactor.util)
         .should_receive('get_inspect_for_image')
         .and_raise(NotImplementedError))
        with pytest.raises(NotImplementedError):
            builder.base_image_inspect
def test_copy_from_is_blocked(tmpdir):
    """test when user has specified COPY --from=image (instead of builder)"""
    dfp = df_parser(str(tmpdir))
    if MOCK:
        mock_docker()
    source = {'provider': 'path', 'uri': 'file://' + str(tmpdir), 'tmpdir': str(tmpdir)}

    # Valid: --from may reference a previously-defined stage by name or index.
    dfp.content = dedent("""\
        FROM monty AS vikings
        FROM python
        COPY --from=vikings /spam/eggs /bin/eggs
        COPY --from=0 /spam/eggs /bin/eggs
        COPY src dest
        """)
    # init calls set_df_path, which should not raise an error:
    InsideBuilder(get_source_instance_for(source), 'built-img')

    dfp.content = dedent("""\
        FROM monty as vikings
        FROM python
        # using a stage name we haven't seen should break:
        COPY --from=notvikings /spam/eggs /bin/eggs
        """)
    with pytest.raises(RuntimeError) as exc_info:
        InsideBuilder(get_source_instance_for(source), 'built-img')  # calls set_df_path at init
    assert "FROM notvikings AS source" in str(exc_info.value)

    dfp.content = dedent("""\
        FROM monty as vikings
        # using an index we haven't seen should break:
        COPY --from=5 /spam/eggs /bin/eggs
        """)
    with pytest.raises(RuntimeError) as exc_info:
        InsideBuilder(get_source_instance_for(source), 'built-img')  # calls set_df_path at init
    assert "COPY --from=5" in str(exc_info.value)
def test_build_bad_git_commit_dockerfile(tmpdir, source_params):
    """A docker build that fails must yield a failed build result."""
    image_tag = "test-build:test_tag"
    if MOCK:
        mock_docker(build_should_fail=True, provided_image_repotags=image_tag)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), image_tag)
    assert builder.build().is_failed()
def test_parent_images(tmpdir, source_params):
    """set_base_image() must record the new image against the original base."""
    if MOCK:
        mock_docker()
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    first_base = str(builder.base_image)
    assert first_base in builder.parent_images
    assert builder.parent_images[first_base] is None
    builder.set_base_image("spam:eggs")
    assert builder.parent_images[first_base] == "spam:eggs"
def test_inspect_base_image(tmpdir, source_params):
    """inspect_base_image() must return metadata carrying an Id."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    metadata = builder.inspect_base_image()
    assert metadata is not None
    assert metadata["Id"] is not None
def test_pull_base_image_with_registry(tmpdir):
    """pull_base_image() with an explicit registry must return a set of tags."""
    mock_docker()
    source_params = {"provider": "path", "uri": "file://%s" % str(tmpdir), "tmpdir": str(tmpdir)}
    with open(os.path.join(str(tmpdir), DOCKERFILE_FILENAME), "wt") as fp:
        # single line: write() is the right call, not writelines() with one item
        fp.write("FROM %s/namespace/repo:tag\n" % LOCALHOST_REGISTRY)
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, "")
    pulled_tags = b.pull_base_image(LOCALHOST_REGISTRY, insecure=True)
    assert isinstance(pulled_tags, set)
def test_no_base_image(tmpdir):
    """A Dockerfile without FROM must make set_df_path raise RuntimeError."""
    if MOCK:
        mock_docker()
    source = {'provider': 'path', 'uri': 'file://' + DOCKERFILE_OK_PATH, 'tmpdir': str(tmpdir)}
    builder = InsideBuilder(get_source_instance_for(source), 'built-img')
    df_parser(str(tmpdir)).content = "# no FROM\nADD spam /eggs"
    with pytest.raises(RuntimeError) as exc:
        builder.set_df_path(str(tmpdir))
    assert "no base image specified" in str(exc.value)
def test_get_base_image_info(tmpdir, source_params):
    """get_base_image_info() must report both Id and RepoTags."""
    if MOCK:
        mock_docker(provided_image_repotags='fedora:latest')
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    info = builder.get_base_image_info()
    assert info is not None
    assert info["Id"] is not None
    assert info["RepoTags"] is not None
def test_parent_images(parents_pulled, tmpdir, source_params):
    """set_base_image() must map the old base to the parsed new image."""
    if MOCK:
        mock_docker()
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    original = builder.base_image
    assert original in builder.parent_images
    assert builder.parent_images[original] is None
    builder.set_base_image("spam:eggs", parents_pulled=parents_pulled)
    assert builder.parent_images[original] == ImageName.parse("spam:eggs")
    assert builder._parents_pulled == parents_pulled
def test_build_image(tmpdir, source_params):
    """A successful build yields an image id that docker can inspect."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    source_params.update({'tmpdir': str(tmpdir)})
    tasker = DockerTasker()
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    result = builder.build()
    assert tasker.inspect_image(result.image_id)
    # clean up the test image
    tasker.remove_image(result.image_id)
def test_parent_images_to_str(tmpdir, caplog):
    """parent_images_to_str() stringifies mapped parents and logs unmapped ones."""
    if MOCK:
        mock_docker()
    source = {'provider': 'path', 'uri': 'file://' + DOCKERFILE_OK_PATH, 'tmpdir': str(tmpdir)}
    builder = InsideBuilder(get_source_instance_for(source), 'built-img')
    builder.set_base_image("spam")
    builder.parent_images["bacon"] = None
    assert builder.parent_images_to_str() == {"fedora:latest": "spam:latest"}
    assert "None in: base bacon has parent None" in caplog.text
def test_parent_images(parents_pulled, tmpdir, source_params):
    """set_base_image() must map the old base and record the pull state."""
    if MOCK:
        mock_docker()
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    original = builder.base_image
    if not builder.base_from_scratch:
        # a scratch base has no parent image to remap
        assert original in builder.parent_images
        assert builder.parent_images[original] is None
        builder.set_base_image("spam:eggs", parents_pulled=parents_pulled)
        assert builder.parent_images[original] == ImageName.parse("spam:eggs")
        assert builder.parents_pulled == parents_pulled
def test_get_image_built_info(tmpdir, source_params, image, will_raise):
    """get_built_image_info() raises for bad images and succeeds otherwise."""
    if MOCK:
        mock_docker(provided_image_repotags=image)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), image)
    if will_raise:
        with pytest.raises(Exception):
            builder.get_built_image_info()
    else:
        builder.get_built_image_info()
def test_inspect_built_image(tmpdir, source_params):
    """inspect_built_image() must return metadata carrying an Id."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    # skip the is-built check; the image is provided by the mock
    flexmock(InsideBuilder, ensure_is_built=None)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    metadata = builder.inspect_built_image()
    assert metadata is not None
    assert metadata["Id"] is not None
def test_build_generator_raises(tmpdir):
    """If the docker build generator raises, build() wraps it in ExceptionBuildResult."""
    provided_image = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=provided_image,
                    build_should_fail=True,
                    build_should_fail_generator=True)
    source_params = SOURCE.copy()
    source_params.update({"tmpdir": str(tmpdir)})
    s = get_source_instance_for(source_params)
    # the unused DockerTasker instance the original created has been removed
    b = InsideBuilder(s, provided_image)
    build_result = b.build()
    assert isinstance(build_result, ExceptionBuildResult)
    assert build_result.is_failed()
    assert "build generator failure" in build_result.logs
def test_parent_images_to_str(tmpdir, caplog):
    """parent_images_to_str() stringifies resolved parents and logs unresolved ones."""
    if MOCK:
        mock_docker()
    source = {'provider': 'path', 'uri': 'file://' + DOCKERFILE_OK_PATH, 'tmpdir': str(tmpdir)}
    builder = InsideBuilder(get_source_instance_for(source), 'built-img')
    builder.dockerfile_images = DockerfileImages(['fedora:latest', 'bacon'])
    builder.dockerfile_images['fedora:latest'] = "spam"
    assert builder.parent_images_to_str() == {"fedora:latest": "spam:latest"}
    assert "None in: base bacon:latest has parent None" in caplog.text
def test_get_base_image_info(tmpdir, source_params, image, will_raise):
    """get_base_image_info() raises for bad images, otherwise returns Id and RepoTags."""
    if MOCK:
        mock_docker(provided_image_repotags=image)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), image)
    if will_raise:
        with pytest.raises(Exception):
            builder.get_base_image_info()
    else:
        info = builder.get_base_image_info()
        assert info is not None
        assert info["Id"] is not None
        assert info["RepoTags"] is not None
def build_docker_image(self):
    """
    build docker image

    :return: BuildResults
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise

        build_result = self.builder.build()
        self.build_logs = build_result.logs
        self.build_failed = build_result.is_failed()

        # only inspect the image if the build actually produced one
        if not build_result.is_failed():
            self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        # push the built image to every configured docker registry
        if not build_result.is_failed():
            for registry in self.push_conf.docker_registries:
                self.builder.push_built_image(registry.uri, insecure=registry.insecure)

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        # exit plugins run whether the build succeeded or not
        self.source.remove_tmpdir()
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run()
        except PluginFailedException as ex:
            # exit-plugin failure is logged, not re-raised, so the build
            # result still propagates to the caller
            logger.error("one or more exit plugins failed: %s", ex)
def test_build_generator_raises(tmpdir):
    """If the docker build generator raises, build() wraps it in ExceptionBuildResult."""
    provided_image = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=provided_image,
                    build_should_fail=True,
                    build_should_fail_generator=True)
    source_params = SOURCE.copy()
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    # the unused DockerTasker instance the original created has been removed
    b = InsideBuilder(s, provided_image)
    build_result = b.build()
    assert isinstance(build_result, ExceptionBuildResult)
    assert build_result.is_failed()
    assert 'build generator failure' in build_result.logs
def test_parent_image_inspect(parents_pulled, tmpdir, source_params):
    """parent_image_inspect() must use the registry helper when parents weren't pulled."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    builder._parents_pulled = parents_pulled
    if not parents_pulled:
        (flexmock(atomic_reactor.util)
         .should_receive('get_inspect_for_image')
         .and_return({'Id': 123}))
    inspected = builder.parent_image_inspect(tag)
    assert inspected is not None
    assert inspected["Id"] is not None
def test_pull_base_image(tmpdir, source_params):
    """pull_base_image() must pull both tags and leave inspectable images."""
    if MOCK:
        mock_docker()
    source_params.update({"tmpdir": str(tmpdir)})
    tasker = DockerTasker()
    builder = InsideBuilder(get_source_instance_for(source_params), "")
    pulled_tags = builder.pull_base_image(LOCALHOST_REGISTRY, insecure=True)
    assert isinstance(pulled_tags, set)
    assert len(pulled_tags) == 2
    for tag in pulled_tags:
        name = ImageName.parse(tag)
        assert tasker.inspect_image(name) is not None
        assert name.repo == git_base_image.repo
        assert name.tag == git_base_image.tag
    # clean
    tasker.remove_image(git_base_image)
def test_different_custom_base_images(tmpdir):
    """Multiple distinct custom base images must be rejected at init."""
    if MOCK:
        mock_docker()
    source_params = {'provider': 'path',
                     'uri': 'file://' + DOCKERFILE_MULTISTAGE_CUSTOM_BAD_PATH,
                     'tmpdir': str(tmpdir)}
    source = get_source_instance_for(source_params)
    with pytest.raises(NotImplementedError) as exc:
        InsideBuilder(source, '')
    expected = "multiple different custom base images aren't allowed in Dockerfile"
    assert expected in str(exc.value)
def test_parent_image_inspect(parents_pulled, tmpdir, source_params):
    """parent_image_inspect() must work whether or not parents were pulled."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    builder.parents_pulled = parents_pulled
    if not parents_pulled:
        flexmock(atomic_reactor.util).should_receive('get_inspect_for_image') \
                                     .and_return({'Id': 123})
    inspected = builder.parent_image_inspect(tag)
    assert inspected is not None
    assert inspected["Id"] is not None
def test_ensure_built(tmpdir, source_params, is_built):
    """ensure_is_built()/ensure_not_built() must agree with the is_built flag."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, '')
    b.is_built = is_built
    if is_built:
        # compare to None with 'is', not '==' (PEP 8 / E711)
        assert b.ensure_is_built() is None
        with pytest.raises(Exception):
            b.ensure_not_built()
    else:
        assert b.ensure_not_built() is None
        with pytest.raises(Exception):
            b.ensure_is_built()
def test_base_image_inspect(tmpdir, source_params, parents_pulled, base_exist):
    """Cover base_image_inspect for scratch bases, pulled/unpulled parents,
    and a base image that does not exist."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, '')
    b.parents_pulled = parents_pulled
    if b.base_from_scratch:
        # FROM scratch has no base that could be missing
        base_exist = True
    if base_exist:
        if b.base_from_scratch:
            # scratch base inspects to an empty dict
            built_inspect = b.base_image_inspect
            assert built_inspect == {}
        else:
            if not parents_pulled:
                # unpulled parents are inspected via the registry helper
                (flexmock(atomic_reactor.util)
                 .should_receive('get_inspect_for_image')
                 .and_return({'Id': 123}))
            built_inspect = b.base_image_inspect
            assert built_inspect is not None
            assert built_inspect["Id"] is not None
    else:
        if parents_pulled or b.custom_base_image:
            # docker inspect path: a missing image surfaces as KeyError
            response = flexmock(content="not found", status_code=404)
            (flexmock(docker.APIClient)
             .should_receive('inspect_image')
             .and_raise(docker.errors.NotFound, "xyz", response))
            with pytest.raises(KeyError):
                b.base_image_inspect
        else:
            (flexmock(atomic_reactor.util)
             .should_receive('get_inspect_for_image')
             .and_raise(NotImplementedError))
            with pytest.raises(NotImplementedError):
                b.base_image_inspect
def test_base_image_inspect(tmpdir, source_params, parents_pulled, base_exist):
    """Cover base_image_inspect for scratch bases, pulled/unpulled parents,
    and a base image that does not exist (docker build method variant)."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, '')
    b.tasker.build_method = default_build_method
    b.parents_pulled = parents_pulled
    if b.base_from_scratch:
        # FROM scratch has no base that could be missing
        base_exist = True
    if base_exist:
        if b.base_from_scratch:
            # scratch base inspects to an empty dict
            built_inspect = b.base_image_inspect
            assert built_inspect == {}
        else:
            if not parents_pulled:
                # unpulled parents are inspected via the registry helper
                (flexmock(atomic_reactor.util).should_receive(
                    'get_inspect_for_image').and_return({'Id': 123}))
            built_inspect = b.base_image_inspect
            assert built_inspect is not None
            assert built_inspect["Id"] is not None
    else:
        if parents_pulled or b.custom_base_image:
            # docker inspect path: a missing image surfaces as KeyError
            response = flexmock(content="not found", status_code=404)
            (flexmock(
                docker.APIClient).should_receive('inspect_image').and_raise(
                    docker.errors.NotFound, "xyz", response))
            with pytest.raises(KeyError):
                b.base_image_inspect  # pylint: disable=pointless-statement; is a property
        else:
            (flexmock(atomic_reactor.util).should_receive(
                'get_inspect_for_image').and_raise(NotImplementedError))
            with pytest.raises(NotImplementedError):
                b.base_image_inspect  # pylint: disable=pointless-statement; is a property
def test_get_base_image_info(tmpdir, source_params, image, will_raise):
    """get_base_image_info() for good, bad, and scratch base images."""
    # multistage custom builds are not covered by this test
    if DOCKERFILE_MULTISTAGE_CUSTOM_PATH in source_params['uri']:
        return
    if MOCK:
        mock_docker(provided_image_repotags=image)
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, image)
    if b.base_from_scratch:
        # a scratch base never raises: there is nothing to inspect
        will_raise = False
    if will_raise:
        with pytest.raises(Exception):
            built_inspect = b.get_base_image_info()
    else:
        built_inspect = b.get_base_image_info()
        if b.base_from_scratch:
            assert built_inspect is None
        else:
            assert built_inspect is not None
            assert built_inspect["Id"] is not None
            assert built_inspect["RepoTags"] is not None
def test_ensure_built(tmpdir, source_params, is_built):
    """The ensure_* helpers must match the builder's is_built flag."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    builder.is_built = is_built
    if is_built:
        assert builder.ensure_is_built() is None
        with pytest.raises(Exception):
            builder.ensure_not_built()
    else:
        assert builder.ensure_not_built() is None
        with pytest.raises(Exception):
            builder.ensure_is_built()
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 prebuild_plugins=None, prepublish_plugins=None, postbuild_plugins=None,
                 exit_plugins=None, plugin_files=None, openshift_build_selflink=None,
                 client_version=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param exit_plugins: dict, arguments for exit plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're actually
            running on openshift) without the actual hostname/IP address
        :param client_version: str, osbs-client version that built the build json
        """
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        # plugin configuration, keyed by phase
        self.prebuild_plugins_conf = prebuild_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins
        # per-phase plugin results, filled in by the plugin runners
        self.prebuild_results = {}
        self.postbuild_results = {}
        self.prepub_results = {}
        self.exit_results = {}
        self.plugin_workspace = {}
        self.plugins_timestamps = {}
        self.plugins_durations = {}
        self.plugins_errors = {}
        self.autorebuild_canceled = False
        self.build_failed = False
        self.plugin_failed = False
        self.plugin_files = plugin_files

        self.kwargs = kwargs

        self.builder = None
        self.build_logs = []
        self.built_image_inspect = None
        # cached lazily by the base_image_inspect property below
        self._base_image_inspect = None

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        #  Each plugin that transforms the image should save it as a new file and append it to
        #  the end of exported_image_sequence. Other plugins should then operate with last
        #  member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        if client_version:
            logger.debug("build json was built by osbs-client %s", client_version)

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_failed or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        if self._base_image_inspect is None:
            self._base_image_inspect = self.builder.tasker.inspect_image(
                self.builder.base_image)
        return self._base_image_inspect

    def throw_canceled_build_exception(self, *args, **kwargs):
        # installed as the SIGTERM handler while the build runs
        raise BuildCanceledException("Build was canceled")

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # a SIGTERM during the build cancels it via BuildCanceledException
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            start_time = datetime.datetime.now()
            self.plugins_timestamps['dockerbuild'] = start_time.isoformat()

            build_result = self.builder.build()

            try:
                finish_time = datetime.datetime.now()
                duration = finish_time - start_time
                seconds = duration.total_seconds()
                logger.debug("build finished in %ds", seconds)
                self.plugins_durations['dockerbuild'] = seconds
            except Exception:
                # timing is best-effort; never fail the build over it
                logger.exception("failed to save build duration")

            self.build_logs = build_result.logs
            self.build_failed = build_result.is_failed()

            if build_result.is_failed():
                # The docker build failed. Finish here, just run the
                # exit plugins (from the 'finally:' block below).
                self.plugins_errors['dockerbuild'] = ''
                return build_result

            self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()
                # restore default SIGTERM handling
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
def build_docker_image(self):
    """
    build docker image

    :return: BuildResults
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        start_time = datetime.datetime.now()
        self.plugins_timestamps['dockerbuild'] = start_time.isoformat()

        build_result = self.builder.build()

        try:
            finish_time = datetime.datetime.now()
            duration = finish_time - start_time
            seconds = duration.total_seconds()
            logger.debug("build finished in %ds", seconds)
            self.plugins_durations['dockerbuild'] = seconds
        except Exception:
            # timing is best-effort; never fail the build over it
            logger.exception("failed to save build duration")

        self.build_logs = build_result.logs
        self.build_failed = build_result.is_failed()

        if build_result.is_failed():
            # The docker build failed. Finish here, just run the
            # exit plugins (from the 'finally:' block below).
            self.plugins_errors['dockerbuild'] = ''
            return build_result

        self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        # exit plugins always run, even when the build failed
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
            raise
        finally:
            self.source.remove_tmpdir()
def build_docker_image(self):
    """
    build docker image

    :return: BuildResults
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        build_result = self.builder.build()
        self.build_logs = build_result.logs
        self.build_failed = build_result.is_failed()

        if build_result.is_failed():
            # The docker build failed. Finish here, just run the
            # exit plugins (from the 'finally:' block below).
            return build_result

        self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        # exit plugins always run; their failure is logged but not re-raised
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
        finally:
            self.source.remove_tmpdir()
def build_docker_image(self):
    """
    build docker image

    :return: BuildResult
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        self.fs_watcher.start()
        # a SIGTERM during the build cancels it via BuildCanceledException
        signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        # the actual image build is delegated to a buildstep plugin
        logger.info("running buildstep plugins")
        buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                  self.buildstep_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            self.build_result = buildstep_runner.run()

            if self.build_result.is_failed():
                raise PluginFailedException(self.build_result.fail_reason)
        except PluginFailedException as ex:
            self.builder.is_built = False
            logger.error('buildstep plugin failed: %s', ex)
            raise

        self.builder.is_built = True
        if self.build_result.is_image_available():
            self.builder.image_id = self.build_result.image_id

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        if self.build_result.is_image_available():
            self.built_image_inspect = self.builder.inspect_built_image()
            history = self.builder.tasker.d.history(self.builder.image_id)
            diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

            # diff_ids is ordered oldest first
            # history is ordered newest first
            # We want layer_sizes to be ordered oldest first
            self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                for (diff_id, layer) in zip(diff_ids, reversed(history))]

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return self.build_result
    except Exception as ex:
        logger.debug("caught exception (%r) so running exit plugins", ex)
        raise
    finally:
        # We need to make sure all exit plugins are executed
        signal.signal(signal.SIGTERM, lambda *args: None)
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
            raise
        finally:
            self.source.remove_tmpdir()
            self.fs_watcher.finish()
            # restore default SIGTERM handling
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None,
                 plugin_files=None, openshift_build_selflink=None,
                 client_version=None, buildstep_plugins=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're actually running
            on openshift) without the actual hostname/IP address
        :param client_version: str, osbs-client version used to render build json
        :param buildstep_plugins: dict, arguments for build-step plugins
        """
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        # plugin configuration, one entry per phase
        self.prebuild_plugins_conf = prebuild_plugins
        self.buildstep_plugins_conf = buildstep_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        # per-phase plugin results, keyed by plugin name
        self.prebuild_results = {}
        self.buildstep_result = {}
        self.postbuild_results = {}
        self.prepub_results = {}
        self.exit_results = {}
        self.build_result = BuildResult(fail_reason="not built")
        self.plugin_workspace = {}
        self.plugins_timestamps = {}
        self.plugins_durations = {}
        self.plugins_errors = {}
        self.autorebuild_canceled = False
        self.build_canceled = False
        self.plugin_failed = False
        self.plugin_files = plugin_files
        self.fs_watcher = FSWatcher()

        self.kwargs = kwargs

        self.builder = None
        self.built_image_inspect = None
        self.layer_sizes = []
        # cache for the lazy base_image_inspect property below
        self._base_image_inspect = None
        self.default_image_build_method = CONTAINER_DEFAULT_BUILD_METHOD

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        #  Each plugin that transforms the image should save it as a new file and append it to
        #  the end of exported_image_sequence. Other plugins should then operate with last
        #  member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        # List of RPMs that go into the final result, as per rpm_util.parse_rpm_output
        self.image_components = None

        if client_version:
            logger.debug("build json was built by osbs-client %s", client_version)

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_result.is_failed() or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        if self._base_image_inspect is None:
            try:
                self._base_image_inspect = self.builder.tasker.inspect_image(
                    self.builder.base_image)
            except docker.errors.NotFound:
                # If the base image cannot be found throw KeyError - as this property should behave
                # like a dict
                raise KeyError("Unprocessed base image Dockerfile cannot be inspected")
        return self._base_image_inspect

    def throw_canceled_build_exception(self, *args, **kwargs):
        # Installed as the SIGTERM handler during build_docker_image().
        self.build_canceled = True
        raise BuildCanceledException("Build was canceled")

    def build_docker_image(self):
        """
        Build the docker image by running each plugin phase in order; exit
        plugins always run from the ``finally`` block, even on failure.

        :return: BuildResult
        :raises PluginFailedException: if any plugin phase fails
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            self.fs_watcher.start()
            # route SIGTERM through BuildCanceledException so cleanup still runs
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            logger.info("running buildstep plugins")
            buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                      self.buildstep_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                self.build_result = buildstep_runner.run()

                # surface a failed build result via the common exception path
                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.d.history(self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                    for (diff_id, layer) in zip(diff_ids, reversed(history))]

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%r) so running exit plugins", ex)
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()
                self.fs_watcher.finish()
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image, parent_registry=None, target_registries=None,
                 prebuild_plugins=None, prepublish_plugins=None, postbuild_plugins=None,
                 plugin_files=None, parent_registry_insecure=False,
                 target_registries_insecure=False, dont_pull_base_image=False, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param parent_registry: str, registry to pull base image from
        :param target_registries: list of str, list of registries to push image to
                                  (might change in future)
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param plugin_files: list of str, load plugins also from these files
        :param parent_registry_insecure: bool, allow connecting to parent registry over plain http
        :param target_registries_insecure: bool, allow connecting to target registries over
                                           plain http
        :param dont_pull_base_image: bool, don't pull or update base image specified in dockerfile
        """
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        self.parent_registry = parent_registry
        self.parent_registry_insecure = parent_registry_insecure

        self.prebuild_plugins_conf = prebuild_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins

        self.prebuild_results = {}
        self.postbuild_results = {}
        self.plugin_files = plugin_files

        self.kwargs = kwargs

        self.builder = None
        self.build_logs = None
        self.built_image_inspect = None
        self.base_image_inspect = None

        self.dont_pull_base_image = dont_pull_base_image
        self.pulled_base_images = set()

        # squashed image tarball
        # set by squash plugin
        self.exported_squashed_image = {}

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        if target_registries:
            self.push_conf.add_docker_registries(target_registries,
                                                 insecure=target_registries_insecure)

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    def build_docker_image(self):
        """
        build docker image

        NOTE(review): in this variant a plugin-phase failure logs the error and
        returns None instead of raising; callers appear to rely on that.

        :return: BuildResults, or None when a plugin phase failed
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            if not self.dont_pull_base_image:
                self.pulled_base_images = self.builder.pull_base_image(
                    self.parent_registry,
                    insecure=self.parent_registry_insecure)

            self.base_image_inspect = self.builder.tasker.inspect_image(self.builder.base_image)
            # time to run pre-build plugins, so they can access cloned repo,
            # base image
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                return

            build_result = self.builder.build()
            self.build_logs = build_result.logs

            if not build_result.is_failed():
                self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                return

            if not build_result.is_failed():
                # push to every configured target registry
                for registry in self.push_conf.docker_registries:
                    self.builder.push_built_image(registry.uri,
                                                  insecure=registry.insecure)

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                return

            return build_result
        finally:
            self.source.remove_tmpdir()
def build_docker_image(self):
    """
    Build the docker image, running each plugin phase in order
    (pre-build -> buildstep -> prepublish -> postbuild); exit plugins always
    run from the ``finally`` block, even on failure or SIGTERM.

    :return: BuildResult
    :raises PluginFailedException: if any plugin phase fails
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # translate SIGTERM into BuildCanceledException so cleanup still runs
        signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                self.prebuild_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        logger.info("running buildstep plugins")
        buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                  self.buildstep_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            self.build_result = buildstep_runner.run()

            # surface a failed build result via the common exception path
            if self.build_result.is_failed():
                raise PluginFailedException(self.build_result.fail_reason)
        except PluginFailedException as ex:
            self.builder.is_built = False
            logger.error('buildstep plugin failed: %s', ex)
            raise

        self.builder.is_built = True
        if self.build_result.is_image_available():
            self.builder.image_id = self.build_result.image_id

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                    self.prepublish_plugins_conf,
                                                    plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        if self.build_result.is_image_available():
            self.built_image_inspect = self.builder.inspect_built_image()
            history = self.builder.tasker.d.history(self.builder.image_id)
            diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

            # diff_ids is ordered oldest first
            # history is ordered newest first
            # We want layer_sizes to be ordered oldest first
            self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                for (diff_id, layer) in zip(diff_ids, reversed(history))]

        postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                  self.postbuild_plugins_conf,
                                                  plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return self.build_result
    except Exception as ex:
        logger.debug("caught exception (%r) so running exit plugins", ex)
        raise
    finally:
        # We need to make sure all exit plugins are executed
        signal.signal(signal.SIGTERM, lambda *args: None)
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
            raise
        finally:
            self.source.remove_tmpdir()
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None,
                 plugin_files=None, openshift_build_selflink=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're actually running
            on openshift) without the actual hostname/IP address
        """
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        self.prebuild_plugins_conf = prebuild_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        self.prebuild_results = {}
        self.postbuild_results = {}
        self.autorebuild_canceled = False
        self.build_failed = False
        self.plugin_failed = False
        self.plugin_files = plugin_files

        self.kwargs = kwargs

        self.builder = None
        self.build_logs = []
        self.built_image_inspect = None
        # cache for the lazy base_image_inspect property below
        self._base_image_inspect = None

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        #  Each plugin that transforms the image should save it as a new file and append it to
        #  the end of exported_image_sequence. Other plugins should then operate with last
        #  member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_failed or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        if self._base_image_inspect is None:
            self._base_image_inspect = self.builder.tasker.inspect_image(self.builder.base_image)
        return self._base_image_inspect

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        :raises PluginFailedException: if a prebuild/prepublish/postbuild plugin fails
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            build_result = self.builder.build()
            self.build_logs = build_result.logs

            self.build_failed = build_result.is_failed()

            if build_result.is_failed():
                # The docker build failed. Finish here, just run the
                # exit plugins (from the 'finally:' block below).
                return build_result

            self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                # deliberately not re-raised: exit plugin failures must not
                # mask the original build outcome
                logger.error("one or more exit plugins failed: %s", ex)
            finally:
                self.source.remove_tmpdir()
def build_docker_image(self):
    """
    Build the docker image; records wall-clock timing of the docker build
    in ``plugins_timestamps`` / ``plugins_durations`` under the key
    'dockerbuild'. Exit plugins always run from the ``finally`` block.

    :return: BuildResults
    :raises PluginFailedException: if a prebuild/prepublish/postbuild plugin fails
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # translate SIGTERM into BuildCanceledException so cleanup still runs
        signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(
            self.builder.tasker,
            self,
            self.prebuild_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        start_time = datetime.datetime.now()
        self.plugins_timestamps['dockerbuild'] = start_time.isoformat()

        build_result = self.builder.build()

        try:
            # timing bookkeeping is best-effort; never fail the build over it
            finish_time = datetime.datetime.now()
            duration = finish_time - start_time
            seconds = duration.total_seconds()
            logger.debug("build finished in %ds", seconds)
            self.plugins_durations['dockerbuild'] = seconds
        except Exception:
            logger.exception("failed to save build duration")

        self.build_logs = build_result.logs

        self.build_failed = build_result.is_failed()

        if build_result.is_failed():
            # The docker build failed. Finish here, just run the
            # exit plugins (from the 'finally:' block below).
            self.plugins_errors['dockerbuild'] = ''
            return build_result

        self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(
            self.builder.tasker,
            self,
            self.prepublish_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        postbuild_runner = PostBuildPluginsRunner(
            self.builder.tasker,
            self,
            self.postbuild_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        # We need to make sure all exit plugins are executed
        signal.signal(signal.SIGTERM, lambda *args: None)
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
            raise
        finally:
            self.source.remove_tmpdir()
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, image, source=None,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None,
                 plugin_files=None, openshift_build_selflink=None,
                 client_version=None, buildstep_plugins=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: list of dicts, arguments for pre-build plugins
        :param prepublish_plugins: list of dicts, arguments for test-build plugins
        :param postbuild_plugins: list of dicts, arguments for post-build plugins
        :param exit_plugins: list of dicts, arguments for exit plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're actually running
            on openshift) without the actual hostname/IP address
        :param client_version: str, osbs-client version used to render build json
        :param buildstep_plugins: list of dicts, arguments for build-step plugins
        """
        tmp_dir = tempfile.mkdtemp()
        if source is None:
            # no source provided: use a stand-in so plugins always have one
            self.source = DummySource(None, None, tmpdir=tmp_dir)
        else:
            self.source = get_source_instance_for(source, tmpdir=tmp_dir)
        self.image = image

        # plugin configuration, one entry per phase
        self.prebuild_plugins_conf = prebuild_plugins
        self.buildstep_plugins_conf = buildstep_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        # per-phase plugin results, keyed by plugin name
        self.prebuild_results = {}
        self.buildstep_result = {}
        self.postbuild_results = {}
        self.prepub_results = {}
        self.exit_results = {}
        self.build_result = BuildResult(fail_reason="not built")
        self.plugin_workspace = {}
        self.plugins_timestamps = {}
        self.plugins_durations = {}
        self.plugins_errors = {}
        self.autorebuild_canceled = False
        self.build_canceled = False
        self.plugin_failed = False
        self.plugin_files = plugin_files
        self.fs_watcher = FSWatcher()

        self.kwargs = kwargs

        self.builder = None
        self.built_image_inspect = None
        self.layer_sizes = []
        self.default_image_build_method = CONTAINER_DEFAULT_BUILD_METHOD
        self.storage_transport = DOCKER_STORAGE_TRANSPORT_NAME

        # list of images pulled during the build, to be deleted after the build
        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        #  Each plugin that transforms the image should save it as a new file and append it to
        #  the end of exported_image_sequence. Other plugins should then operate with last
        #  member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        # List of RPMs that go into the final result, as per utils.rpm.parse_rpm_output
        self.image_components = None

        # List of all yum repos. The provided repourls might be changed (by resolve_composes) when
        # inheritance is enabled. This property holds the updated list of repos, allowing
        # post-build plugins (such as koji_import) to record them.
        self.all_yum_repourls = None

        # info about pre-declared build, build-id and token
        self.reserved_build_id = None
        self.reserved_token = None
        self.triggered_after_koji_task = None
        self.koji_source_nvr = {}
        self.koji_source_source_url = None
        self.koji_source_manifest = None

        # Plugins can store info here using the @annotation, @annotation_map,
        # @label and @label_map decorators from atomic_reactor.metadata
        self.annotations = {}
        self.labels = {}

        if client_version:
            logger.debug("build json was built by osbs-client %s", client_version)

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    def get_orchestrate_build_plugin(self):
        """
        Get the orchestrate_build plugin configuration for this workflow
        if present (will be present for orchestrator, not for worker).

        :return: orchestrate_build plugin configuration dict
        :raises: ValueError if the orchestrate_build plugin is not present
        """
        for plugin in self.buildstep_plugins_conf or []:
            if plugin['name'] == PLUGIN_BUILD_ORCHESTRATE_KEY:
                return plugin
        # Not an orchestrator build
        raise ValueError('Not an orchestrator build')

    def is_orchestrator_build(self):
        """
        Check if the plugin configuration for this workflow is for
        an orchestrator build or a worker build.

        :return: True if orchestrator build, False if worker build
        """
        try:
            self.get_orchestrate_build_plugin()
            return True
        except ValueError:
            return False

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_result.is_failed() or self.plugin_failed

    def throw_canceled_build_exception(self, *args, **kwargs):
        # Installed as the SIGTERM handler during build_docker_image().
        self.build_canceled = True
        raise BuildCanceledException("Build was canceled")

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        """
        # tracks whether an exception from the main flow is already propagating,
        # so exit-plugin failures don't replace the original root cause
        exception_being_handled = False
        self.builder = InsideBuilder(self.source, self.image)
        # Make sure exit_runner is defined for finally block
        exit_runner = None
        try:
            self.fs_watcher.start()
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker, self, self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker, self, self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker, self, self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)

            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            # we are delaying initialization, because prebuild plugin reactor_config
            # might change build method
            buildstep_runner = BuildStepPluginsRunner(
                self.builder.tasker, self, self.buildstep_plugins_conf,
                plugin_files=self.plugin_files)

            logger.info("running buildstep plugins")
            try:
                self.build_result = buildstep_runner.run()

                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.get_image_history(
                    self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][
                    INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [{
                    "diff_id": diff_id,
                    "size": layer['Size']
                } for (diff_id, layer) in zip(diff_ids, reversed(history))]

            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%s) so running exit plugins",
                         exception_message(ex))
            exception_being_handled = True
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)

            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            keep_going=True,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)

                # raise exception only in case that there is no previous exception being already
                # handled to prevent replacing original exceptions (root cause) with exceptions
                # from exit plugins
                if not exception_being_handled:
                    raise ex
            finally:
                self.source.remove_tmpdir()
                self.fs_watcher.finish()

                signal.signal(signal.SIGTERM, signal.SIG_DFL)
def build_docker_image(self):
    """
    Build the docker image by running plugin phases in order; exit plugins
    always run from the ``finally`` block. An exception already propagating
    from the main flow is never replaced by an exit-plugin failure.

    :return: BuildResult
    :raises PluginFailedException: if any plugin phase fails
    """
    # tracks whether an exception from the main flow is already propagating
    exception_being_handled = False
    self.builder = InsideBuilder(self.source, self.image)
    # Make sure exit_runner is defined for finally block
    exit_runner = None
    try:
        self.fs_watcher.start()
        signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
        prebuild_runner = PreBuildPluginsRunner(
            self.builder.tasker, self, self.prebuild_plugins_conf,
            plugin_files=self.plugin_files)
        prepublish_runner = PrePublishPluginsRunner(
            self.builder.tasker, self, self.prepublish_plugins_conf,
            plugin_files=self.plugin_files)
        postbuild_runner = PostBuildPluginsRunner(
            self.builder.tasker, self, self.postbuild_plugins_conf,
            plugin_files=self.plugin_files)

        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        # we are delaying initialization, because prebuild plugin reactor_config
        # might change build method
        buildstep_runner = BuildStepPluginsRunner(
            self.builder.tasker, self, self.buildstep_plugins_conf,
            plugin_files=self.plugin_files)

        logger.info("running buildstep plugins")
        try:
            self.build_result = buildstep_runner.run()

            # surface a failed build result via the common exception path
            if self.build_result.is_failed():
                raise PluginFailedException(self.build_result.fail_reason)
        except PluginFailedException as ex:
            self.builder.is_built = False
            logger.error('buildstep plugin failed: %s', ex)
            raise

        self.builder.is_built = True
        if self.build_result.is_image_available():
            self.builder.image_id = self.build_result.image_id

        # run prepublish plugins
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        if self.build_result.is_image_available():
            self.built_image_inspect = self.builder.inspect_built_image()
            history = self.builder.tasker.get_image_history(
                self.builder.image_id)
            diff_ids = self.built_image_inspect[INSPECT_ROOTFS][
                INSPECT_ROOTFS_LAYERS]

            # diff_ids is ordered oldest first
            # history is ordered newest first
            # We want layer_sizes to be ordered oldest first
            self.layer_sizes = [{
                "diff_id": diff_id,
                "size": layer['Size']
            } for (diff_id, layer) in zip(diff_ids, reversed(history))]

        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return self.build_result
    except Exception as ex:
        logger.debug("caught exception (%s) so running exit plugins",
                     exception_message(ex))
        exception_being_handled = True
        raise
    finally:
        # We need to make sure all exit plugins are executed
        signal.signal(signal.SIGTERM, lambda *args: None)

        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        keep_going=True,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)

            # raise exception only in case that there is no previous exception being already
            # handled to prevent replacing original exceptions (root cause) with exceptions
            # from exit plugins
            if not exception_being_handled:
                raise ex
        finally:
            self.source.remove_tmpdir()
            self.fs_watcher.finish()

            signal.signal(signal.SIGTERM, signal.SIG_DFL)
def build_docker_image(self):
    """
    Build the docker image by running each plugin phase in order:
    pre-build, build-step, prepublish, post-build, and — always, via the
    ``finally`` block — exit plugins.

    :return: BuildResult
    :raises PluginFailedException: when a required plugin phase fails
    :raises AutoRebuildCanceledException: when an autorebuild is canceled
    """
    # Tracks whether an exception from the main build flow is already
    # propagating. Without this, a PluginFailedException raised from the
    # exit-plugin handler inside ``finally`` would REPLACE the in-flight
    # exception, hiding the real root cause of the build failure.
    exception_being_handled = False
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # Convert SIGTERM into a BuildCanceledException so cancellation
        # flows through the normal exception path (and exit plugins run).
        signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)

        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(
            self.builder.tasker, self, self.prebuild_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        logger.info("running buildstep plugins")
        buildstep_runner = BuildStepPluginsRunner(
            self.builder.tasker, self, self.buildstep_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            self.build_result = buildstep_runner.run()
            # Promote a reported failure to an exception so the failure
            # path below is taken uniformly.
            if self.build_result.is_failed():
                raise PluginFailedException(self.build_result.fail_reason)
        except PluginFailedException as ex:
            self.builder.is_built = False
            logger.error('buildstep plugin failed: %s', ex)
            raise

        self.builder.is_built = True
        if self.build_result.is_image_available():
            self.builder.image_id = self.build_result.image_id
            self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(
            self.builder.tasker, self, self.prepublish_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        postbuild_runner = PostBuildPluginsRunner(
            self.builder.tasker, self, self.postbuild_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return self.build_result
    except Exception as ex:
        logger.debug("caught exception (%r) so running exit plugins", ex)
        exception_being_handled = True
        raise
    finally:
        # We need to make sure all exit plugins are executed
        # Ignore SIGTERM while exit plugins run; default handler is
        # restored below.
        signal.signal(signal.SIGTERM, lambda *args: None)
        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
            # Re-raise only when no earlier exception is already
            # propagating; otherwise the exit-plugin failure would mask
            # the original root-cause exception.
            if not exception_being_handled:
                raise
        finally:
            self.source.remove_tmpdir()
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
def build_docker_image(self):
    """
    Build the docker image: run pre-build plugins, build, optionally
    push to configured registries, then post-build plugins; exit plugins
    run unconditionally from the ``finally`` block.

    :return: BuildResults
    :raises PluginFailedException: when a pre-build, prepublish or
        post-build plugin fails (exit-plugin failures are only logged)
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        prebuild_runner = PreBuildPluginsRunner(
            self.builder.tasker, self, self.prebuild_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise

        build_result = self.builder.build()
        self.build_logs = build_result.logs
        self.build_failed = build_result.is_failed()

        if not build_result.is_failed():
            self.built_image_inspect = self.builder.inspect_built_image()

        # run prepublish plugins
        prepublish_runner = PrePublishPluginsRunner(
            self.builder.tasker, self, self.prepublish_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        # Only push a successfully built image to the configured registries.
        if not build_result.is_failed():
            for registry in self.push_conf.docker_registries:
                self.builder.push_built_image(registry.uri,
                                              insecure=registry.insecure)

        postbuild_runner = PostBuildPluginsRunner(
            self.builder.tasker, self, self.postbuild_plugins_conf,
            plugin_files=self.plugin_files)
        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return build_result
    finally:
        # NOTE(review): the source tmpdir is removed BEFORE the exit
        # plugins run — confirm no exit plugin needs access to the
        # cloned source.
        self.source.remove_tmpdir()

        exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                        self.exit_plugins_conf,
                                        plugin_files=self.plugin_files)
        try:
            # NOTE(review): run() is called without keep_going here, so a
            # failing exit plugin may stop the remaining ones — verify
            # this is intended.
            exit_runner.run()
        except PluginFailedException as ex:
            # Exit-plugin failures are logged but deliberately not
            # re-raised, so they cannot mask an in-flight exception.
            logger.error("one or more exit plugins failed: %s", ex)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image, target_registries=None,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None,
                 plugin_files=None, target_registries_insecure=False,
                 **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param target_registries: list of str, list of registries to push image to
            (might change in future)
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param plugin_files: list of str, load plugins also from these files
        :param target_registries_insecure: bool, allow connecting to target
            registries over plain http
        """
        # The source is checked out into a freshly created temp dir; it is
        # removed by build_docker_image's finally block.
        self.source = get_source_instance_for(source,
                                              tmpdir=tempfile.mkdtemp())
        self.image = image

        # Per-phase plugin configuration (may be None when a phase is unused).
        self.prebuild_plugins_conf = prebuild_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins
        # Results collected from plugin runs, keyed per plugin.
        self.prebuild_results = {}
        self.postbuild_results = {}
        self.build_failed = False
        self.plugin_failed = False
        self.plugin_files = plugin_files

        self.kwargs = kwargs

        # Populated lazily by build_docker_image.
        self.builder = None
        self.build_logs = None
        self.built_image_inspect = None
        # Cache for the base_image_inspect property below.
        self._base_image_inspect = None

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        #  Each plugin that transforms the image should save it as a new file and append it to
        #  the end of exported_image_sequence. Other plugins should then operate with last
        #  member of this structure. Example:
        #  [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        #  You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()
        if target_registries:
            self.push_conf.add_docker_registries(
                target_registries, insecure=target_registries_insecure)

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_failed or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        # Cached: the inspection is performed at most once per workflow.
        if self._base_image_inspect is None:
            self._base_image_inspect = self.builder.tasker.inspect_image(
                self.builder.base_image)
        return self._base_image_inspect

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        :raises PluginFailedException: when a pre-build, prepublish or
            post-build plugin fails (exit-plugin failures are only logged)
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker, self, self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise

            build_result = self.builder.build()
            self.build_logs = build_result.logs
            self.build_failed = build_result.is_failed()

            if not build_result.is_failed():
                self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker, self, self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            # Only push a successfully built image.
            if not build_result.is_failed():
                for registry in self.push_conf.docker_registries:
                    self.builder.push_built_image(registry.uri,
                                                  insecure=registry.insecure)

            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker, self, self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            # NOTE(review): tmpdir is removed BEFORE exit plugins run —
            # confirm no exit plugin needs the cloned source.
            self.source.remove_tmpdir()

            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run()
            except PluginFailedException as ex:
                # Logged but not re-raised, so it cannot mask an
                # in-flight exception.
                logger.error("one or more exit plugins failed: %s", ex)
def build_docker_image(self):
    """
    Run the whole image build: pre-build plugins, the docker build
    itself, then (on success) prepublish and post-build plugins.  Exit
    plugins always run, from the ``finally`` block, and the source
    tmpdir is removed afterwards.

    :return: BuildResults
    """
    self.builder = InsideBuilder(self.source, self.image)
    try:
        # Pre-build plugins run first so they can access the cloned repo.
        logger.info("running pre-build plugins")
        pre_runner = PreBuildPluginsRunner(self.builder.tasker,
                                           self,
                                           self.prebuild_plugins_conf,
                                           plugin_files=self.plugin_files)
        try:
            pre_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise
        except AutoRebuildCanceledException as ex:
            logger.info(str(ex))
            self.autorebuild_canceled = True
            raise

        outcome = self.builder.build()
        self.build_logs = outcome.logs
        self.build_failed = outcome.is_failed()

        if outcome.is_failed():
            # Build failed: skip the publish phases; exit plugins still
            # run via the 'finally:' block below.
            return outcome

        self.built_image_inspect = self.builder.inspect_built_image()

        # Prepublish plugins run against the successfully built image.
        prepub_runner = PrePublishPluginsRunner(self.builder.tasker,
                                                self,
                                                self.prepublish_plugins_conf,
                                                plugin_files=self.plugin_files)
        try:
            prepub_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        # Post-build plugins are the last phase of the happy path.
        post_runner = PostBuildPluginsRunner(self.builder.tasker,
                                             self,
                                             self.postbuild_plugins_conf,
                                             plugin_files=self.plugin_files)
        try:
            post_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise

        return outcome
    finally:
        # Exit plugins run no matter how the build ended; a failure here
        # is logged, never raised, and the tmpdir is removed last.
        cleanup_runner = ExitPluginsRunner(self.builder.tasker,
                                           self,
                                           self.exit_plugins_conf,
                                           plugin_files=self.plugin_files)
        try:
            cleanup_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
        finally:
            self.source.remove_tmpdir()
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None,
                 plugin_files=None, openshift_build_selflink=None,
                 client_version=None, buildstep_plugins=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're
            actually running on openshift) without the actual hostname/IP address
        :param client_version: str, osbs-client version used to render build json
        :param buildstep_plugins: dict, arguments for build-step plugins
        """
        # The source is checked out into a freshly created temp dir; it is
        # removed by build_docker_image's finally block.
        self.source = get_source_instance_for(source,
                                              tmpdir=tempfile.mkdtemp())
        self.image = image

        # Per-phase plugin configuration (may be None when a phase is unused).
        self.prebuild_plugins_conf = prebuild_plugins
        self.buildstep_plugins_conf = buildstep_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins
        # Results collected from plugin runs, keyed per plugin.
        self.prebuild_results = {}
        self.buildstep_result = {}
        self.postbuild_results = {}
        self.prepub_results = {}
        self.exit_results = {}
        # Default result until a buildstep plugin actually runs.
        self.build_result = BuildResult(fail_reason="not built")
        self.plugin_workspace = {}
        self.plugins_timestamps = {}
        self.plugins_durations = {}
        self.plugins_errors = {}
        self.autorebuild_canceled = False
        self.build_canceled = False
        self.plugin_failed = False
        self.plugin_files = plugin_files

        self.kwargs = kwargs

        # Populated lazily by build_docker_image.
        self.builder = None
        self.built_image_inspect = None
        self.layer_sizes = []
        # Cache for the base_image_inspect property below.
        self._base_image_inspect = None

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        #  Each plugin that transforms the image should save it as a new file and append it to
        #  the end of exported_image_sequence. Other plugins should then operate with last
        #  member of this structure. Example:
        #  [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        #  You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        # List of RPMs that go into the final result, as per rpm_util.parse_rpm_output
        self.image_components = None

        if client_version:
            logger.debug("build json was built by osbs-client %s",
                         client_version)

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_result.is_failed() or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        # Cached: the inspection is performed at most once per workflow.
        if self._base_image_inspect is None:
            try:
                self._base_image_inspect = self.builder.tasker.inspect_image(
                    self.builder.base_image)
            except docker.errors.NotFound:
                # If the base image cannot be found throw KeyError - as this property should behave
                # like a dict
                raise KeyError(
                    "Unprocessed base image Dockerfile cannot be inspected")
        return self._base_image_inspect

    def throw_canceled_build_exception(self, *args, **kwargs):
        # Installed as the SIGTERM handler by build_docker_image; records
        # the cancellation and aborts via an exception.
        self.build_canceled = True
        raise BuildCanceledException("Build was canceled")

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        :raises PluginFailedException: when a required plugin phase fails
        :raises AutoRebuildCanceledException: when an autorebuild is canceled
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # Convert SIGTERM into a BuildCanceledException so cancellation
            # flows through the normal exception path (and exit plugins run).
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker, self, self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            logger.info("running buildstep plugins")
            buildstep_runner = BuildStepPluginsRunner(
                self.builder.tasker, self, self.buildstep_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                self.build_result = buildstep_runner.run()
                # Promote a reported failure to an exception so the failure
                # path below is taken uniformly.
                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker, self, self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.d.history(self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][
                    INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [
                    {"diff_id": diff_id, "size": layer['Size']}
                    for (diff_id, layer) in zip(diff_ids, reversed(history))]

            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker, self, self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%r) so running exit plugins", ex)
            raise
        finally:
            # We need to make sure all exit plugins are executed
            # Ignore SIGTERM while exit plugins run; the default handler
            # is restored below.
            signal.signal(signal.SIGTERM, lambda *args: None)
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                # NOTE(review): raising here from inside ``finally`` will
                # replace any in-flight exception from the build with the
                # exit-plugin failure, hiding the root cause — confirm
                # whether this masking is intended.
                raise
            finally:
                self.source.remove_tmpdir()
                signal.signal(signal.SIGTERM, signal.SIG_DFL)