def test_inspect_built_image(tmpdir, source_params):
    """Inspecting a (mocked) built image yields metadata containing an Id."""
    provided_image = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=provided_image)
        # stub out the built-check so inspect can run without a real build
        flexmock(InsideBuilder, ensure_is_built=None)

    source_params.update({'tmpdir': str(tmpdir)})
    source = get_source_instance_for(source_params)
    builder = InsideBuilder(source, provided_image)

    inspected = builder.inspect_built_image()
    assert inspected is not None
    assert inspected["Id"] is not None
def test_inspect_built_image(tmpdir, source_params):
    """Build an image, inspect it, and clean the image up afterwards."""
    provided_image = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=provided_image)

    source_params.update({'tmpdir': str(tmpdir)})
    source = get_source_instance_for(source_params)
    tasker = DockerTasker()
    builder = InsideBuilder(source, provided_image)

    build_result = builder.build()
    inspected = builder.inspect_built_image()
    assert inspected is not None
    assert inspected["Id"] is not None

    # clean
    tasker.remove_image(build_result.image_id)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None, plugin_files=None,
                 openshift_build_selflink=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param exit_plugins: dict, arguments for exit plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're
            actually running on openshift) without the actual hostname/IP address
        """
        # source is resolved into a Source instance with a fresh temp dir;
        # the temp dir is removed at the end of build_docker_image()
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        self.prebuild_plugins_conf = prebuild_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        # results keyed by plugin, filled in by the plugin runners
        self.prebuild_results = {}
        self.postbuild_results = {}
        self.autorebuild_canceled = False
        self.build_failed = False
        self.plugin_failed = False
        self.plugin_files = plugin_files

        self.kwargs = kwargs

        # InsideBuilder created lazily in build_docker_image()
        self.builder = None
        self.build_logs = []
        self.built_image_inspect = None
        self._base_image_inspect = None

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        # Each plugin that transforms the image should save it as a new file and append it to
        # the end of exported_image_sequence. Other plugins should then operate with last
        # member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_failed or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        if self._base_image_inspect is None:
            self._base_image_inspect = self.builder.tasker.inspect_image(self.builder.base_image)
        return self._base_image_inspect

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            build_result = self.builder.build()
            self.build_logs = build_result.logs
            self.build_failed = build_result.is_failed()

            if build_result.is_failed():
                # The docker build failed. Finish here, just run the
                # exit plugins (from the 'finally:' block below).
                return build_result

            self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            # exit plugins always run, whether the build succeeded or not;
            # keep_going=True so one failing exit plugin doesn't skip the rest
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
            finally:
                # remove the tmpdir only after exit plugins have run
                self.source.remove_tmpdir()
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None, plugin_files=None,
                 openshift_build_selflink=None, client_version=None,
                 buildstep_plugins=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param exit_plugins: dict, arguments for exit plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're
            actually running on openshift) without the actual hostname/IP address
        :param client_version: str, osbs-client version used to render build json
        :param buildstep_plugins: dict, arguments for build-step plugins
        """
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        self.prebuild_plugins_conf = prebuild_plugins
        self.buildstep_plugins_conf = buildstep_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        # per-phase plugin results, filled in by the plugin runners
        self.prebuild_results = {}
        self.buildstep_result = {}
        self.postbuild_results = {}
        self.prepub_results = {}
        self.exit_results = {}
        # placeholder result until a buildstep plugin produces the real one
        self.build_result = BuildResult(fail_reason="not built")
        self.plugin_workspace = {}
        self.plugins_timestamps = {}
        self.plugins_durations = {}
        self.plugins_errors = {}
        self.autorebuild_canceled = False
        self.build_canceled = False
        self.plugin_failed = False
        self.plugin_files = plugin_files
        self.fs_watcher = FSWatcher()

        self.kwargs = kwargs

        self.builder = None
        self.built_image_inspect = None
        self.layer_sizes = []
        self._base_image_inspect = None
        self.default_image_build_method = CONTAINER_DEFAULT_BUILD_METHOD

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        # Each plugin that transforms the image should save it as a new file and append it to
        # the end of exported_image_sequence. Other plugins should then operate with last
        # member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        # List of RPMs that go into the final result, as per rpm_util.parse_rpm_output
        self.image_components = None

        if client_version:
            logger.debug("build json was built by osbs-client %s", client_version)

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_result.is_failed() or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        if self._base_image_inspect is None:
            try:
                self._base_image_inspect = self.builder.tasker.inspect_image(
                    self.builder.base_image)
            except docker.errors.NotFound:
                # If the base image cannot be found throw KeyError - as this property should behave
                # like a dict
                raise KeyError("Unprocessed base image Dockerfile cannot be inspected")
        return self._base_image_inspect

    def throw_canceled_build_exception(self, *args, **kwargs):
        # installed as the SIGTERM handler while build_docker_image() runs
        self.build_canceled = True
        raise BuildCanceledException("Build was canceled")

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            self.fs_watcher.start()
            # a SIGTERM during the build is turned into BuildCanceledException
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            logger.info("running buildstep plugins")
            buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                      self.buildstep_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                self.build_result = buildstep_runner.run()

                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.d.history(self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                    for (diff_id, layer) in zip(diff_ids, reversed(history))]

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%r) so running exit plugins", ex)
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)

            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()
                self.fs_watcher.finish()

                # restore the default SIGTERM behaviour
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image, target_registries=None,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None, plugin_files=None,
                 target_registries_insecure=False, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param target_registries: list of str, list of registries to push image to
                                  (might change in future)
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param exit_plugins: dict, arguments for exit plugins
        :param plugin_files: list of str, load plugins also from these files
        :param target_registries_insecure: bool, allow connecting to target registries
                                           over plain http
        """
        # source is resolved into a Source instance with a fresh temp dir;
        # the temp dir is removed at the end of build_docker_image()
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        self.prebuild_plugins_conf = prebuild_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        # results keyed by plugin, filled in by the plugin runners
        self.prebuild_results = {}
        self.postbuild_results = {}
        self.build_failed = False
        self.plugin_failed = False
        self.plugin_files = plugin_files

        self.kwargs = kwargs

        # InsideBuilder created lazily in build_docker_image()
        self.builder = None
        self.build_logs = None
        self.built_image_inspect = None
        self._base_image_inspect = None

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        # Each plugin that transforms the image should save it as a new file and append it to
        # the end of exported_image_sequence. Other plugins should then operate with last
        # member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()
        if target_registries:
            self.push_conf.add_docker_registries(
                target_registries, insecure=target_registries_insecure)

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_failed or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        if self._base_image_inspect is None:
            self._base_image_inspect = self.builder.tasker.inspect_image(
                self.builder.base_image)
        return self._base_image_inspect

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(
                self.builder.tasker, self, self.prebuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise

            build_result = self.builder.build()
            self.build_logs = build_result.logs
            self.build_failed = build_result.is_failed()

            if not build_result.is_failed():
                self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(
                self.builder.tasker, self, self.prepublish_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if not build_result.is_failed():
                # push the built image to every configured target registry
                for registry in self.push_conf.docker_registries:
                    self.builder.push_built_image(registry.uri,
                                                  insecure=registry.insecure)

            postbuild_runner = PostBuildPluginsRunner(
                self.builder.tasker, self, self.postbuild_plugins_conf,
                plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            # Exit plugins must run BEFORE the source tmpdir is removed: removing it
            # first would leave any exit plugin that reads the cloned source pointing
            # at a deleted directory. keep_going=True so one failing exit plugin does
            # not prevent the remaining ones from running (consistent with the other
            # workflow variants in this file).
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
            finally:
                self.source.remove_tmpdir()
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None, plugin_files=None,
                 openshift_build_selflink=None, client_version=None,
                 buildstep_plugins=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param exit_plugins: dict, arguments for exit plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're
            actually running on openshift) without the actual hostname/IP address
        :param client_version: str, osbs-client version used to render build json
        :param buildstep_plugins: dict, arguments for build-step plugins
        """
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        self.prebuild_plugins_conf = prebuild_plugins
        self.buildstep_plugins_conf = buildstep_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        # per-phase plugin results, filled in by the plugin runners
        self.prebuild_results = {}
        self.buildstep_result = {}
        self.postbuild_results = {}
        self.prepub_results = {}
        self.exit_results = {}
        # placeholder result until a buildstep plugin produces the real one
        self.build_result = BuildResult(fail_reason="not built")
        self.plugin_workspace = {}
        self.plugins_timestamps = {}
        self.plugins_durations = {}
        self.plugins_errors = {}
        self.autorebuild_canceled = False
        self.build_canceled = False
        self.plugin_failed = False
        self.plugin_files = plugin_files

        self.kwargs = kwargs

        self.builder = None
        self.built_image_inspect = None
        self.layer_sizes = []
        self._base_image_inspect = None

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        # Each plugin that transforms the image should save it as a new file and append it to
        # the end of exported_image_sequence. Other plugins should then operate with last
        # member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        # List of RPMs that go into the final result, as per rpm_util.parse_rpm_output
        self.image_components = None

        if client_version:
            logger.debug("build json was built by osbs-client %s", client_version)

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_result.is_failed() or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        if self._base_image_inspect is None:
            try:
                self._base_image_inspect = self.builder.tasker.inspect_image(
                    self.builder.base_image)
            except docker.errors.NotFound:
                # If the base image cannot be found throw KeyError - as this property should behave
                # like a dict
                raise KeyError("Unprocessed base image Dockerfile cannot be inspected")
        return self._base_image_inspect

    def throw_canceled_build_exception(self, *args, **kwargs):
        # installed as the SIGTERM handler while build_docker_image() runs
        self.build_canceled = True
        raise BuildCanceledException("Build was canceled")

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            # a SIGTERM during the build is turned into BuildCanceledException
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            logger.info("running buildstep plugins")
            buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                      self.buildstep_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                self.build_result = buildstep_runner.run()

                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.d.history(self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                    for (diff_id, layer) in zip(diff_ids, reversed(history))]

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%r) so running exit plugins", ex)
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)

            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()

                # restore the default SIGTERM behaviour
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, image, source=None,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None, plugin_files=None,
                 openshift_build_selflink=None, client_version=None,
                 buildstep_plugins=None, **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: list of dicts, arguments for pre-build plugins
        :param prepublish_plugins: list of dicts, arguments for test-build plugins
        :param postbuild_plugins: list of dicts, arguments for post-build plugins
        :param exit_plugins: list of dicts, arguments for exit plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're
            actually running on openshift) without the actual hostname/IP address
        :param client_version: str, osbs-client version used to render build json
        :param buildstep_plugins: list of dicts, arguments for build-step plugins
        """
        tmp_dir = tempfile.mkdtemp()
        if source is None:
            # no source given: use a placeholder so the rest of the workflow
            # can treat self.source uniformly
            self.source = DummySource(None, None, tmpdir=tmp_dir)
        else:
            self.source = get_source_instance_for(source, tmpdir=tmp_dir)
        self.image = image

        self.prebuild_plugins_conf = prebuild_plugins
        self.buildstep_plugins_conf = buildstep_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        # per-phase plugin results, filled in by the plugin runners
        self.prebuild_results = {}
        self.buildstep_result = {}
        self.postbuild_results = {}
        self.prepub_results = {}
        self.exit_results = {}
        # placeholder result until a buildstep plugin produces the real one
        self.build_result = BuildResult(fail_reason="not built")
        self.plugin_workspace = {}
        self.plugins_timestamps = {}
        self.plugins_durations = {}
        self.plugins_errors = {}
        self.autorebuild_canceled = False
        self.build_canceled = False
        self.plugin_failed = False
        self.plugin_files = plugin_files
        self.fs_watcher = FSWatcher()

        self.kwargs = kwargs

        self.builder = None
        self.built_image_inspect = None
        self.layer_sizes = []
        self.default_image_build_method = CONTAINER_DEFAULT_BUILD_METHOD
        self.storage_transport = DOCKER_STORAGE_TRANSPORT_NAME

        # list of images pulled during the build, to be deleted after the build
        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        # Each plugin that transforms the image should save it as a new file and append it to
        # the end of exported_image_sequence. Other plugins should then operate with last
        # member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        # List of RPMs that go into the final result, as per utils.rpm.parse_rpm_output
        self.image_components = None

        # List of all yum repos. The provided repourls might be changed (by resolve_composes) when
        # inheritance is enabled. This property holds the updated list of repos, allowing
        # post-build plugins (such as koji_import) to record them.
        self.all_yum_repourls = None

        # info about pre-declared build, build-id and token
        self.reserved_build_id = None
        self.reserved_token = None
        self.triggered_after_koji_task = None
        self.koji_source_nvr = {}
        self.koji_source_source_url = None
        self.koji_source_manifest = None

        # Plugins can store info here using the @annotation, @annotation_map,
        # @label and @label_map decorators from atomic_reactor.metadata
        self.annotations = {}
        self.labels = {}

        if client_version:
            logger.debug("build json was built by osbs-client %s", client_version)

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    def get_orchestrate_build_plugin(self):
        """
        Get the orchestrate_build plugin configuration for this workflow
        if present (will be present for orchestrator, not for worker).

        :return: orchestrate_build plugin configuration dict
        :raises: ValueError if the orchestrate_build plugin is not present
        """
        for plugin in self.buildstep_plugins_conf or []:
            if plugin['name'] == PLUGIN_BUILD_ORCHESTRATE_KEY:
                return plugin

        # Not an orchestrator build
        raise ValueError('Not an orchestrator build')

    def is_orchestrator_build(self):
        """
        Check if the plugin configuration for this workflow is for
        an orchestrator build or a worker build.

        :return: True if orchestrator build, False if worker build
        """
        try:
            self.get_orchestrate_build_plugin()
            return True
        except ValueError:
            return False

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_result.is_failed() or self.plugin_failed

    def throw_canceled_build_exception(self, *args, **kwargs):
        # installed as the SIGTERM handler while build_docker_image() runs
        self.build_canceled = True
        raise BuildCanceledException("Build was canceled")

    def build_docker_image(self):
        """
        build docker image

        :return: BuildResult
        """
        exception_being_handled = False
        self.builder = InsideBuilder(self.source, self.image)
        # Make sure exit_runner is defined for finally block
        exit_runner = None
        try:
            self.fs_watcher.start()
            # a SIGTERM during the build is turned into BuildCanceledException
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            # we are delaying initialization, because prebuild plugin reactor_config
            # might change build method
            buildstep_runner = BuildStepPluginsRunner(self.builder.tasker, self,
                                                      self.buildstep_plugins_conf,
                                                      plugin_files=self.plugin_files)

            logger.info("running buildstep plugins")
            try:
                self.build_result = buildstep_runner.run()

                if self.build_result.is_failed():
                    raise PluginFailedException(self.build_result.fail_reason)
            except PluginFailedException as ex:
                self.builder.is_built = False
                logger.error('buildstep plugin failed: %s', ex)
                raise

            self.builder.is_built = True
            if self.build_result.is_image_available():
                self.builder.image_id = self.build_result.image_id

            # run prepublish plugins
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            if self.build_result.is_image_available():
                self.built_image_inspect = self.builder.inspect_built_image()
                history = self.builder.tasker.get_image_history(self.builder.image_id)
                diff_ids = self.built_image_inspect[INSPECT_ROOTFS][INSPECT_ROOTFS_LAYERS]

                # diff_ids is ordered oldest first
                # history is ordered newest first
                # We want layer_sizes to be ordered oldest first
                self.layer_sizes = [{"diff_id": diff_id, "size": layer['Size']}
                                    for (diff_id, layer) in zip(diff_ids, reversed(history))]

            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return self.build_result
        except Exception as ex:
            logger.debug("caught exception (%s) so running exit plugins",
                         exception_message(ex))
            exception_being_handled = True
            raise
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)

            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            keep_going=True,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)

                # raise exception only in case that there is no previous exception being already
                # handled to prevent replacing original exceptions (root cause) with exceptions
                # from exit plugins
                if not exception_being_handled:
                    raise ex
            finally:
                self.source.remove_tmpdir()
                self.fs_watcher.finish()

                # restore the default SIGTERM behaviour
                signal.signal(signal.SIGTERM, signal.SIG_DFL)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, exit_plugins=None,
                 plugin_files=None, openshift_build_selflink=None,
                 client_version=None,
                 **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param exit_plugins: dict, arguments for exit plugins
        :param plugin_files: list of str, load plugins also from these files
        :param openshift_build_selflink: str, link to openshift build (if we're actually
               running on openshift) without the actual hostname/IP address
        :param client_version: str, osbs-client version recorded in the build
               json (logged for debugging only)
        """
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        self.prebuild_plugins_conf = prebuild_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins
        self.exit_plugins_conf = exit_plugins

        # per-phase plugin results, keyed by plugin name
        self.prebuild_results = {}
        self.postbuild_results = {}
        self.prepub_results = {}
        self.exit_results = {}
        # scratch space for plugins to share state with each other
        self.plugin_workspace = {}
        # bookkeeping, keyed by plugin name: start timestamp (ISO format),
        # duration in seconds, and error text
        self.plugins_timestamps = {}
        self.plugins_durations = {}
        self.plugins_errors = {}
        self.autorebuild_canceled = False

        self.build_failed = False
        self.plugin_failed = False

        self.plugin_files = plugin_files

        self.kwargs = kwargs

        self.builder = None
        self.build_logs = []
        self.built_image_inspect = None
        # backing field for the lazy base_image_inspect property
        self._base_image_inspect = None

        self.pulled_base_images = set()

        # When an image is exported into tarball, it can then be processed by various plugins.
        # Each plugin that transforms the image should save it as a new file and append it to
        # the end of exported_image_sequence. Other plugins should then operate with last
        # member of this structure. Example:
        # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
        # You can use util.get_exported_image_metadata to create a dict to append to this list.
        self.exported_image_sequence = []

        self.tag_conf = TagConf()
        self.push_conf = PushConf()

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        self.openshift_build_selflink = openshift_build_selflink

        if client_version:
            logger.debug("build json was built by osbs-client %s", client_version)

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    @property
    def build_process_failed(self):
        """
        Has any aspect of the build process failed?
        """
        return self.build_failed or self.plugin_failed

    # inspect base image lazily just before it's needed - pre plugins may change the base image
    @property
    def base_image_inspect(self):
        if self._base_image_inspect is None:
            self._base_image_inspect = self.builder.tasker.inspect_image(
                self.builder.base_image)
        return self._base_image_inspect

    def throw_canceled_build_exception(self, *args, **kwargs):
        # SIGTERM handler: abort the build via an exception so the exit
        # plugins in build_docker_image's finally block still run.
        raise BuildCanceledException("Build was canceled")

    def build_docker_image(self):
        """
        build docker image

        Runs pre-build plugins, the docker build itself, then prepublish and
        post-build plugins; exit plugins always run from the finally block.

        :return: BuildResults
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
            # time to run pre-build plugins, so they can access cloned repo
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                raise
            except AutoRebuildCanceledException as ex:
                logger.info(str(ex))
                self.autorebuild_canceled = True
                raise

            start_time = datetime.datetime.now()
            self.plugins_timestamps['dockerbuild'] = start_time.isoformat()

            build_result = self.builder.build()

            # record how long the docker build took; bookkeeping failures
            # must never break the build itself
            try:
                finish_time = datetime.datetime.now()
                duration = finish_time - start_time
                seconds = duration.total_seconds()
                logger.debug("build finished in %ds", seconds)
                self.plugins_durations['dockerbuild'] = seconds
            except Exception:
                logger.exception("failed to save build duration")

            self.build_logs = build_result.logs

            self.build_failed = build_result.is_failed()

            if build_result.is_failed():
                # The docker build failed. Finish here, just run the
                # exit plugins (from the 'finally:' block below).
                self.plugins_errors['dockerbuild'] = ''
                return build_result

            self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                raise

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                raise

            return build_result
        finally:
            # We need to make sure all exit plugins are executed
            signal.signal(signal.SIGTERM, lambda *args: None)
            exit_runner = ExitPluginsRunner(self.builder.tasker, self,
                                            self.exit_plugins_conf,
                                            plugin_files=self.plugin_files)
            try:
                exit_runner.run(keep_going=True)
            except PluginFailedException as ex:
                logger.error("one or more exit plugins failed: %s", ex)
                raise
            finally:
                self.source.remove_tmpdir()

            signal.signal(signal.SIGTERM, signal.SIG_DFL)
class DockerBuildWorkflow(object):
    """
    This class defines a workflow for building images:

    1. pull image from registry
    2. tag it properly if needed
    3. obtain source
    4. build image
    5. tag it
    6. push it to registries
    """

    def __init__(self, source, image,
                 parent_registry=None, target_registries=None,
                 prebuild_plugins=None, prepublish_plugins=None,
                 postbuild_plugins=None, plugin_files=None,
                 parent_registry_insecure=False,
                 target_registries_insecure=False,
                 dont_pull_base_image=False,
                 **kwargs):
        """
        :param source: dict, where/how to get source code to put in image
        :param image: str, tag for built image ([registry/]image_name[:tag])
        :param parent_registry: str, registry to pull base image from
        :param target_registries: list of str, list of registries to push image to
               (might change in future)
        :param prebuild_plugins: dict, arguments for pre-build plugins
        :param prepublish_plugins: dict, arguments for test-build plugins
        :param postbuild_plugins: dict, arguments for post-build plugins
        :param plugin_files: list of str, load plugins also from these files
        :param parent_registry_insecure: bool, allow connecting to parent registry
               over plain http
        :param target_registries_insecure: bool, allow connecting to target registries
               over plain http
        :param dont_pull_base_image: bool, don't pull or update base image
               specified in dockerfile
        """
        self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
        self.image = image

        self.parent_registry = parent_registry
        self.parent_registry_insecure = parent_registry_insecure

        self.prebuild_plugins_conf = prebuild_plugins
        self.prepublish_plugins_conf = prepublish_plugins
        self.postbuild_plugins_conf = postbuild_plugins

        # per-phase plugin results, keyed by plugin name
        self.prebuild_results = {}
        self.postbuild_results = {}

        self.plugin_files = plugin_files

        self.kwargs = kwargs

        self.builder = None
        self.build_logs = None
        self.built_image_inspect = None
        self.base_image_inspect = None

        self.dont_pull_base_image = dont_pull_base_image
        self.pulled_base_images = set()

        # squashed image tarball
        # set by squash plugin
        self.exported_squashed_image = {}

        self.tag_conf = TagConf()
        self.push_conf = PushConf()
        if target_registries:
            self.push_conf.add_docker_registries(target_registries,
                                                 insecure=target_registries_insecure)

        # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
        # "path/to/file" -> "content"
        self.files = {}

        if kwargs:
            logger.warning("unprocessed keyword arguments: %s", kwargs)

    def build_docker_image(self):
        """
        build docker image

        Pulls the base image (unless disabled), runs pre-build plugins, the
        docker build itself, prepublish plugins, pushes to any configured
        registries, and finally post-build plugins. NOTE: a plugin-phase
        failure logs the error and returns None (best-effort semantics),
        rather than raising.

        :return: BuildResults, or None if a plugin phase failed
        """
        self.builder = InsideBuilder(self.source, self.image)
        try:
            if not self.dont_pull_base_image:
                self.pulled_base_images = self.builder.pull_base_image(
                    self.parent_registry,
                    insecure=self.parent_registry_insecure)

            self.base_image_inspect = self.builder.tasker.inspect_image(
                self.builder.base_image)

            # time to run pre-build plugins, so they can access cloned repo,
            # base image
            logger.info("running pre-build plugins")
            prebuild_runner = PreBuildPluginsRunner(self.builder.tasker, self,
                                                    self.prebuild_plugins_conf,
                                                    plugin_files=self.plugin_files)
            try:
                prebuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prebuild plugins failed: %s", ex)
                return

            build_result = self.builder.build()
            self.build_logs = build_result.logs

            if not build_result.is_failed():
                self.built_image_inspect = self.builder.inspect_built_image()

            # run prepublish plugins
            prepublish_runner = PrePublishPluginsRunner(self.builder.tasker, self,
                                                        self.prepublish_plugins_conf,
                                                        plugin_files=self.plugin_files)
            try:
                prepublish_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more prepublish plugins failed: %s", ex)
                return

            if not build_result.is_failed():
                # push the built image to every configured target registry
                for registry in self.push_conf.docker_registries:
                    self.builder.push_built_image(registry.uri,
                                                  insecure=registry.insecure)

            postbuild_runner = PostBuildPluginsRunner(self.builder.tasker, self,
                                                      self.postbuild_plugins_conf,
                                                      plugin_files=self.plugin_files)
            try:
                postbuild_runner.run()
            except PluginFailedException as ex:
                logger.error("one or more postbuild plugins failed: %s", ex)
                return

            return build_result
        finally:
            # always drop the temporary checkout of the source
            self.source.remove_tmpdir()