def __init__(self, source, image, **kwargs):
    """Prepare the builder: mixins, docker tasker, and base-image metadata.

    :param source: build source providing the Dockerfile
    :param image: str, name of the image to be built
    """
    LastLogger.__init__(self)
    BuilderStateMachine.__init__(self)

    print_version_of_tools()

    self.tasker = DockerTasker()
    daemon_info = self.tasker.get_info()
    daemon_version = self.tasker.get_version()
    logger.debug(json.dumps(daemon_info, indent=2))
    logger.info(json.dumps(daemon_version, indent=2))

    # arguments for build
    self.source = source
    self.image = ImageName.parse(image)
    self.base_image_id = None
    self.image_id = None
    self.built_image_info = None

    # get info about base image from dockerfile
    self.df_path, self.df_dir = self.source.get_dockerfile_path()
    self.set_base_image(df_parser(self.df_path).baseimage)
    logger.debug("base image specified in dockerfile = '%s'", self.base_image)
    # default to the 'latest' tag when the Dockerfile omits one
    if not self.base_image.tag:
        self.base_image.tag = 'latest'
def __init__(self, source, image, **kwargs):
    """Set up builder state and read the base image from the Dockerfile.

    :param source: build source providing the Dockerfile
    :param image: str, name of the image to be built
    """
    LastLogger.__init__(self)
    BuilderStateMachine.__init__(self)

    print_version_of_tools()

    self.tasker = DockerTasker()
    daemon_info = self.tasker.get_info()
    daemon_version = self.tasker.get_version()
    logger.debug(json.dumps(daemon_info, indent=2))
    logger.info(json.dumps(daemon_version, indent=2))

    # arguments for build
    self.source = source
    self.image = ImageName.parse(image)
    self.base_image_id = None
    self.image_id = None
    self.built_image_info = None

    # get info about base image from dockerfile
    self.df_path, self.df_dir = self.source.get_dockerfile_path()
    self.base_image = ImageName.parse(DockerfileParser(self.df_path).baseimage)
    logger.debug("base image specified in dockerfile = '%s'", self.base_image)
    # default to the 'latest' tag when the Dockerfile omits one
    if not self.base_image.tag:
        self.base_image.tag = 'latest'
def __init__(self, source, image, **kwargs):
    """Set up builder state; the Dockerfile path may be filled in later.

    :param source: build source providing the build file
    :param image: str, name of the image to be built
    """
    LastLogger.__init__(self)
    BuilderStateMachine.__init__(self)

    print_version_of_tools()

    self.tasker = DockerTasker()
    daemon_info = self.tasker.get_info()
    daemon_version = self.tasker.get_version()
    logger.debug(json.dumps(daemon_info, indent=2))
    logger.info(json.dumps(daemon_version, indent=2))

    # arguments for build
    self.source = source
    self.image = ImageName.parse(image)
    self.base_image = None
    self.image_id = None
    self.built_image_info = None

    # get info about base image from dockerfile
    bf_path, bf_dir = self.source.get_build_file_path()
    self.df_dir = bf_dir
    self._df_path = None

    # If the Dockerfile will be entirely generated from the container.yaml
    # (in the Flatpak case, say), then a plugin needs to create the Dockerfile
    # and set the base image
    if bf_path.endswith(DOCKERFILE_FILENAME):
        self.set_df_path(bf_path)
def build_docker_image(self) -> None:
    """Start the container build.

    All plugins run in order; normally a SIGTERM delivered to atomic-reactor
    cancels the build. When ``keep_plugins_running`` is set, SIGTERM is
    ignored so every configured plugin is guaranteed to execute.
    """
    print_version_of_tools()
    try:
        self.fs_watcher.start()
        # decide how SIGTERM is treated for the duration of the run
        if self.keep_plugins_running:
            term_handler = signal.SIG_IGN
        else:
            term_handler = self.throw_canceled_build_exception
        signal.signal(signal.SIGTERM, term_handler)

        plugin_runner = PluginsRunner(self,
                                      self.plugins_conf,
                                      self.plugin_files,
                                      self.keep_plugins_running,
                                      plugins_results=self.data.plugins_results)
        plugin_runner.run()
    finally:
        # always restore the default handler and stop the watcher
        signal.signal(signal.SIGTERM, signal.SIG_DFL)
        self.fs_watcher.finish()
def __init__(self, source, image, **kwargs):
    """Set up builder state; only Dockerfile build files set ``df_path`` here.

    :param source: build source providing the build file
    :param image: str, name of the image to be built
    """
    LastLogger.__init__(self)
    BuilderStateMachine.__init__(self)

    print_version_of_tools()

    self.tasker = DockerTasker()
    daemon_info = self.tasker.get_info()
    daemon_version = self.tasker.get_version()
    logger.debug(json.dumps(daemon_info, indent=2))
    logger.info(json.dumps(daemon_version, indent=2))

    # arguments for build
    self.source = source
    self.image = ImageName.parse(image)
    self.base_image = None
    self.image_id = None
    self.built_image_info = None

    # get info about base image from dockerfile
    bf_path, bf_dir = self.source.get_build_file_path()
    self.df_dir = bf_dir
    self._df_path = None

    # If the build file isn't a Dockerfile (say, a flatpak.json), a plugin
    # is responsible for creating the Dockerfile and setting the base image
    if bf_path.endswith(DOCKERFILE_FILENAME):
        self.set_df_path(bf_path)
def __init__(self, source, image, **kwargs):
    """Initialize builder state, including base- and parent-image bookkeeping.

    :param source: build source providing the build file
    :param image: str, name of the image to be built
    """
    LastLogger.__init__(self)
    BuilderStateMachine.__init__(self)

    print_version_of_tools()
    self.tasker = ContainerTasker()

    # arguments for build
    self.source = source
    self.image = ImageName.parse(image)
    self.image_id = None
    self.built_image_info = None

    # base-image state
    self.base_image = None
    self.base_image_insecure = None
    self.base_image_dockercfg_path = None
    self.original_base_image = None
    self._base_image_inspect = None
    self.base_from_scratch = False
    # last parent in Dockerfile is custom base image,
    # used for plugins custom base image handling
    self.custom_base_image = False
    # any parent in Dockerfile is custom base image,
    # used for plugins custom base image handling
    self.custom_parent_image = False

    # parent-image state
    self.parents_pulled = False
    self.parent_images = {}  # dockerfile ImageName => locally available ImageName
    self._parent_images_inspect = {}  # locally available image => inspect
    self.parents_ordered = []
    self.parent_images_digests = {}

    # get info about base image from dockerfile
    bf_path, bf_dir = self.source.get_build_file_path()
    self.df_dir = bf_dir
    self._df_path = None
    self.original_df = None
    self.buildargs = {}  # --buildargs for container build

    # If the Dockerfile will be entirely generated from the container.yaml
    # (in the Flatpak case, say), then a plugin needs to create the Dockerfile
    # and set the base image
    if bf_path.endswith(DOCKERFILE_FILENAME):
        self.set_df_path(bf_path)
def __init__(self, source, image, **kwargs):
    """Initialize builder state plus parent-image tracking structures.

    :param source: build source providing the build file
    :param image: str, name of the image to be built
    """
    LastLogger.__init__(self)
    BuilderStateMachine.__init__(self)

    print_version_of_tools()

    self.tasker = DockerTasker()
    daemon_info = self.tasker.get_info()
    daemon_version = self.tasker.get_version()
    logger.debug(json.dumps(daemon_info, indent=2))
    logger.info(json.dumps(daemon_version, indent=2))

    # arguments for build
    self.source = source
    self.image = ImageName.parse(image)
    self.image_id = None
    self.built_image_info = None

    # base-image state
    self.base_image = None
    self.original_base_image = None
    self._base_image_inspect = None
    self.base_from_scratch = False
    # last parent in Dockerfile is custom base image,
    # used for plugins custom base image handling
    self.custom_base_image = False
    # any parent in Dockerfile is custom base image,
    # used for plugins custom base image handling
    self.custom_parent_image = False

    # parent-image state
    self.parents_pulled = False
    self.parent_images = {}  # dockerfile ImageName => locally available ImageName
    self._parent_images_inspect = {}  # locally available image => inspect
    self.parents_ordered = []
    self.parent_images_digests = {}

    # get info about base image from dockerfile
    bf_path, bf_dir = self.source.get_build_file_path()
    self.df_dir = bf_dir
    self._df_path = None

    # If the Dockerfile will be entirely generated from the container.yaml
    # (in the Flatpak case, say), then a plugin needs to create the Dockerfile
    # and set the base image
    if bf_path.endswith(DOCKERFILE_FILENAME):
        self.set_df_path(bf_path)
def __init__(self, source, image, **kwargs):
    """Initialize builder state; parent-image digests use a DigestCollector.

    :param source: build source providing the build file
    :param image: str, name of the image to be built
    """
    LastLogger.__init__(self)
    BuilderStateMachine.__init__(self)

    print_version_of_tools()

    self.tasker = DockerTasker()
    daemon_info = self.tasker.get_info()
    daemon_version = self.tasker.get_version()
    logger.debug(json.dumps(daemon_info, indent=2))
    logger.info(json.dumps(daemon_version, indent=2))

    # arguments for build
    self.source = source
    self.image = ImageName.parse(image)
    self.image_id = None
    self.built_image_info = None

    # base-image state
    self.base_image = None
    self.original_base_image = None
    self._base_image_inspect = None
    self.base_from_scratch = False

    # parent-image state
    self.parents_pulled = False
    self.parent_images = {}  # dockerfile ImageName => locally available ImageName
    self._parent_images_inspect = {}  # locally available image => inspect
    self.parents_ordered = []
    self.parent_images_digests = DigestCollector()

    # get info about base image from dockerfile
    bf_path, bf_dir = self.source.get_build_file_path()
    self.df_dir = bf_dir
    self._df_path = None

    # If the Dockerfile will be entirely generated from the container.yaml
    # (in the Flatpak case, say), then a plugin needs to create the Dockerfile
    # and set the base image
    if bf_path.endswith(DOCKERFILE_FILENAME):
        self.set_df_path(bf_path)
def __init__(self, source, image, **kwargs):
    """Initialize builder state; Dockerfile parents live in DockerfileImages.

    :param source: build source providing the build file
    :param image: str, name of the image to be built
    """
    LastLogger.__init__(self)
    BuilderStateMachine.__init__(self)

    print_version_of_tools()
    self.tasker = ContainerTasker()

    # arguments for build
    self.source = source
    self.image = ImageName.parse(image)
    self.image_id = None
    self.built_image_info = None

    # configuration of source_registy and pull_registries with insecure and
    # dockercfg_path, by registry key
    self.pull_registries = {}

    # base/parent image state
    self.dockerfile_images = DockerfileImages([])
    self._base_image_inspect = None
    self.parents_pulled = False
    self._parent_images_inspect = {}  # locally available image => inspect
    self.parent_images_digests = {}

    # get info about base image from dockerfile
    bf_path, bf_dir = self.source.get_build_file_path()
    self.df_dir = bf_dir
    self._df_path = None
    self.original_df = None
    self.buildargs = {}  # --buildargs for container build

    # If the Dockerfile will be entirely generated from the container.yaml
    # (in the Flatpak case, say), then a plugin needs to create the Dockerfile
    # and set the base image
    if bf_path.endswith(DOCKERFILE_FILENAME):
        self.set_df_path(bf_path)
def test_print_versions_of_tools():
    """Smoke test: the tool-version reporting helper must run without raising."""
    print_version_of_tools()
def build_docker_image(self) -> None:
    """
    build docker image

    Runs the pre-build, buildstep, prepublish and postbuild plugin phases in
    order. Exit plugins always run afterwards (in the ``finally`` block), and
    the filesystem watcher is stopped and the SIGTERM handler restored last.
    """
    print_version_of_tools()
    # tracks whether a build-phase exception is already propagating, so that
    # failures from exit plugins do not mask the root cause
    exception_being_handled = False
    # Make sure exit_runner is defined for finally block
    exit_runner = None
    try:
        self.fs_watcher.start()
        # let SIGTERM cancel the build while plugins are running
        signal.signal(signal.SIGTERM, self.throw_canceled_build_exception)
        prebuild_runner = PreBuildPluginsRunner(
            self, self.plugins.prebuild, plugin_files=self.plugin_files)
        prepublish_runner = PrePublishPluginsRunner(
            self, self.plugins.prepublish, plugin_files=self.plugin_files)
        postbuild_runner = PostBuildPluginsRunner(
            self, self.plugins.postbuild, plugin_files=self.plugin_files)

        # time to run pre-build plugins, so they can access cloned repo
        logger.info("running pre-build plugins")
        try:
            prebuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prebuild plugins failed: %s", ex)
            raise

        # we are delaying initialization, because prebuild plugin
        # reactor_config might change build method
        buildstep_runner = BuildStepPluginsRunner(
            self, self.plugins.buildstep, plugin_files=self.plugin_files)

        logger.info("running buildstep plugins")
        try:
            buildstep_runner.run()
        except PluginFailedException as ex:
            logger.error('buildstep plugin failed: %s', ex)
            raise

        # run prepublish plugins
        try:
            prepublish_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more prepublish plugins failed: %s", ex)
            raise

        try:
            postbuild_runner.run()
        except PluginFailedException as ex:
            logger.error("one or more postbuild plugins failed: %s", ex)
            raise
    except Exception as ex:
        logger.debug("caught exception (%s) so running exit plugins",
                     exception_message(ex))
        exception_being_handled = True
        raise
    finally:
        # We need to make sure all exit plugins are executed;
        # SIGTERM is ignored (no-op handler) while they run
        signal.signal(signal.SIGTERM, lambda *args: None)
        exit_runner = ExitPluginsRunner(self, self.plugins.exit,
                                        keep_going=True,
                                        plugin_files=self.plugin_files)
        try:
            exit_runner.run(keep_going=True)
        except PluginFailedException as ex:
            logger.error("one or more exit plugins failed: %s", ex)
            # raise exception only in case that there is no previous exception
            # being already handled to prevent replacing original exceptions
            # (root cause) with exceptions from exit plugins
            if not exception_being_handled:
                raise ex
        finally:
            self.fs_watcher.finish()
            signal.signal(signal.SIGTERM, signal.SIG_DFL)