def test_copy_from_is_blocked(tmpdir):
    """test when user has specified COPY --from=image (instead of builder)"""
    parser = df_parser(str(tmpdir))
    if MOCK:
        mock_docker()
    src_params = {'provider': 'path', 'uri': 'file://' + str(tmpdir), 'tmpdir': str(tmpdir)}

    # known stage name and stage index: set_df_path (called at init) must accept this
    parser.content = dedent("""\
        FROM monty AS vikings
        FROM python
        COPY --from=vikings /spam/eggs /bin/eggs
        COPY --from=0 /spam/eggs /bin/eggs
        COPY src dest
        """)
    InsideBuilder(get_source_instance_for(src_params), 'built-img')

    # a stage name that was never declared must be rejected
    parser.content = dedent("""\
        FROM monty as vikings
        FROM python
        # using a stage name we haven't seen should break:
        COPY --from=notvikings /spam/eggs /bin/eggs
        """)
    with pytest.raises(RuntimeError) as exc_info:
        # calls set_df_path at init
        InsideBuilder(get_source_instance_for(src_params), 'built-img')
    assert "FROM notvikings AS source" in str(exc_info.value)

    # an out-of-range stage index must be rejected as well
    parser.content = dedent("""\
        FROM monty as vikings
        # using an index we haven't seen should break:
        COPY --from=5 /spam/eggs /bin/eggs
        """)
    with pytest.raises(RuntimeError) as exc_info:
        # calls set_df_path at init
        InsideBuilder(get_source_instance_for(src_params), 'built-img')
    assert "COPY --from=5" in str(exc_info.value)
def __init__(self, source, image, target_registries=None, prebuild_plugins=None,
             prepublish_plugins=None, postbuild_plugins=None, exit_plugins=None,
             plugin_files=None, target_registries_insecure=False,
             openshift_build_selflink=None, **kwargs):
    """
    Initialize the build workflow state from the caller-supplied configuration.

    :param source: dict, where/how to get source code to put in image
    :param image: str, tag for built image ([registry/]image_name[:tag])
    :param target_registries: list of str, list of registries to push image to
                              (might change in future)
    :param prebuild_plugins: dict, arguments for pre-build plugins
    :param prepublish_plugins: dict, arguments for test-build plugins
    :param postbuild_plugins: dict, arguments for post-build plugins
    :param exit_plugins: dict, arguments for exit plugins
    :param plugin_files: list of str, load plugins also from these files
    :param target_registries_insecure: bool, allow connecting to target registries
                                       over plain http
    :param openshift_build_selflink: str, link to openshift build (if we're actually
                                     running on openshift) without the actual
                                     hostname/IP address
    """
    # materialize the source spec into a Source object backed by a fresh tempdir
    self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
    self.image = image

    # raw plugin configuration, consumed later by the plugin runners
    self.prebuild_plugins_conf = prebuild_plugins
    self.prepublish_plugins_conf = prepublish_plugins
    self.postbuild_plugins_conf = postbuild_plugins
    self.exit_plugins_conf = exit_plugins

    # per-phase plugin results and failure bookkeeping
    self.prebuild_results = {}
    self.postbuild_results = {}
    self.autorebuild_canceled = False
    self.build_failed = False
    self.plugin_failed = False
    self.plugin_files = plugin_files
    self.kwargs = kwargs

    # populated later, during the actual build
    self.builder = None
    self.build_logs = []
    self.built_image_inspect = None
    self._base_image_inspect = None

    self.pulled_base_images = set()

    # When an image is exported into tarball, it can then be processed by various plugins.
    # Each plugin that transforms the image should save it as a new file and append it to
    # the end of exported_image_sequence. Other plugins should then operate with last
    # member of this structure. Example:
    # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
    # You can use util.get_exported_image_metadata to create a dict to append to this list.
    self.exported_image_sequence = []

    self.tag_conf = TagConf()
    self.push_conf = PushConf()
    if target_registries:
        self.push_conf.add_docker_registries(target_registries,
                                             insecure=target_registries_insecure)

    # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
    # "path/to/file" -> "content"
    self.files = {}

    self.openshift_build_selflink = openshift_build_selflink

    if kwargs:
        logger.warning("unprocessed keyword arguments: %s", kwargs)
def test_build_bad_git_commit_dockerfile(tmpdir, source_params):
    """A build from a bad git commit must report failure rather than raise."""
    image_tag = "test-build:test_tag"
    if MOCK:
        mock_docker(build_should_fail=True, provided_image_repotags=image_tag)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), image_tag)
    assert builder.build().is_failed()
def test_different_custom_base_images(tmpdir):
    """Multiple distinct custom base images in one Dockerfile must be rejected."""
    if MOCK:
        mock_docker()
    params = {
        'provider': 'path',
        'uri': 'file://' + DOCKERFILE_MULTISTAGE_CUSTOM_BAD_PATH,
        'tmpdir': str(tmpdir),
    }
    with pytest.raises(NotImplementedError) as exc:
        InsideBuilder(get_source_instance_for(params), '')
    assert "multiple different custom base images aren't allowed in Dockerfile" in str(exc.value)
def __init__(self, source, image, parent_registry=None, target_registries=None,
             prebuild_plugins=None, prepublish_plugins=None, postbuild_plugins=None,
             plugin_files=None, parent_registry_insecure=False,
             target_registries_insecure=False, dont_pull_base_image=False, **kwargs):
    """
    Initialize the build state from the caller-supplied configuration.

    :param source: dict, where/how to get source code to put in image
    :param image: str, tag for built image ([registry/]image_name[:tag])
    :param parent_registry: str, registry to pull base image from
    :param target_registries: list of str, list of registries to push image to
                              (might change in future)
    :param prebuild_plugins: dict, arguments for pre-build plugins
    :param prepublish_plugins: dict, arguments for test-build plugins
    :param postbuild_plugins: dict, arguments for post-build plugins
    :param plugin_files: list of str, load plugins also from these files
    :param parent_registry_insecure: bool, allow connecting to parent registry
                                     over plain http
    :param target_registries_insecure: bool, allow connecting to target registries
                                       over plain http
    :param dont_pull_base_image: bool, don't pull or update base image specified
                                 in dockerfile
    """
    # materialize the source spec into a Source object backed by a fresh tempdir
    self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
    self.image = image

    self.parent_registry = parent_registry
    self.parent_registry_insecure = parent_registry_insecure

    # raw plugin configuration, consumed later by the plugin runners
    self.prebuild_plugins_conf = prebuild_plugins
    self.prepublish_plugins_conf = prepublish_plugins
    self.postbuild_plugins_conf = postbuild_plugins
    self.prebuild_results = {}
    self.postbuild_results = {}
    self.plugin_files = plugin_files
    self.kwargs = kwargs

    # populated later, during the actual build
    self.builder = None
    self.build_logs = None
    self.built_image_inspect = None
    self.base_image_inspect = None
    self.dont_pull_base_image = dont_pull_base_image
    self.pulled_base_images = set()

    # squashed image tarball
    # set by squash plugin
    self.exported_squashed_image = {}

    self.tag_conf = TagConf()
    self.push_conf = PushConf()
    if target_registries:
        self.push_conf.add_docker_registries(target_registries,
                                             insecure=target_registries_insecure)

    # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
    # "path/to/file" -> "content"
    self.files = {}

    if kwargs:
        logger.warning("unprocessed keyword arguments: %s", kwargs)
def test_no_base_image(tmpdir):
    """A Dockerfile without FROM must make set_df_path raise RuntimeError."""
    if MOCK:
        mock_docker()
    src = {'provider': 'path', 'uri': 'file://' + DOCKERFILE_OK_PATH, 'tmpdir': str(tmpdir)}
    builder = InsideBuilder(get_source_instance_for(src), 'built-img')
    parser = df_parser(str(tmpdir))
    parser.content = "# no FROM\nADD spam /eggs"
    with pytest.raises(RuntimeError) as exc:
        builder.set_df_path(str(tmpdir))
    assert "no base image specified" in str(exc.value)
def test_inspect_base_image(tmpdir, source_params):
    """inspect_base_image must return inspect data carrying an image Id."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    inspect_data = builder.inspect_base_image()
    assert inspect_data is not None
    assert inspect_data["Id"] is not None
def test_pull_base_image_with_registry(tmpdir):
    """Pulling a base image qualified with a registry returns a set of pulled tags.

    Fix: removed the unused local ``t = DockerTasker()`` — the tasker was never
    referenced in this test.
    """
    mock_docker()
    source_params = {"provider": "path", "uri": "file://%s" % str(tmpdir), "tmpdir": str(tmpdir)}
    # write a Dockerfile whose FROM is explicitly registry-qualified
    with open(os.path.join(str(tmpdir), DOCKERFILE_FILENAME), "wt") as fp:
        fp.writelines(["FROM %s/namespace/repo:tag\n" % LOCALHOST_REGISTRY])
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, "")
    pulled_tags = b.pull_base_image(LOCALHOST_REGISTRY, insecure=True)
    assert isinstance(pulled_tags, set)
def test_parent_images(tmpdir, source_params):
    """set_base_image records the new image in the parent_images mapping."""
    if MOCK:
        mock_docker()
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    base = str(builder.base_image)
    assert base in builder.parent_images
    assert builder.parent_images[base] is None
    builder.set_base_image("spam:eggs")
    assert builder.parent_images[base] == "spam:eggs"
def test_copy_from_is_blocked(tmpdir):
    """test when user has specified COPY --from=image (instead of builder)"""
    dfp = df_parser(str(tmpdir))
    if MOCK:
        mock_docker()
    source = {
        'provider': 'path',
        'uri': 'file://' + str(tmpdir),
        'tmpdir': str(tmpdir)
    }

    def make_builder():
        # init calls set_df_path, which validates COPY --from references
        return InsideBuilder(get_source_instance_for(source), 'built-img')

    # valid stage name and stage index: must not raise
    dfp.content = dedent("""\
        FROM monty AS vikings
        FROM python
        COPY --from=vikings /spam/eggs /bin/eggs
        COPY --from=0 /spam/eggs /bin/eggs
        COPY src dest
        """)
    make_builder()

    # unknown stage name: must be rejected
    dfp.content = dedent("""\
        FROM monty as vikings
        FROM python
        # using a stage name we haven't seen should break:
        COPY --from=notvikings /spam/eggs /bin/eggs
        """)
    with pytest.raises(RuntimeError) as exc_info:
        make_builder()
    assert "FROM notvikings AS source" in str(exc_info.value)

    # out-of-range stage index: must be rejected
    dfp.content = dedent("""\
        FROM monty as vikings
        # using an index we haven't seen should break:
        COPY --from=5 /spam/eggs /bin/eggs
        """)
    with pytest.raises(RuntimeError) as exc_info:
        make_builder()
    assert "COPY --from=5" in str(exc_info.value)
def test_get_base_image_info(tmpdir, source_params):
    """get_base_image_info must expose both Id and RepoTags of the base image."""
    if MOCK:
        mock_docker(provided_image_repotags='fedora:latest')
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    info = builder.get_base_image_info()
    assert info is not None
    assert info["Id"] is not None
    assert info["RepoTags"] is not None
def test_parent_images(parents_pulled, tmpdir, source_params):
    """set_base_image stores a parsed ImageName and the parents_pulled flag."""
    if MOCK:
        mock_docker()
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    base = builder.base_image
    assert base in builder.parent_images
    assert builder.parent_images[base] is None
    builder.set_base_image("spam:eggs", parents_pulled=parents_pulled)
    assert builder.parent_images[base] == ImageName.parse("spam:eggs")
    assert builder._parents_pulled == parents_pulled
def test_parent_images_to_str(tmpdir, caplog):
    """parent_images_to_str maps resolved parents and logs unresolved ones."""
    if MOCK:
        mock_docker()
    src = {'provider': 'path', 'uri': 'file://' + DOCKERFILE_OK_PATH, 'tmpdir': str(tmpdir)}
    builder = InsideBuilder(get_source_instance_for(src), 'built-img')
    builder.set_base_image("spam")
    # an unresolved parent must be skipped from the result but logged
    builder.parent_images["bacon"] = None
    assert builder.parent_images_to_str() == {"fedora:latest": "spam:latest"}
    assert "None in: base bacon has parent None" in caplog.text
def test_inspect_built_image(tmpdir, source_params):
    """inspect_built_image must return inspect data carrying an image Id."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    # skip the "was it built?" guard so inspect can run directly
    flexmock(InsideBuilder, ensure_is_built=None)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    info = builder.inspect_built_image()
    assert info is not None
    assert info["Id"] is not None
def test_get_image_built_info(tmpdir, source_params, image, will_raise):
    """get_built_image_info raises only for the parametrized failure cases."""
    if MOCK:
        mock_docker(provided_image_repotags=image)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), image)
    if will_raise:
        with pytest.raises(Exception):
            builder.get_built_image_info()
    else:
        builder.get_built_image_info()
def test_build_image(tmpdir, source_params):
    """A successful build yields an image that docker can inspect."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    source_params.update({'tmpdir': str(tmpdir)})
    tasker = DockerTasker()
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    result = builder.build()
    assert tasker.inspect_image(result.image_id)
    # clean
    tasker.remove_image(result.image_id)
def test_parent_images(parents_pulled, tmpdir, source_params):
    """set_base_image records the parent mapping unless building FROM scratch."""
    if MOCK:
        mock_docker()
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, '')
    orig_base = b.base_image
    # a FROM-scratch build has no real base image to track
    # NOTE(review): the collapsed source makes the extent of this suite
    # ambiguous — confirm against upstream whether the set_base_image call
    # below also belongs inside this conditional
    if not b.base_from_scratch:
        assert orig_base in b.parent_images
        assert b.parent_images[orig_base] is None
    b.set_base_image("spam:eggs", parents_pulled=parents_pulled)
    assert b.parent_images[orig_base] == ImageName.parse("spam:eggs")
    assert b.parents_pulled == parents_pulled
def test_different_custom_base_images(tmpdir):
    """Multiple distinct custom base images in one Dockerfile must be rejected."""
    if MOCK:
        mock_docker()
    src = get_source_instance_for({
        'provider': 'path',
        'uri': 'file://' + DOCKERFILE_MULTISTAGE_CUSTOM_BAD_PATH,
        'tmpdir': str(tmpdir),
    })
    with pytest.raises(NotImplementedError) as exc:
        InsideBuilder(src, '')
    expected = "multiple different custom base images aren't allowed in Dockerfile"
    assert expected in str(exc.value)
def _obtain_source_from_path_if_needed(self, local_path, container_path=CONTAINER_SHARE_PATH):
    """For 'path' sources, copy them beside the build json and repoint its uri."""
    build_json_path = os.path.join(local_path, BUILD_JSON)
    with open(build_json_path, 'r') as fp:
        build_json = json.load(fp)
    source = get_source_instance_for(build_json['source'], tmpdir=local_path)
    if source.provider != 'path':
        return
    logger.debug('copying source from %s to %s', source.schemeless_path, local_path)
    source.get()
    logger.debug('verifying that %s exists: %s', local_path, os.path.exists(local_path))
    # now modify the build json
    build_json['source']['uri'] = 'file://' + os.path.join(container_path,
                                                           CONTAINER_SHARE_SOURCE_SUBDIR)
    with open(build_json_path, 'w') as fp:
        json.dump(build_json, fp)
def test_build_generator_raises(tmpdir):
    """When the build generator raises, build() wraps it in ExceptionBuildResult.

    Fix: removed the unused local ``t = DockerTasker()`` — the tasker was never
    referenced in this test.
    """
    provided_image = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=provided_image,
                    build_should_fail=True, build_should_fail_generator=True)
    source_params = SOURCE.copy()
    source_params.update({"tmpdir": str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, provided_image)
    build_result = b.build()
    assert isinstance(build_result, ExceptionBuildResult)
    assert build_result.is_failed()
    assert "build generator failure" in build_result.logs
def _obtain_source_from_path_if_needed(self, local_path, container_path=CONTAINER_SHARE_PATH):
    """For 'path' sources, copy them beside the build json and repoint its uri."""
    # TODO: maybe we should do this for any provider? (if we expand to various providers
    # like mercurial, we don't want to force the container to have mercurial installed, etc.)
    build_json_path = os.path.join(local_path, BUILD_JSON)
    with open(build_json_path, "r") as json_file:
        metadata = json.load(json_file)
    src = get_source_instance_for(metadata["source"], tmpdir=local_path)
    if src.provider == "path":
        logger.debug("copying source from %s to %s", src.schemeless_path, local_path)
        src.get()
        logger.debug("verifying that %s exists: %s", local_path, os.path.exists(local_path))
        # rewrite the uri so the containerized build reads the copied source
        shared_source = os.path.join(container_path, CONTAINER_SHARE_SOURCE_SUBDIR)
        metadata["source"]["uri"] = "file://" + shared_source
        with open(build_json_path, "w") as json_file:
            json.dump(metadata, json_file)
def _obtain_source_from_path_if_needed(self, local_path, container_path=CONTAINER_SHARE_PATH):
    """For 'path' sources, copy them beside the build json and repoint its uri."""
    # TODO: maybe we should do this for any provider? (if we expand to various providers
    # like mercurial, we don't want to force the container to have mercurial installed, etc.)
    build_json_path = os.path.join(local_path, BUILD_JSON)
    with open(build_json_path, 'r') as reader:
        build_json = json.load(reader)
    source = get_source_instance_for(build_json['source'], tmpdir=local_path)
    if source.provider == 'path':
        logger.debug('copying source from %s to %s', source.schemeless_path, local_path)
        source.get()
        logger.debug('verifying that %s exists: %s', local_path, os.path.exists(local_path))
        # now modify the build json
        new_uri = 'file://' + os.path.join(container_path, CONTAINER_SHARE_SOURCE_SUBDIR)
        build_json['source']['uri'] = new_uri
        with open(build_json_path, 'w') as writer:
            json.dump(build_json, writer)
def test_get_base_image_info(tmpdir, source_params, image, will_raise):
    """get_base_image_info raises for bad images, else returns Id and RepoTags."""
    if MOCK:
        mock_docker(provided_image_repotags=image)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), image)
    if will_raise:
        with pytest.raises(Exception):
            builder.get_base_image_info()
    else:
        info = builder.get_base_image_info()
        assert info is not None
        assert info["Id"] is not None
        assert info["RepoTags"] is not None
def test_build_generator_raises(tmpdir):
    """When the build generator raises, build() wraps it in ExceptionBuildResult.

    Fix: removed the unused local ``t = DockerTasker()`` — the tasker was never
    referenced in this test.
    """
    provided_image = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=provided_image,
                    build_should_fail=True, build_should_fail_generator=True)
    source_params = SOURCE.copy()
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, provided_image)
    build_result = b.build()
    assert isinstance(build_result, ExceptionBuildResult)
    assert build_result.is_failed()
    assert 'build generator failure' in build_result.logs
def _obtain_source_from_path_if_needed(self, local_path, container_path=CONTAINER_SHARE_PATH):
    """For 'path' sources, copy them beside the build json and repoint its uri."""
    # TODO: maybe we should do this for any provider? If we expand to various providers
    # like mercurial, then we don't want to force the container to have mercurial
    # installed, etc.
    build_json_path = os.path.join(local_path, BUILD_JSON)
    with open(build_json_path, 'r') as fp:
        build_json = json.load(fp)

    source = get_source_instance_for(build_json['source'], tmpdir=local_path)
    if source.provider != 'path':
        return

    logger.debug('copying source from %s to %s', source.schemeless_path, local_path)
    source.get()
    logger.debug('verifying that %s exists: %s', local_path, os.path.exists(local_path))

    # now modify the build json
    build_json['source']['uri'] = 'file://' + os.path.join(container_path,
                                                           CONTAINER_SHARE_SOURCE_SUBDIR)
    with open(build_json_path, 'w') as fp:
        json.dump(build_json, fp)
def test_ensure_built(tmpdir, source_params, is_built):
    """ensure_is_built / ensure_not_built must agree with the is_built flag."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), '')
    builder.is_built = is_built
    if is_built:
        assert builder.ensure_is_built() is None
        with pytest.raises(Exception):
            builder.ensure_not_built()
    else:
        assert builder.ensure_not_built() is None
        with pytest.raises(Exception):
            builder.ensure_is_built()
def test_ensure_built(tmpdir, source_params, is_built):
    """ensure_is_built / ensure_not_built must agree with the is_built flag.

    Fix: replaced ``== None`` comparisons with ``is None`` (PEP 8 E711 —
    identity, not equality, is the correct test for the None singleton).
    """
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, '')
    b.is_built = is_built
    if is_built:
        assert b.ensure_is_built() is None
        with pytest.raises(Exception):
            b.ensure_not_built()
    else:
        assert b.ensure_not_built() is None
        with pytest.raises(Exception):
            b.ensure_is_built()
def test_parent_image_inspect(parents_pulled, tmpdir, source_params):
    """parent_image_inspect works whether or not parent images were pulled."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    builder._parents_pulled = parents_pulled
    if not parents_pulled:
        # un-pulled parents are inspected via the registry helper; stub it out
        (flexmock(atomic_reactor.util)
         .should_receive('get_inspect_for_image')
         .and_return({'Id': 123}))
    info = builder.parent_image_inspect(tag)
    assert info is not None
    assert info["Id"] is not None
def test_pull_base_image(tmpdir, source_params):
    """pull_base_image pulls exactly the two expected tags of the base image."""
    if MOCK:
        mock_docker()
    source_params.update({"tmpdir": str(tmpdir)})
    tasker = DockerTasker()
    builder = InsideBuilder(get_source_instance_for(source_params), "")
    pulled_tags = builder.pull_base_image(LOCALHOST_REGISTRY, insecure=True)
    assert isinstance(pulled_tags, set)
    assert len(pulled_tags) == 2
    for tag in pulled_tags:
        parsed = ImageName.parse(tag)
        assert tasker.inspect_image(parsed) is not None
        assert parsed.repo == git_base_image.repo
        assert parsed.tag == git_base_image.tag
    # clean
    tasker.remove_image(git_base_image)
def test_parent_image_inspect(parents_pulled, tmpdir, source_params):
    """parent_image_inspect works whether or not parent images were pulled."""
    tag = "test-build:test_tag"
    if MOCK:
        mock_docker(provided_image_repotags=tag)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), tag)
    builder.parents_pulled = parents_pulled
    if not parents_pulled:
        # un-pulled parents are inspected via the registry helper; stub it out
        (flexmock(atomic_reactor.util).should_receive('get_inspect_for_image')
         .and_return({'Id': 123}))
    info = builder.parent_image_inspect(tag)
    assert info is not None
    assert info["Id"] is not None
def test_base_image_inspect(tmpdir, source_params, parents_pulled, base_exist):
    """base_image_inspect: covers scratch, pulled/unpulled and missing-base cases."""
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, '')
    b.parents_pulled = parents_pulled
    # FROM scratch has no base image at all, so "missing base" cannot apply
    if b.base_from_scratch:
        base_exist = True
    if base_exist:
        if b.base_from_scratch:
            # scratch builds report an empty inspect dict
            built_inspect = b.base_image_inspect
            assert built_inspect == {}
        else:
            if not parents_pulled:
                # un-pulled parents are inspected via the registry helper; stub it out
                (flexmock(atomic_reactor.util)
                 .should_receive('get_inspect_for_image')
                 .and_return({'Id': 123}))
            built_inspect = b.base_image_inspect
            assert built_inspect is not None
            assert built_inspect["Id"] is not None
    else:
        # base image not available: the failure mode depends on the lookup path
        if parents_pulled or b.custom_base_image:
            # local docker inspect path: a 404 surfaces as KeyError
            response = flexmock(content="not found", status_code=404)
            (flexmock(docker.APIClient)
             .should_receive('inspect_image')
             .and_raise(docker.errors.NotFound, "xyz", response))
            with pytest.raises(KeyError):
                b.base_image_inspect
        else:
            # registry path: errors from the helper propagate unchanged
            (flexmock(atomic_reactor.util)
             .should_receive('get_inspect_for_image')
             .and_raise(NotImplementedError))
            with pytest.raises(NotImplementedError):
                b.base_image_inspect
def test_get_base_image_info(tmpdir, source_params, image, will_raise):
    """get_base_image_info: raises for bad images, returns None for scratch."""
    if DOCKERFILE_MULTISTAGE_CUSTOM_PATH in source_params['uri']:
        # not applicable to the custom-base multistage fixture
        return
    if MOCK:
        mock_docker(provided_image_repotags=image)
    source_params.update({'tmpdir': str(tmpdir)})
    builder = InsideBuilder(get_source_instance_for(source_params), image)
    if builder.base_from_scratch:
        # a scratch build has nothing to look up, so it can never raise
        will_raise = False
    if will_raise:
        with pytest.raises(Exception):
            builder.get_base_image_info()
    else:
        info = builder.get_base_image_info()
        if builder.base_from_scratch:
            assert info is None
        else:
            assert info is not None
            assert info["Id"] is not None
            assert info["RepoTags"] is not None
def __init__(self, source, image, prebuild_plugins=None, prepublish_plugins=None,
             postbuild_plugins=None, exit_plugins=None, plugin_files=None,
             openshift_build_selflink=None, client_version=None,
             buildstep_plugins=None, **kwargs):
    """
    Initialize the build workflow state from the caller-supplied configuration.

    :param source: dict, where/how to get source code to put in image
    :param image: str, tag for built image ([registry/]image_name[:tag])
    :param prebuild_plugins: dict, arguments for pre-build plugins
    :param prepublish_plugins: dict, arguments for test-build plugins
    :param postbuild_plugins: dict, arguments for post-build plugins
    :param exit_plugins: dict, arguments for exit plugins
    :param plugin_files: list of str, load plugins also from these files
    :param openshift_build_selflink: str, link to openshift build (if we're actually
                                     running on openshift) without the actual
                                     hostname/IP address
    :param client_version: str, osbs-client version used to render build json
    :param buildstep_plugins: dict, arguments for build-step plugins
    """
    # materialize the source spec into a Source object backed by a fresh tempdir
    self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
    self.image = image

    # raw plugin configuration, consumed later by the plugin runners
    self.prebuild_plugins_conf = prebuild_plugins
    self.buildstep_plugins_conf = buildstep_plugins
    self.prepublish_plugins_conf = prepublish_plugins
    self.postbuild_plugins_conf = postbuild_plugins
    self.exit_plugins_conf = exit_plugins

    # per-phase plugin results
    self.prebuild_results = {}
    self.buildstep_result = {}
    self.postbuild_results = {}
    self.prepub_results = {}
    self.exit_results = {}
    # overall outcome; starts failed until a build actually succeeds
    self.build_result = BuildResult(fail_reason="not built")

    # scratch space and timing/error bookkeeping shared between plugins
    self.plugin_workspace = {}
    self.plugins_timestamps = {}
    self.plugins_durations = {}
    self.plugins_errors = {}
    self.autorebuild_canceled = False
    self.build_canceled = False
    self.plugin_failed = False
    self.plugin_files = plugin_files
    self.fs_watcher = FSWatcher()
    self.kwargs = kwargs

    # populated later, during the actual build
    self.builder = None
    self.built_image_inspect = None
    self.layer_sizes = []
    self._base_image_inspect = None
    self.default_image_build_method = CONTAINER_DEFAULT_BUILD_METHOD

    self.pulled_base_images = set()

    # When an image is exported into tarball, it can then be processed by various plugins.
    # Each plugin that transforms the image should save it as a new file and append it to
    # the end of exported_image_sequence. Other plugins should then operate with last
    # member of this structure. Example:
    # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
    # You can use util.get_exported_image_metadata to create a dict to append to this list.
    self.exported_image_sequence = []

    self.tag_conf = TagConf()
    self.push_conf = PushConf()

    # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
    # "path/to/file" -> "content"
    self.files = {}

    self.openshift_build_selflink = openshift_build_selflink

    # List of RPMs that go into the final result, as per rpm_util.parse_rpm_output
    self.image_components = None

    if client_version:
        logger.debug("build json was built by osbs-client %s", client_version)

    if kwargs:
        logger.warning("unprocessed keyword arguments: %s", kwargs)
def test_errors(self, source, error):
    """Invalid source specs must raise ValueError with the exact message."""
    exc_info = pytest.raises(ValueError, get_source_instance_for, source)
    assert str(exc_info.value) == error
def test_recognizes_correct_provider(self, source, expected):
    """get_source_instance_for must pick the provider class matching the spec."""
    instance = get_source_instance_for(source)
    assert isinstance(instance, expected)
def test_sourceconfig_bad_build_method(self, monkeypatch):
    """An image_build_method outside CONTAINER_BUILD_METHODS must be rejected."""
    src = get_source_instance_for({'provider': 'path', 'uri': DOCKERFILE_OK_PATH})
    # empty the allowed-methods list so any configured method becomes invalid
    flexmock(atomic_reactor.source, CONTAINER_BUILD_METHODS=[])
    with pytest.raises(AssertionError):
        src.config
def __init__(self, source, image, prebuild_plugins=None, prepublish_plugins=None,
             postbuild_plugins=None, exit_plugins=None, plugin_files=None,
             openshift_build_selflink=None, **kwargs):
    """
    Initialize the build workflow state from the caller-supplied configuration.

    :param source: dict, where/how to get source code to put in image
    :param image: str, tag for built image ([registry/]image_name[:tag])
    :param prebuild_plugins: dict, arguments for pre-build plugins
    :param prepublish_plugins: dict, arguments for test-build plugins
    :param postbuild_plugins: dict, arguments for post-build plugins
    :param exit_plugins: dict, arguments for exit plugins
    :param plugin_files: list of str, load plugins also from these files
    :param openshift_build_selflink: str, link to openshift build (if we're actually
                                     running on openshift) without the actual
                                     hostname/IP address
    """
    # materialize the source spec into a Source object backed by a fresh tempdir
    self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
    self.image = image

    # raw plugin configuration, consumed later by the plugin runners
    self.prebuild_plugins_conf = prebuild_plugins
    self.prepublish_plugins_conf = prepublish_plugins
    self.postbuild_plugins_conf = postbuild_plugins
    self.exit_plugins_conf = exit_plugins

    # per-phase plugin results and timing/error bookkeeping
    self.prebuild_results = {}
    self.postbuild_results = {}
    self.plugins_timestamps = {}
    self.plugins_durations = {}
    self.plugins_errors = {}
    self.autorebuild_canceled = False
    self.build_failed = False
    self.plugin_failed = False
    self.plugin_files = plugin_files
    self.kwargs = kwargs

    # populated later, during the actual build
    self.builder = None
    self.build_logs = []
    self.built_image_inspect = None
    self._base_image_inspect = None

    self.pulled_base_images = set()

    # When an image is exported into tarball, it can then be processed by various plugins.
    # Each plugin that transforms the image should save it as a new file and append it to
    # the end of exported_image_sequence. Other plugins should then operate with last
    # member of this structure. Example:
    # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
    # You can use util.get_exported_image_metadata to create a dict to append to this list.
    self.exported_image_sequence = []

    self.tag_conf = TagConf()
    self.push_conf = PushConf()

    # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
    # "path/to/file" -> "content"
    self.files = {}

    self.openshift_build_selflink = openshift_build_selflink

    if kwargs:
        logger.warning("unprocessed keyword arguments: %s", kwargs)
def __init__(self, source, image, prebuild_plugins=None, prepublish_plugins=None,
             postbuild_plugins=None, exit_plugins=None, plugin_files=None,
             openshift_build_selflink=None, client_version=None,
             buildstep_plugins=None, **kwargs):
    """
    Initialize the build workflow state from the caller-supplied configuration.

    :param source: dict, where/how to get source code to put in image
    :param image: str, tag for built image ([registry/]image_name[:tag])
    :param prebuild_plugins: dict, arguments for pre-build plugins
    :param prepublish_plugins: dict, arguments for test-build plugins
    :param postbuild_plugins: dict, arguments for post-build plugins
    :param exit_plugins: dict, arguments for exit plugins
    :param plugin_files: list of str, load plugins also from these files
    :param openshift_build_selflink: str, link to openshift build (if we're actually
                                     running on openshift) without the actual
                                     hostname/IP address
    :param client_version: str, osbs-client version used to render build json
    :param buildstep_plugins: dict, arguments for build-step plugins
    """
    # materialize the source spec into a Source object backed by a fresh tempdir
    self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
    self.image = image

    # raw plugin configuration, consumed later by the plugin runners
    self.prebuild_plugins_conf = prebuild_plugins
    self.buildstep_plugins_conf = buildstep_plugins
    self.prepublish_plugins_conf = prepublish_plugins
    self.postbuild_plugins_conf = postbuild_plugins
    self.exit_plugins_conf = exit_plugins

    # per-phase plugin results
    self.prebuild_results = {}
    self.buildstep_result = {}
    self.postbuild_results = {}
    self.prepub_results = {}
    self.exit_results = {}
    # overall outcome; starts failed until a build actually succeeds
    self.build_result = BuildResult(fail_reason="not built")

    # scratch space and timing/error bookkeeping shared between plugins
    self.plugin_workspace = {}
    self.plugins_timestamps = {}
    self.plugins_durations = {}
    self.plugins_errors = {}
    self.autorebuild_canceled = False
    self.build_canceled = False
    self.plugin_failed = False
    self.plugin_files = plugin_files
    self.kwargs = kwargs

    # populated later, during the actual build
    self.builder = None
    self.built_image_inspect = None
    self.layer_sizes = []
    self._base_image_inspect = None

    self.pulled_base_images = set()

    # When an image is exported into tarball, it can then be processed by various plugins.
    # Each plugin that transforms the image should save it as a new file and append it to
    # the end of exported_image_sequence. Other plugins should then operate with last
    # member of this structure. Example:
    # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
    # You can use util.get_exported_image_metadata to create a dict to append to this list.
    self.exported_image_sequence = []

    self.tag_conf = TagConf()
    self.push_conf = PushConf()

    # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
    # "path/to/file" -> "content"
    self.files = {}

    self.openshift_build_selflink = openshift_build_selflink

    # List of RPMs that go into the final result, as per rpm_util.parse_rpm_output
    self.image_components = None

    if client_version:
        logger.debug("build json was built by osbs-client %s", client_version)

    if kwargs:
        logger.warning("unprocessed keyword arguments: %s", kwargs)
def __init__(self, source, image, target_registries=None, prebuild_plugins=None,
             prepublish_plugins=None, postbuild_plugins=None, exit_plugins=None,
             plugin_files=None, target_registries_insecure=False, **kwargs):
    """
    Set up a build workflow: resolve the source, initialize plugin state,
    and register any target registries for pushing the result.

    :param source: dict, where/how to get source code to put in image
    :param image: str, tag for built image ([registry/]image_name[:tag])
    :param target_registries: list of str, list of registries to push image to
                              (might change in future)
    :param prebuild_plugins: dict, arguments for pre-build plugins
    :param prepublish_plugins: dict, arguments for test-build plugins
    :param postbuild_plugins: dict, arguments for post-build plugins
    :param exit_plugins: dict, arguments for exit plugins
    :param plugin_files: list of str, load plugins also from these files
    :param target_registries_insecure: bool, allow connecting to target registries
                                       over plain http
    """
    # resolve source immediately; a fresh temp dir keeps checkouts isolated per workflow
    self.source = get_source_instance_for(source, tmpdir=tempfile.mkdtemp())
    self.image = image

    # plugin configuration, one attribute per build phase
    self.prebuild_plugins_conf = prebuild_plugins
    self.prepublish_plugins_conf = prepublish_plugins
    self.postbuild_plugins_conf = postbuild_plugins
    self.exit_plugins_conf = exit_plugins

    # per-phase plugin results, keyed by plugin name
    self.prebuild_results = {}
    self.postbuild_results = {}
    self.build_failed = False
    self.plugin_failed = False
    self.plugin_files = plugin_files
    self.kwargs = kwargs
    self.builder = None
    self.build_logs = None
    self.built_image_inspect = None
    self._base_image_inspect = None
    self.pulled_base_images = set()
    # When an image is exported into tarball, it can then be processed by various plugins.
    # Each plugin that transforms the image should save it as a new file and append it to
    # the end of exported_image_sequence. Other plugins should then operate with last
    # member of this structure. Example:
    # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
    # You can use util.get_exported_image_metadata to create a dict to append to this list.
    self.exported_image_sequence = []
    self.tag_conf = TagConf()
    self.push_conf = PushConf()
    # register push targets up front so later phases only consult push_conf
    if target_registries:
        self.push_conf.add_docker_registries(
            target_registries, insecure=target_registries_insecure)

    # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
    # "path/to/file" -> "content"
    self.files = {}

    if kwargs:
        logger.warning("unprocessed keyword arguments: %s", kwargs)
def __init__(self, image, source=None, prebuild_plugins=None, prepublish_plugins=None,
             postbuild_plugins=None, exit_plugins=None, plugin_files=None,
             openshift_build_selflink=None, client_version=None,
             buildstep_plugins=None, **kwargs):
    """
    Set up a build workflow: resolve (or stub) the source and initialize all
    state that plugins read and write during the build.

    :param source: dict, where/how to get source code to put in image
    :param image: str, tag for built image ([registry/]image_name[:tag])
    :param prebuild_plugins: list of dicts, arguments for pre-build plugins
    :param prepublish_plugins: list of dicts, arguments for test-build plugins
    :param postbuild_plugins: list of dicts, arguments for post-build plugins
    :param exit_plugins: list of dicts, arguments for exit plugins
    :param plugin_files: list of str, load plugins also from these files
    :param openshift_build_selflink: str, link to openshift build (if we're actually running
    on openshift) without the actual hostname/IP address
    :param client_version: str, osbs-client version used to render build json
    :param buildstep_plugins: list of dicts, arguments for build-step plugins
    """
    # a fresh temp dir keeps checkouts isolated per workflow; when no source is
    # given, fall back to a placeholder so plugins can still assume self.source exists
    tmp_dir = tempfile.mkdtemp()
    if source is None:
        self.source = DummySource(None, None, tmpdir=tmp_dir)
    else:
        self.source = get_source_instance_for(source, tmpdir=tmp_dir)
    self.image = image

    # plugin configuration, one attribute per build phase
    self.prebuild_plugins_conf = prebuild_plugins
    self.buildstep_plugins_conf = buildstep_plugins
    self.prepublish_plugins_conf = prepublish_plugins
    self.postbuild_plugins_conf = postbuild_plugins
    self.exit_plugins_conf = exit_plugins

    # per-phase plugin results, keyed by plugin name
    self.prebuild_results = {}
    self.buildstep_result = {}
    self.postbuild_results = {}
    self.prepub_results = {}
    self.exit_results = {}
    # overall outcome; starts failed until a buildstep plugin replaces it
    self.build_result = BuildResult(fail_reason="not built")
    # scratch space plugins can use to share state with each other
    self.plugin_workspace = {}
    # bookkeeping about plugin execution, keyed by plugin name
    self.plugins_timestamps = {}
    self.plugins_durations = {}
    self.plugins_errors = {}
    self.autorebuild_canceled = False
    self.build_canceled = False
    self.plugin_failed = False
    self.plugin_files = plugin_files
    # tracks filesystem usage during the build
    self.fs_watcher = FSWatcher()
    self.kwargs = kwargs
    self.builder = None
    self.built_image_inspect = None
    self.layer_sizes = []
    self.default_image_build_method = CONTAINER_DEFAULT_BUILD_METHOD
    self.storage_transport = DOCKER_STORAGE_TRANSPORT_NAME

    # list of images pulled during the build, to be deleted after the build
    self.pulled_base_images = set()
    # When an image is exported into tarball, it can then be processed by various plugins.
    # Each plugin that transforms the image should save it as a new file and append it to
    # the end of exported_image_sequence. Other plugins should then operate with last
    # member of this structure. Example:
    # [{'path': '/tmp/foo.tar', 'size': 12345678, 'md5sum': '<md5>', 'sha256sum': '<sha256>'}]
    # You can use util.get_exported_image_metadata to create a dict to append to this list.
    self.exported_image_sequence = []
    self.tag_conf = TagConf()
    self.push_conf = PushConf()

    # mapping of downloaded files; DON'T PUT ANYTHING BIG HERE!
    # "path/to/file" -> "content"
    self.files = {}

    self.openshift_build_selflink = openshift_build_selflink

    # List of RPMs that go into the final result, as per utils.rpm.parse_rpm_output
    self.image_components = None

    # List of all yum repos. The provided repourls might be changed (by resolve_composes) when
    # inheritance is enabled. This property holds the updated list of repos, allowing
    # post-build plugins (such as koji_import) to record them.
    self.all_yum_repourls = None

    # info about pre-declared build, build-id and token
    self.reserved_build_id = None
    self.reserved_token = None
    self.triggered_after_koji_task = None

    # koji source-container build metadata, filled in by plugins
    self.koji_source_nvr = {}
    self.koji_source_source_url = None
    self.koji_source_manifest = None

    # Plugins can store info here using the @annotation, @annotation_map,
    # @label and @label_map decorators from atomic_reactor.metadata
    self.annotations = {}
    self.labels = {}

    if client_version:
        logger.debug("build json was built by osbs-client %s", client_version)
    if kwargs:
        logger.warning("unprocessed keyword arguments: %s", kwargs)
def test_broken_source_config_file(self):
    """A source whose config file is malformed raises ValidationError on .config access."""
    source_params = {'provider': 'path', 'uri': SOURCE_CONFIG_ERROR_PATH}
    broken_source = get_source_instance_for(source_params)
    with pytest.raises(ValidationError):
        broken_source.config  # pylint: disable=pointless-statement; property access triggers parsing
def test_base_image_inspect(tmpdir, source_params, parents_pulled, insecure, base_exist):
    """Inspecting the base image succeeds when it exists and raises when it doesn't.

    Covers the matrix of: parents already pulled locally vs. fetched from the
    registry, insecure vs. secure registry access, and base image present vs.
    missing (parameters are presumably supplied by a pytest.mark.parametrize
    decorator outside this view — confirm against the full file).
    """
    if MOCK:
        mock_docker()
    source_params.update({'tmpdir': str(tmpdir)})
    s = get_source_instance_for(source_params)
    b = InsideBuilder(s, '')
    b.tasker.build_method = default_build_method
    b.parents_pulled = parents_pulled
    # FROM scratch has no base image to be "missing", so force the exists path
    if b.dockerfile_images.base_from_scratch:
        base_exist = True
    registry_name = "registry.example.com"
    # re-point every non-scratch parent at our fake registry so the inspect
    # call goes through the pull_registries config set up below
    original_parents = b.dockerfile_images.original_parents
    new_parents = []
    for parent in original_parents:
        if parent == 'scratch':
            new_parents.append(parent)
        else:
            mod_parent = ImageName.parse(parent)
            mod_parent.registry = registry_name
            new_parents.append(mod_parent.to_str())
    b.dockerfile_images = DockerfileImages(new_parents)
    b.pull_registries = {registry_name:
                         {'insecure': insecure, 'dockercfg_path': str(tmpdir)}}
    if base_exist:
        if b.dockerfile_images.base_from_scratch:
            # scratch base: inspect yields an empty dict rather than real metadata
            built_inspect = b.base_image_inspect
            assert built_inspect == {}
        else:
            if not parents_pulled:
                # not pulled locally -> inspect must go to the registry helper
                (flexmock(atomic_reactor.util)
                    .should_receive('get_inspect_for_image')
                    .with_args(b.dockerfile_images.base_image,
                               b.dockerfile_images.base_image.registry,
                               insecure, str(tmpdir))
                    .and_return({'Id': 123}))
            built_inspect = b.base_image_inspect
            assert built_inspect is not None
            assert built_inspect["Id"] is not None
    else:
        if parents_pulled or b.dockerfile_images.custom_base_image:
            # local docker inspect path: a 404 from docker surfaces as KeyError
            response = flexmock(content="not found", status_code=404)
            (flexmock(docker.APIClient)
                .should_receive('inspect_image')
                .and_raise(docker.errors.NotFound, "xyz", response))
            with pytest.raises(KeyError):
                b.base_image_inspect  # pylint: disable=pointless-statement; is a property
        else:
            # registry inspect path: helper errors propagate unchanged
            (flexmock(atomic_reactor.util)
                .should_receive('get_inspect_for_image')
                .and_raise(NotImplementedError))
            with pytest.raises(NotImplementedError):
                b.base_image_inspect  # pylint: disable=pointless-statement; is a property
def test_retrieves_source_config_file(self):
    """A valid source config file is parsed and exposes image_build_method."""
    good_source = get_source_instance_for({'provider': 'path', 'uri': DOCKERFILE_OK_PATH})
    assert good_source.config
    assert good_source.config.image_build_method == 'imagebuilder'