def workflow(tmpdir):
    """Fixture: a DockerBuildWorkflow wired with a stub source/builder and
    repo-config, reactor-config, build-json, cachito and koji mocks applied."""
    wf = DockerBuildWorkflow(TEST_IMAGE, source={"provider": "git", "uri": "asd"})
    # Stash the tmpdir in workflow so it can be used later
    wf._tmpdir = tmpdir

    class MockSource(StubSource):
        def __init__(self, workdir):
            super(MockSource, self).__init__()
            self.workdir = workdir

    wf.source = MockSource(str(tmpdir))

    stub_builder = StubInsideBuilder().for_workflow(wf)
    stub_builder.set_df_path(str(tmpdir))
    stub_builder.tasker = flexmock()
    wf.builder = flexmock(stub_builder)
    wf.buildstep_plugins_conf = [{'name': PLUGIN_BUILD_ORCHESTRATE_KEY}]

    mock_repo_config(wf)
    mock_reactor_config(wf)
    mock_build_json()
    mock_cachito_api(wf)
    mock_koji()
    return wf
def mock_environment(tmpdir, primary_images=None, annotations=None):
    """Return a ``(tasker, workflow)`` pair with stub builder state, tag
    configuration, and a BuildResult carrying *annotations*."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    wf = DockerBuildWorkflow(source=SOURCE)

    parent_id = '123456parent-id'
    wf._base_image_inspect = {'Id': parent_id}
    wf.builder = StubInsideBuilder()
    wf.builder.image_id = '123456imageid'
    wf.builder.base_image = ImageName(repo='Fedora', tag='22')
    wf.builder.source = StubInsideBuilder()
    wf.builder.built_image_info = {'ParentId': parent_id}
    wf.builder.source.dockerfile_path = None
    wf.builder.source.path = None
    wf.tag_conf = TagConf()

    if primary_images:
        # Only tags containing a dash count as primary images.
        for image in primary_images:
            if '-' in ImageName.parse(image).tag:
                wf.tag_conf.add_primary_image(image)
        wf.tag_conf.add_unique_image(primary_images[0])

    wf.tag_conf.add_floating_image('namespace/httpd:floating')
    wf.build_result = BuildResult(image_id='123456',
                                  annotations=annotations or {})
    return tasker, wf
def mock_workflow(tmpdir, for_orchestrator=False, config_map=None,
                  default_si=DEFAULT_SIGNING_INTENT):
    """Build a stubbed workflow; when *for_orchestrator* is set, configure the
    orchestrate buildstep plugin as well."""
    wf = DockerBuildWorkflow(TEST_IMAGE, source={"provider": "git", "uri": "asd"})
    wf.source = MockSource(str(tmpdir))

    stub_builder = StubInsideBuilder().for_workflow(wf)
    stub_builder.set_df_path(str(tmpdir))
    stub_builder.tasker = flexmock()
    wf.builder = flexmock(stub_builder)

    if for_orchestrator:
        wf.buildstep_plugins_conf = [
            {'name': constants.PLUGIN_BUILD_ORCHESTRATE_KEY},
        ]

    mock_reactor_config(wf, tmpdir, data=config_map, default_si=default_si)
    return wf
def mock_environment(tmpdir, workflow, primary_images=None,
                     floating_images=None, manifest_results=None,
                     annotations=None):
    """Populate *workflow* with stub builder state, tag configuration,
    a BuildResult and optional group-manifests postbuild results; return
    ``(tasker, workflow)``."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()

    parent_id = '123456parent-id'
    workflow._base_image_inspect = {'Id': parent_id}
    workflow.builder = StubInsideBuilder()
    workflow.builder.image_id = '123456imageid'
    workflow.builder.base_image = ImageName(repo='Fedora', tag='22')
    workflow.builder.source = StubInsideBuilder()
    workflow.builder.built_image_info = {'ParentId': parent_id}
    workflow.builder.source.dockerfile_path = None
    workflow.builder.source.path = None

    if primary_images:
        # Only tags containing a dash count as primary images.
        for image in primary_images:
            if '-' in ImageName.parse(image).tag:
                workflow.tag_conf.add_primary_image(image)
        workflow.tag_conf.add_unique_image(primary_images[0])

    if floating_images:
        workflow.tag_conf.add_floating_images(floating_images)

    workflow.build_result = BuildResult(image_id='123456',
                                        annotations=annotations or {})
    workflow.postbuild_results = {}
    if manifest_results:
        workflow.postbuild_results[PLUGIN_GROUP_MANIFESTS_KEY] = manifest_results
    return tasker, workflow
def workflow(tmpdir, user_params):
    """Fixture: a DockerBuildWorkflow with a stub source/builder and
    repo-config, reactor-config, build-json and koji mocks applied."""
    wf = DockerBuildWorkflow(source=MOCK_SOURCE)
    # Stash the tmpdir in workflow so it can be used later
    wf._tmpdir = tmpdir

    class MockSource(StubSource):
        def __init__(self, workdir):
            super(MockSource, self).__init__()
            self.workdir = workdir

    wf.source = MockSource(str(tmpdir))

    stub_builder = StubInsideBuilder().for_workflow(wf)
    stub_builder.set_df_path(str(tmpdir))
    stub_builder.tasker = flexmock()
    wf.builder = flexmock(stub_builder)
    wf.buildstep_plugins_conf = [{'name': PLUGIN_BUILD_ORCHESTRATE_KEY}]

    mock_repo_config(wf)
    mock_reactor_config(wf)
    mock_build_json()
    mock_koji()
    return wf
def mock_workflow(tmpdir):
    """Return a minimal workflow whose builder points at *tmpdir*."""
    wf = DockerBuildWorkflow({"provider": "git", "uri": "asd"}, TEST_IMAGE)
    wf.source = StubSource()

    stub_builder = StubInsideBuilder().for_workflow(wf)
    stub_builder.set_df_path(str(tmpdir))
    stub_builder.tasker = flexmock()
    wf.builder = flexmock(stub_builder)
    return wf
def mock_workflow(tmpdir):
    """Return a minimal workflow (keyword *source*) with a stubbed builder."""
    wf = DockerBuildWorkflow(TEST_IMAGE, source={"provider": "git", "uri": "asd"})
    wf.source = StubSource()

    stub_builder = StubInsideBuilder().for_workflow(wf)
    stub_builder.set_df_path(str(tmpdir))
    stub_builder.tasker = flexmock()
    wf.builder = flexmock(stub_builder)
    return wf
def mock_workflow(tmpdir, for_orchestrator=False):
    """Return a stubbed workflow; optionally configured for the orchestrator
    buildstep plugin."""
    wf = DockerBuildWorkflow(TEST_IMAGE, source={"provider": "git", "uri": "asd"})
    wf.source = StubSource()

    stub_builder = StubInsideBuilder().for_workflow(wf)
    stub_builder.set_df_path(str(tmpdir))
    stub_builder.tasker = flexmock()
    wf.builder = flexmock(stub_builder)

    if for_orchestrator:
        wf.buildstep_plugins_conf = [{'name': PLUGIN_BUILD_ORCHESTRATE_KEY}]
    return wf
def prepare(self, df_path, inherited_user='', hide_files=None, parent_images=None):
    """Return ``(tasker, workflow)`` for *df_path*, registering inspection
    data for each parent image and, when given, a ``hide_files`` reactor
    configuration."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow("test-image", source=SOURCE)
    workflow.source = MockSource(df_path)

    stub_builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder = stub_builder.set_df_path(df_path)

    # Each listed parent reports the inherited user via docker inspect.
    for parent in parent_images or []:
        workflow.builder.set_parent_inspection_data(parent, {
            INSPECT_CONFIG: {
                'User': inherited_user,
            },
        })

    if hide_files is not None:
        reactor_config = ReactorConfig({
            'version': 1,
            'hide_files': hide_files,
        })
        workflow.plugin_workspace[ReactorConfigPlugin.key] = {
            WORKSPACE_CONF_KEY: reactor_config,
        }

    return tasker, workflow
def test_dangling_volumes_removed(docker_tasker, caplog):
    """The rpmqa postbuild plugin removes dangling volumes, ignoring the
    conflict-flavoured removal error."""
    mock_docker()
    workflow = DockerBuildWorkflow(TEST_IMAGE, source=SOURCE)
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder.set_base_from_scratch(False)

    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {'image_id': TEST_IMAGE}}])
    runner.run()

    # Bucket captured log messages by level for easier assertions.
    logs = {}
    for record in caplog.records:
        logs.setdefault(record.levelno, []).append(record.message)

    assert "container_id = 'f8ee920b2db5e802da2583a13a4edbf0523ca5fff6b6d6454c1fd6db5f38014d'" \
        in logs[logging.DEBUG]

    expected_volumes = [u'test', u'conflict_exception', u'real_exception']
    assert "volumes = {}".format(expected_volumes) in logs[logging.DEBUG]
    for volume in expected_volumes:
        assert "removing volume '{}'".format(volume) in logs[logging.INFO]
    assert 'ignoring a conflict when removing volume conflict_exception' in logs[
        logging.DEBUG]
def test_dangling_volumes_removed(docker_tasker, request):  # noqa:F811
    """Legacy variant asserting volume-removal logging via FakeLogger."""
    # Swap in a fake logger; restore the real one when the test finishes.
    fake_logger = FakeLogger()
    existing_logger = atomic_reactor.core.logger

    def restore_logger():
        atomic_reactor.core.logger = existing_logger

    request.addfinalizer(restore_logger)
    atomic_reactor.core.logger = fake_logger

    mock_docker()
    workflow = DockerBuildWorkflow(SOURCE, TEST_IMAGE)
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {'image_id': TEST_IMAGE}}])
    runner.run()

    assert ("container_id = '%s'",
            u'f8ee920b2db5e802da2583a13a4edbf0523ca5fff6b6d6454c1fd6db5f38014d') \
        in fake_logger.debugs

    expected_volumes = [u'test', u'conflict_exception', u'real_exception']
    assert ("volumes = %s", expected_volumes) in fake_logger.debugs
    assert ("removing volume '%s'", u'test') in fake_logger.infos
    assert ("removing volume '%s'", u'conflict_exception') in fake_logger.infos
    assert ("removing volume '%s'", u'real_exception') in fake_logger.infos
    assert ('ignoring a conflict when removing volume %s', 'conflict_exception') in \
        fake_logger.debugs
def test_rpmqa_plugin(caplog, docker_tasker, base_from_scratch,
                      remove_container_error, ignore_autogenerated):
    """rpmqa returns the package list (optionally filtering autogenerated GPG
    keys) and is skipped entirely for FROM-scratch builds."""
    should_raise_error = {}
    if remove_container_error:
        should_raise_error['remove_container'] = None
    mock_docker(should_raise_error=should_raise_error)

    workflow = DockerBuildWorkflow(TEST_IMAGE, source=SOURCE)
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder.set_base_from_scratch(base_from_scratch)

    flexmock(docker.APIClient, logs=mock_logs)
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{
            "name": PostBuildRPMqaPlugin.key,
            "args": {
                'image_id': TEST_IMAGE,
                "ignore_autogenerated_gpg_keys": ignore_autogenerated["ignore"],
            },
        }])
    results = runner.run()

    if base_from_scratch:
        log_msg = "from scratch can't run rpmqa"
        assert log_msg in caplog.text
        assert results[PostBuildRPMqaPlugin.key] is None
        assert workflow.image_components is None
    else:
        assert results[
            PostBuildRPMqaPlugin.key] == ignore_autogenerated["package_list"]
        assert workflow.image_components == parse_rpm_output(
            ignore_autogenerated["package_list"])
def test_yuminject_plugin_wrapped(tmpdir, docker_tasker):
    """InjectYumRepoPlugin with wrap_commands wraps each RUN so the injected
    repo file exists only for the duration of the command."""
    df_content = """\
FROM fedora
RUN yum install -y python-django
CMD blabla"""
    df = df_parser(str(tmpdir))
    df.content = df_content

    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    stub_builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder = stub_builder.set_df_path(df.dockerfile_path)

    metalink = 'https://mirrors.fedoraproject.org/metalink?repo=fedora-$releasever&arch=$basearch'
    workflow.files[os.path.join(YUM_REPOS_DIR, DEFAULT_YUM_REPOFILE_NAME)] = \
        render_yum_repo(OrderedDict((('name', 'my-repo'),
                                     ('metalink', metalink),
                                     ('enabled', '1'),
                                     ('gpgcheck', '0')), ))
    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': InjectYumRepoPlugin.key,
        'args': {
            "wrap_commands": True
        }
    }])
    runner.run()
    assert InjectYumRepoPlugin.key is not None

    expected_output = """FROM fedora
RUN printf "[my-repo]\nname=my-repo\nmetalink=https://mirrors.fedoraproject.org/metalink?repo=fedora-\\$releasever&arch=\\$basearch\nenabled=1\ngpgcheck=0\n" >/etc/yum.repos.d/atomic-reactor-injected.repo && yum install -y python-django && yum clean all && rm -f /etc/yum.repos.d/atomic-reactor-injected.repo
CMD blabla"""  # noqa
    assert df.content == expected_output
def get_builder(workflow, base_from_scratch=False):
    """Attach a StubInsideBuilder to *workflow* and return it; use a
    'scratch' dockerfile image when *base_from_scratch* is set."""
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    images = ['scratch'] if base_from_scratch else []
    workflow.builder.set_dockerfile_images(images)
    return workflow.builder
def __init__(self):
    """Create a stub workflow/builder pair; no phase or plugin chosen yet."""
    wf = DockerBuildWorkflow(source=MOCK_SOURCE)
    wf.source = StubSource()
    wf.builder = StubInsideBuilder().for_workflow(wf)
    wf.builder.tasker = flexmock()
    self.workflow = wf
    self._phase = None
    self._plugin_key = None
def mock_workflow():
    """
    Provide just enough structure that workflow can be used to run the plugin.
    Defaults below are solely to enable that purpose; tests where those values
    matter should provide their own.
    """
    wf = DockerBuildWorkflow(SOURCE, "mock:default_built")
    wf.source = StubSource()

    stub_builder = StubInsideBuilder().for_workflow(wf)
    stub_builder.set_df_path('/mock-path')
    # Map the parent reference onto a resolved base-image name.
    base_image_name = ImageName.parse("mock:tag")
    stub_builder.parent_images[ImageName.parse("mock:base")] = base_image_name
    stub_builder.base_image = base_image_name
    stub_builder.tasker = flexmock()

    wf.builder = flexmock(stub_builder)
    return wf
def prepare(df_path):
    """Return ``(tasker, workflow)`` with the builder pointed at *df_path*."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow("test-image", source=SOURCE)
    workflow.source = StubSource()

    stub_builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder = stub_builder.set_df_path(df_path)
    return tasker, workflow
def prepare():
    """Return ``(tasker, workflow)`` with a mocked koji client session."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    # Route all koji calls through the mocked session/path-info.
    session = MockedClientSession(hub='', opts=None)
    workflow.koji_session = session
    flexmock(koji, ClientSession=session, PathInfo=MockedPathInfo)
    return tasker, workflow
def test_add_help_generate_metadata(tmpdir, docker_tasker, workflow, filename):
    """AddHelpPlugin prepends man-page metadata (name, maintainer, date) to
    the help markdown before converting it with go-md2man."""
    df_content = dedent("""\
        FROM fedora
        LABEL name='test' \\
            maintainer='me'
        """)
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = StubInsideBuilder().set_df_path(df.dockerfile_path)

    help_markdown_path = os.path.join(workflow.builder.df_dir, filename)
    generate_a_file(help_markdown_path, "foo")
    help_man_path = os.path.join(workflow.builder.df_dir,
                                 AddHelpPlugin.man_filename)
    generate_a_file(help_man_path, "bar")

    cmd = [
        'go-md2man',
        '-in={}'.format(help_markdown_path),
        '-out={}'.format(help_man_path),
    ]

    def check_popen(*args, **kwargs):
        assert args[0] == cmd
        return MockedPopen()

    (flexmock(subprocess).should_receive("Popen").once().replace_with(
        check_popen))

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': AddHelpPlugin.key,
        'args': {
            'help_file': filename
        }
    }])
    runner.run()

    lines = ""
    with open(help_markdown_path) as f:
        lines = "".join(f.readlines())

    # '%%' collapses to a literal '%' through the %-formatting below.
    example = dedent("""\
        %% test (1) Container Image Pages
        %% me
        %% %s
        foo""") % dt.fromtimestamp(atomic_reactor_start_time).strftime(
            format="%B %-d, %Y")
    assert lines == dedent(example)
def workflow(tmpdir, user_params):
    """Fixture: workflow whose builder carries a resolved base image with
    inspection data and a v2-list parent digest."""
    if MOCK:
        mock_docker()

    wf = DockerBuildWorkflow(source=MOCK_SOURCE)
    wf.source = MockSource(tmpdir)
    wf.builder = StubInsideBuilder().for_workflow(wf)
    wf.builder.set_dockerfile_images(['base:latest'])
    wf.builder.dockerfile_images['base:latest'] = ImageName.parse('base:stubDigest')
    wf.builder.set_image('image')

    # Same label set is reported for both the built and the parent image.
    base_inspect = {INSPECT_CONFIG: {'Labels': BASE_IMAGE_LABELS.copy()}}
    wf.builder.set_inspection_data(base_inspect)
    wf.builder.set_parent_inspection_data('base:stubDigest', base_inspect)
    wf.builder.parent_images_digests = {'base:latest': {V2_LIST: 'stubDigest'}}
    return wf
def prepare(df_path, inherited_user=''):
    """Return ``(tasker, workflow)`` whose builder knows the dockerfile's
    parent images and reports *inherited_user* via inspection data."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(source=SOURCE)
    workflow.source = StubSource()

    stub_builder = StubInsideBuilder().for_workflow(workflow)
    stub_builder = stub_builder.set_dockerfile_images(
        df_parser(df_path).parent_images)
    stub_builder = stub_builder.set_df_path(df_path)
    workflow.builder = stub_builder.set_inspection_data({
        INSPECT_CONFIG: {
            'User': inherited_user,
        },
    })
    return tasker, workflow
def test_rpmqa_plugin_exception(docker_tasker):  # noqa
    """A failure while reading container logs surfaces as PluginFailedException."""
    mock_docker()
    workflow = DockerBuildWorkflow(SOURCE, TEST_IMAGE)
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    flexmock(docker.APIClient, logs=mock_logs_raise)
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {'image_id': TEST_IMAGE}}])
    with pytest.raises(PluginFailedException):
        runner.run()
def prepare(scratch=False):
    """Return ``(tasker, workflow)`` with HTTP requests mocked out and the
    user-params scratch flag set from *scratch*."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(source={"provider": "git", "uri": DOCKERFILE_GIT})
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder.set_dockerfile_images([])
    workflow.user_params['scratch'] = scratch

    # Any HTTP GET succeeds and yields the canned repo content.
    (flexmock(requests.Response, content=repocontent)
        .should_receive('raise_for_status')
        .and_return(None))
    (flexmock(requests.Session, get=lambda *_: requests.Response()))
    mock_get_retry_session()
    return tasker, workflow
def prepare():
    """Return ``(tasker, workflow)`` with HTTP requests mocked out."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(
        {"provider": "git", "uri": DOCKERFILE_GIT}, "test-image")
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    # Any HTTP GET succeeds and yields the canned repo content.
    (flexmock(requests.Response, content=repocontent)
        .should_receive('raise_for_status')
        .and_return(None))
    (flexmock(requests.Session, get=lambda *_: requests.Response()))
    mock_get_retry_session()
    return tasker, workflow
def prepare(df_path, inherited_user=''):
    """Return ``(tasker, workflow)`` whose builder reports *inherited_user*
    via inspection data, with HTTP requests mocked out."""
    if MOCK:
        mock_docker()

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")

    stub_builder = StubInsideBuilder().for_workflow(workflow)
    stub_builder = stub_builder.set_df_path(df_path)
    workflow.builder = stub_builder.set_inspection_data({
        INSPECT_CONFIG: {
            'User': inherited_user,
        },
    })

    # Any HTTP GET succeeds and yields the canned repo content.
    (flexmock(requests.Response, content=repocontent)
        .should_receive('raise_for_status')
        .and_return(None))
    (flexmock(requests.Session, get=lambda *_: requests.Response()))
    return tasker, workflow
def test_empty_logs_retry(docker_tasker):  # noqa
    """rpmqa retries on initially-empty logs and still returns the package list."""
    mock_docker()
    workflow = DockerBuildWorkflow(SOURCE, TEST_IMAGE)
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    flexmock(docker.APIClient, logs=mock_logs_retry)
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {'image_id': TEST_IMAGE}}])
    results = runner.run()

    assert results[PostBuildRPMqaPlugin.key] == PACKAGE_LIST
    assert workflow.image_components == parse_rpm_output(PACKAGE_LIST)
def mock_workflow():
    """
    Provide just enough structure that workflow can be used to run the plugin.
    Defaults below are solely to enable that purpose; tests where those values
    matter should provide their own.
    """
    wf = DockerBuildWorkflow(source=SOURCE)
    wf.source = StubSource()

    stub_builder = StubInsideBuilder().for_workflow(wf)
    stub_builder.set_df_path('/mock-path')
    stub_builder.set_dockerfile_images(['mock:base'])
    # Resolve the parent reference to a concrete tagged image.
    stub_builder.dockerfile_images['mock:base'] = ImageName.parse("mock:tag")
    stub_builder.tasker = flexmock()

    wf.builder = flexmock(stub_builder)
    return wf
def test_download_remote_source(self, docker_tasker):
    """The plugin downloads the remote-source tarball, unpacks it into the
    source workdir, and exposes the configured build args."""
    workflow = DockerBuildWorkflow(
        TEST_IMAGE,
        source={
            "provider": "git",
            "uri": "asd"
        },
    )
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    filename = 'source.tar.gz'
    url = 'https://example.com/dir/{}'.format(filename)

    # Make a compressed tarfile with a single file 'abc'
    member = 'abc'
    abc_content = b'def'
    archive = BytesIO()
    with tarfile.open(mode='w:gz', fileobj=archive) as tf:
        info = tarfile.TarInfo(name=member)
        info.size = len(abc_content)
        tf.addfile(info, fileobj=BytesIO(abc_content))

    # GET from the url returns the compressed tarfile
    responses.add(responses.GET, url, body=archive.getvalue())

    buildargs = {'spam': 'maps'}
    plugin = DownloadRemoteSourcePlugin(docker_tasker,
                                        workflow,
                                        remote_source_url=url,
                                        remote_source_build_args=buildargs)
    result = plugin.run()

    # The return value should be the path to the downloaded archive itself
    with open(result, 'rb') as f:
        assert f.read() == archive.getvalue()

    # Expect a file 'abc' in the workdir
    with open(os.path.join(workflow.source.workdir, member), 'rb') as f:
        assert f.read() == abc_content

    # Expect buildargs to have been set
    for arg, value in buildargs.items():
        assert workflow.builder.buildargs[arg] == value
def test_add_help_plugin(tmpdir, docker_tasker, workflow, filename):
    """AddHelpPlugin converts the help file with go-md2man, injects an ADD
    instruction for the man page, and records the help file annotation."""
    df_content = dedent("""
        FROM fedora
        RUN yum install -y python-django
        CMD blabla""")
    df = df_parser(str(tmpdir))
    df.content = df_content
    workflow.builder = StubInsideBuilder().set_df_path(df.dockerfile_path)

    help_markdown_path = os.path.join(workflow.builder.df_dir, filename)
    generate_a_file(help_markdown_path, "foo")
    help_man_path = os.path.join(workflow.builder.df_dir,
                                 AddHelpPlugin.man_filename)
    generate_a_file(help_man_path, "bar")

    cmd = [
        'go-md2man',
        '-in={}'.format(help_markdown_path),
        '-out={}'.format(help_man_path),
    ]

    def check_popen(*args, **kwargs):
        assert args[0] == cmd
        return MockedPopen()

    (flexmock(subprocess).should_receive("Popen").once().replace_with(
        check_popen))

    runner = PreBuildPluginsRunner(docker_tasker, workflow, [{
        'name': AddHelpPlugin.key,
        'args': {
            'help_file': filename
        }
    }])
    runner.run()

    assert df.content == dedent("""
        FROM fedora
        RUN yum install -y python-django
        ADD %s /%s
        CMD blabla""" % (AddHelpPlugin.man_filename, AddHelpPlugin.man_filename))
    assert workflow.annotations['help_file'] == filename
def test_empty_logs_failure(docker_tasker):  # noqa
    """Persistently empty container logs cause the rpmqa plugin to fail."""
    mock_docker()
    workflow = DockerBuildWorkflow(SOURCE, TEST_IMAGE)
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    flexmock(docker.APIClient, logs=mock_logs_empty)
    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key, "args": {'image_id': TEST_IMAGE}}])

    with pytest.raises(PluginFailedException) as exc_info:
        runner.run()
    assert 'Unable to gather list of installed packages in container' in str(
        exc_info.value)
def prepare(scratch=False):
    """Return ``(tasker, workflow)`` whose build JSON carries a scratch label,
    with HTTP requests mocked out."""
    if MOCK:
        mock_docker()

    # The plugin reads scratch-ness from the build JSON labels.
    build_json = {'metadata': {'labels': {'scratch': scratch}}}
    flexmock(util).should_receive('get_build_json').and_return(build_json)

    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(
        "test-image", source={"provider": "git", "uri": DOCKERFILE_GIT})
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)

    # Any HTTP GET succeeds and yields the canned repo content.
    (flexmock(requests.Response, content=repocontent)
        .should_receive('raise_for_status')
        .and_return(None))
    (flexmock(requests.Session, get=lambda *_: requests.Response()))
    mock_get_retry_session()
    return tasker, workflow
def test_exceed_binary_image_size(image_size_limit, workflow):
    """TagAndPushPlugin raises ExceedsImageSizeError when the summed layer
    sizes exceed the configured binary_image limit; with no limit (or a zero
    limit) the check is skipped and the push proceeds."""
    config = {
        'version': 1,
        'registries': [
            {'url': LOCALHOST_REGISTRY}
        ],
    }
    if image_size_limit is not None:
        config['image_size_limit'] = image_size_limit

    workflow.plugin_workspace[ReactorConfigPlugin.key] = {
        WORKSPACE_CONF_KEY: ReactorConfig(config)
    }
    workflow.builder = StubInsideBuilder()
    workflow.builder.image_id = INPUT_IMAGE
    # fake layer sizes of the test image
    workflow.layer_sizes = [
        {'diff_id': '12345', 'size': 1000},
        {'diff_id': '23456', 'size': 2000},
        {'diff_id': '34567', 'size': 3000},
    ]

    mock_docker()
    plugin = TagAndPushPlugin(DockerTasker(), workflow)

    if image_size_limit is None or image_size_limit['binary_image'] == 0:
        # The plugin should skip the check on image size
        (flexmock(atomic_reactor.plugins.post_tag_and_push)
            .should_receive('get_manifest_digests')
            .and_return(ManifestDigest({
                'v2': 'application/vnd.docker.distribution.manifest.list.v2+json',
            })))
        (flexmock(atomic_reactor.plugins.post_tag_and_push)
            .should_receive('get_config_from_registry'))

        assert workflow.image == plugin.run()[0].repo
    else:
        with pytest.raises(ExceedsImageSizeError):
            plugin.run()