def test_parse_rpm_output():
    """parse_rpm_output: default tag set with signed packages, then a
    reduced tag list with a non-default separator."""
    rpm_lines = [
        "name1;1.0;1;x86_64;0;2000;" + FAKE_SIGMD5.decode() + ";23000;"
        + FAKE_SIGNATURE + ";(none)",
        "name2;2.0;1;x86_64;0;3000;" + FAKE_SIGMD5.decode() + ";24000;"
        + "(none);" + FAKE_SIGNATURE,
        "gpg-pubkey;64dab85d;57d33e22;(none);(none);0;(none);1473461794;(none);(none)",
    ]
    res = parse_rpm_output(rpm_lines)

    def rpm_entry(name, version):
        # Expected component dict shared by both signed packages.
        return {
            'type': 'rpm',
            'name': name,
            'version': version,
            'release': '1',
            'arch': 'x86_64',
            'epoch': 0,
            'sigmd5': FAKE_SIGMD5.decode(),
            'signature': "01234567890abc",
        }

    assert res == [rpm_entry('name1', '1.0'), rpm_entry('name2', '2.0')]

    # Custom field order and separator
    res = parse_rpm_output(["1|1.0|name1"],
                           tags=['RELEASE', 'VERSION', 'NAME'],
                           separator="|")
    assert res == [dict(rpm_entry('name1', '1.0'),
                        arch=None, epoch=None, sigmd5=None, signature=None)]
def test_rpmqa_plugin(docker_tasker, remove_container_error, ignore_autogenerated):
    """Run the rpmqa post-build plugin and verify its component output."""
    if MOCK:
        errors = {'remove_container': None} if remove_container_error else {}
        mock_docker(should_raise_error=errors)

    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    workflow.builder = X()
    workflow.builder.image_id = "asd123"
    workflow.builder.base_image = ImageName(repo='fedora', tag='21')
    workflow.builder.source = X()
    workflow.builder.source.dockerfile_path = "/non/existent"
    workflow.builder.source.path = "/non/existent"

    flexmock(docker.APIClient, logs=mock_logs)
    plugin_conf = [{
        "name": PostBuildRPMqaPlugin.key,
        "args": {'image_id': TEST_IMAGE,
                 "ignore_autogenerated_gpg_keys": ignore_autogenerated["ignore"]},
    }]
    runner = PostBuildPluginsRunner(docker_tasker, workflow, plugin_conf)
    results = runner.run()

    expected = ignore_autogenerated["package_list"]
    assert results[PostBuildRPMqaPlugin.key] == expected
    assert workflow.image_components == parse_rpm_output(expected)
def run(self):
    """Collect installed rpms from the image and record them on the workflow."""
    # Skip when another plugin has already provided the component list.
    if self.workflow.image_components is not None:
        return None
    # An image built from scratch has no rpm database to query.
    if self.workflow.builder.base_from_scratch:
        self.log.info("from scratch can't run rpmqa")
        return None

    plugin_output = self.gather_output()

    # rpm autogenerates unsigned 'gpg-pubkey' pseudo-packages when a gpg
    # key is imported; drop them unless the caller asked to keep them.
    if self.ignore_autogenerated_gpg_keys:
        self.log.debug("ignore rpms 'gpg-pubkey'")
        prefix = "gpg-pubkey" + self.sep
        plugin_output = [line for line in plugin_output
                         if not line.startswith(prefix)]

    self.tasker.cleanup_containers(*self._container_ids)
    self.workflow.image_components = parse_rpm_output(plugin_output)
    return plugin_output
def test_rpmqa_plugin(caplog, docker_tasker, base_from_scratch,
                      remove_container_error, ignore_autogenerated):
    """Exercise the rpmqa plugin, including the FROM-scratch short circuit."""
    errors = {}
    if remove_container_error:
        errors['remove_container'] = None
    mock_docker(should_raise_error=errors)

    workflow = DockerBuildWorkflow(TEST_IMAGE, source=SOURCE)
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder.set_base_from_scratch(base_from_scratch)

    flexmock(docker.APIClient, logs=mock_logs)
    plugin = {
        "name": PostBuildRPMqaPlugin.key,
        "args": {
            'image_id': TEST_IMAGE,
            "ignore_autogenerated_gpg_keys": ignore_autogenerated["ignore"],
        },
    }
    runner = PostBuildPluginsRunner(docker_tasker, workflow, [plugin])
    results = runner.run()

    if base_from_scratch:
        # The plugin must refuse to run and leave the component list unset.
        assert "from scratch can't run rpmqa" in caplog.text
        assert results[PostBuildRPMqaPlugin.key] is None
        assert workflow.image_components is None
    else:
        expected = ignore_autogenerated["package_list"]
        assert results[PostBuildRPMqaPlugin.key] == expected
        assert workflow.image_components == parse_rpm_output(expected)
def test_parse_rpm_output():
    """Check parsing of default-format rpm output and of a custom tag set."""
    signed_line1 = ("name1;1.0;1;x86_64;0;2000;" + FAKE_SIGMD5.decode()
                    + ";23000;" + FAKE_SIGNATURE + ";(none)")
    signed_line2 = ("name2;2.0;1;x86_64;0;3000;" + FAKE_SIGMD5.decode()
                    + ";24000;" + "(none);" + FAKE_SIGNATURE)
    pubkey_line = ("gpg-pubkey;64dab85d;57d33e22;(none);(none);0;(none);"
                   "1473461794;(none);(none)")
    res = parse_rpm_output([signed_line1, signed_line2, pubkey_line])

    # Fields common to both expected components.
    common = {
        'type': 'rpm',
        'release': '1',
        'arch': 'x86_64',
        'epoch': 0,
        'sigmd5': FAKE_SIGMD5.decode(),
        'signature': "01234567890abc",
    }
    assert res == [
        dict(common, name='name1', version='1.0'),
        dict(common, name='name2', version='2.0'),
    ]

    # A reduced tag list combined with a non-default separator
    res = parse_rpm_output(["1|1.0|name1"],
                           tags=['RELEASE', 'VERSION', 'NAME'],
                           separator="|")
    assert res == [{
        'type': 'rpm',
        'name': 'name1',
        'version': '1.0',
        'release': '1',
        'arch': None,
        'epoch': None,
        'sigmd5': None,
        'signature': None,
    }]
def test_empty_logs_retry(docker_tasker):  # noqa
    """The plugin retries when container logs initially come back empty."""
    mock_docker()
    workflow = DockerBuildWorkflow(SOURCE, TEST_IMAGE)
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    flexmock(docker.APIClient, logs=mock_logs_retry)

    plugins = [{"name": PostBuildRPMqaPlugin.key,
                "args": {'image_id': TEST_IMAGE}}]
    runner = PostBuildPluginsRunner(docker_tasker, workflow, plugins)
    results = runner.run()

    assert results[PostBuildRPMqaPlugin.key] == PACKAGE_LIST
    assert workflow.image_components == parse_rpm_output(PACKAGE_LIST)
def run(self):
    """Query the image's rpm database and record the components found."""
    # Another plugin may already have filled in the component list.
    if self.workflow.image_components is not None:
        return None

    container_id = self.tasker.run(
        self.image_id,
        command=rpm_qf_args(),
        create_kwargs={"entrypoint": "/bin/rpm", "user": "******"},
        start_kwargs={},
    )
    self.tasker.wait(container_id)
    plugin_output = self.tasker.logs(container_id, stream=False)

    # rpm autogenerates unsigned 'gpg-pubkey' packages when a gpg key is
    # imported; these are filtered out by default.
    if self.ignore_autogenerated_gpg_keys:
        self.log.debug("ignore rpms 'gpg-pubkey'")
        prefix = "gpg-pubkey" + self.sep
        plugin_output = [line for line in plugin_output
                         if not line.startswith(prefix)]

    # Best-effort cleanup: failures are logged but never fatal.
    volumes = self.tasker.get_volumes_for_container(container_id)
    try:
        self.tasker.remove_container(container_id)
    except APIError:
        self.log.warning("error removing container (ignored):", exc_info=True)
    for volume_name in volumes:
        try:
            self.tasker.remove_volume(volume_name)
        except APIError:
            self.log.warning("error removing volume (ignored):", exc_info=True)

    self.workflow.image_components = parse_rpm_output(plugin_output)
    return plugin_output
def get_rpms():
    """
    Build a list of installed RPMs in the format required for the metadata.
    """
    tags = [
        'NAME', 'VERSION', 'RELEASE', 'ARCH',
        'EPOCH', 'SIGMD5', 'SIGPGP:pgpsig', 'SIGGPG:pgpsig',
    ]
    return parse_rpm_output(get_rpm_list(tags), tags)
def get_rpms(self):
    """
    Build a list of installed RPMs in the format required for the metadata.

    Runs ``/bin/rpm`` with a query format covering the tags below and
    parses its output with parse_rpm_output().

    :return: list of component dicts, one per installed rpm
    :raises RuntimeError: if the rpm command exits with non-zero status
    """
    tags = [
        'NAME',
        'VERSION',
        'RELEASE',
        'ARCH',
        'EPOCH',
        'SIGMD5',
        'SIGPGP:pgpsig',
        'SIGGPG:pgpsig',
    ]

    cmd = "/bin/rpm " + rpm_qf_args(tags)
    stderr = None  # diagnostic text for the failure log message
    try:
        # py3: getstatusoutput() folds stderr into the returned output,
        # so that combined text is the best diagnostic we have.
        (status, output) = subprocess.getstatusoutput(cmd)
        stderr = output
    except AttributeError:
        # py2: subprocess has no getstatusoutput; stderr is discarded
        # via /dev/null, so it stays None from communicate().
        with open('/dev/null', 'r+') as devnull:
            p = subprocess.Popen(cmd,
                                 shell=True,
                                 stdin=devnull,
                                 stdout=subprocess.PIPE,
                                 stderr=devnull)
            (stdout, stderr) = p.communicate()
            status = p.wait()
            output = stdout.decode()

    if status != 0:
        # BUG FIX: 'stderr' was previously unassigned on the py3 path,
        # so a failing rpm call raised NameError here instead of the
        # intended RuntimeError.
        self.log.debug("%s: stderr output: %s", cmd, stderr)
        raise RuntimeError("%s: exit code %s" % (cmd, status))

    return parse_rpm_output(output.splitlines(), tags)
def run(self):
    """Gather installed rpms, clean up containers/volumes, record components."""
    # Respect a component list already produced by another plugin.
    if self.workflow.image_components is not None:
        return None

    plugin_output = self.gather_output()

    # Drop rpm's autogenerated, unsigned 'gpg-pubkey' entries by default.
    if self.ignore_autogenerated_gpg_keys:
        self.log.debug("ignore rpms 'gpg-pubkey'")
        prefix = "gpg-pubkey" + self.sep
        plugin_output = [line for line in plugin_output
                         if not line.startswith(prefix)]

    # Collect each container's volumes before removing it, then remove
    # the volumes afterwards; every step is best-effort.
    volumes = []
    for container_id in self._container_ids:
        volumes.extend(self.tasker.get_volumes_for_container(container_id))
        try:
            self.tasker.remove_container(container_id)
        except APIError:
            self.log.warning("error removing container %s (ignored):",
                             container_id, exc_info=True)
    for volume_name in volumes:
        try:
            self.tasker.remove_volume(volume_name)
        except APIError:
            self.log.warning("error removing volume %s (ignored):",
                             volume_name, exc_info=True)

    self.workflow.image_components = parse_rpm_output(plugin_output)
    return plugin_output
def run(self):
    """Run rpm inside the image, collect the package list, record components."""
    # Skip if another plugin already supplied the component list.
    if self.workflow.image_components is not None:
        return None

    container_id = self.tasker.run(
        self.image_id,
        command=rpm_qf_args(),
        create_kwargs={"entrypoint": "/bin/rpm"},
        start_kwargs={},
    )
    self.tasker.wait(container_id)
    output = self.tasker.logs(container_id, stream=False)

    # 'gpg-pubkey' pseudo-packages are autogenerated by rpm on key import
    # and carry no signature; filter them unless told otherwise.
    if self.ignore_autogenerated_gpg_keys:
        self.log.debug("ignore rpms 'gpg-pubkey'")
        prefix = "gpg-pubkey" + self.sep
        output = [line for line in output if not line.startswith(prefix)]

    # Best-effort cleanup of the helper container and its volumes.
    volumes = self.tasker.get_volumes_for_container(container_id)
    try:
        self.tasker.remove_container(container_id)
    except APIError:
        self.log.warning("error removing container (ignored):", exc_info=True)
    for volume_name in volumes:
        try:
            self.tasker.remove_volume(volume_name)
        except APIError:
            self.log.warning("error removing volume (ignored):", exc_info=True)

    self.workflow.image_components = parse_rpm_output(output)
    return output
def run(self):
    """Gather rpm output, clean up containers, and record image components."""
    # Another plugin may have already filled in the component list.
    if self.workflow.image_components is not None:
        return None
    # Nothing to query when building FROM scratch.
    if self.workflow.builder.base_from_scratch:
        self.log.info("from scratch can't run rpmqa")
        return None

    output = self.gather_output()

    if self.ignore_autogenerated_gpg_keys:
        # 'gpg-pubkey' entries are rpm-autogenerated and unsigned.
        self.log.debug("ignore rpms 'gpg-pubkey'")
        prefix = "gpg-pubkey" + self.sep
        output = [line for line in output if not line.startswith(prefix)]

    # Remove containers first (collecting their volumes), then the
    # volumes; all cleanup is best-effort.
    volumes = []
    for container_id in self._container_ids:
        volumes.extend(self.tasker.get_volumes_for_container(container_id))
        try:
            self.tasker.remove_container(container_id)
        except APIError:
            self.log.warning("error removing container %s (ignored):",
                             container_id, exc_info=True)
    for volume_name in volumes:
        try:
            self.tasker.remove_volume(volume_name)
        except APIError:
            self.log.warning("error removing volume %s (ignored):",
                             volume_name, exc_info=True)

    self.workflow.image_components = parse_rpm_output(output)
    return output
def test_rpmqa_plugin(docker_tasker, remove_container_error, ignore_autogenerated):
    """The rpmqa plugin returns the package list and fills image_components."""
    errors = {'remove_container': None} if remove_container_error else {}
    mock_docker(should_raise_error=errors)

    workflow = DockerBuildWorkflow(SOURCE, TEST_IMAGE)
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    flexmock(docker.APIClient, logs=mock_logs)

    runner = PostBuildPluginsRunner(
        docker_tasker, workflow,
        [{"name": PostBuildRPMqaPlugin.key,
          "args": {'image_id': TEST_IMAGE,
                   "ignore_autogenerated_gpg_keys": ignore_autogenerated["ignore"]}}])
    results = runner.run()

    package_list = ignore_autogenerated["package_list"]
    assert results[PostBuildRPMqaPlugin.key] == package_list
    assert workflow.image_components == parse_rpm_output(package_list)
def mock_environment(tmpdir, session=None, name=None, component=None,
                     version=None, release=None, source=None,
                     build_process_failed=False, docker_registry=True,
                     pulp_registries=0, blocksize=None, task_states=None,
                     additional_tags=None, has_config=None,
                     prefer_schema1_digest=True):
    """
    Build a mocked DockerTasker and DockerBuildWorkflow for koji tests.

    Sets up a stub builder with a temporary Dockerfile, tag configuration,
    push configuration (docker and/or pulp registries), an exported image
    archive, a build result and a pre-parsed rpm component list.

    :return: (tasker, workflow) tuple ready for plugin runs
    """
    if session is None:
        session = MockedClientSession('', task_states=None)
    if source is None:
        source = GitSource('git', 'git://hostname/path')

    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    base_image_id = '123456parent-id'
    workflow.source = StubSource()
    workflow.builder = StubInsideBuilder().for_workflow(workflow)
    workflow.builder.image_id = '123456imageid'
    workflow.builder.set_inspection_data({'Id': base_image_id})
    setattr(workflow, 'tag_conf', TagConf())

    with open(os.path.join(str(tmpdir), 'Dockerfile'), 'wt') as df:
        df.write(
            'FROM base\n'
            'LABEL BZComponent={component} com.redhat.component={component}\n'
            'LABEL Version={version} version={version}\n'
            'LABEL Release={release} release={release}\n'.format(
                component=component, version=version, release=release))
    workflow.builder.set_df_path(df.name)

    if name and version:
        workflow.tag_conf.add_unique_image(
            'user/test-image:{v}-timestamp'.format(v=version))
    if name and version and release:
        workflow.tag_conf.add_primary_images([
            "{0}:{1}-{2}".format(name, version, release),
            "{0}:{1}".format(name, version),
            "{0}:latest".format(name)
        ])
    if additional_tags:
        workflow.tag_conf.add_primary_images(
            ["{0}:{1}".format(name, tag) for tag in additional_tags])

    flexmock(subprocess, Popen=fake_Popen)
    flexmock(koji, ClientSession=lambda hub, opts: session)
    flexmock(GitSource)
    setattr(workflow, 'source', source)
    setattr(workflow.source, 'lg', X())
    setattr(workflow.source.lg, 'commit_id', '123456')
    setattr(workflow, 'push_conf', PushConf())

    if docker_registry:
        docker_reg = workflow.push_conf.add_docker_registry(
            'docker.example.com')
        for image in workflow.tag_conf.images:
            tag = image.to_str(registry=False)
            # Prefer the v1 digest only when pulp is involved and the
            # caller asked for schema-1 preference.
            if pulp_registries and prefer_schema1_digest:
                docker_reg.digests[tag] = ManifestDigest(
                    v1=fake_digest(image), v2='sha256:not-used')
            else:
                docker_reg.digests[tag] = ManifestDigest(
                    v1='sha256:not-used', v2=fake_digest(image))
        if has_config:
            docker_reg.config = {
                'config': {'architecture': LOCAL_ARCH},
                'container_config': {}
            }

    for pulp_registry in range(pulp_registries):
        workflow.push_conf.add_pulp_registry('env', 'pulp.example.com')

    with open(os.path.join(str(tmpdir), 'image.tar.xz'), 'wt') as fp:
        fp.write('x' * 2**12)
    setattr(workflow, 'exported_image_sequence', [{
        'path': fp.name,
        'type': IMAGE_TYPE_DOCKER_ARCHIVE
    }])

    if build_process_failed:
        workflow.build_result = BuildResult(
            logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
            fail_reason="not built")
    else:
        workflow.build_result = BuildResult(
            logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
            image_id="id1234")
    workflow.prebuild_plugins_conf = {}

    # BUG FIX: the second entry was missing the ';' between the 24000
    # size field and the signature, merging two fields into one and
    # making it inconsistent with the first entry.
    workflow.image_components = parse_rpm_output([
        "name1;1.0;1;" + LOCAL_ARCH + ";0;2000;" + FAKE_SIGMD5.decode() +
        ";23000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abc;(none)",
        "name2;2.0;1;" + LOCAL_ARCH + ";0;3000;" + FAKE_SIGMD5.decode() +
        ";24000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abd;(none)",
    ])

    return tasker, workflow
def _get_components(self, manifest):
    """Read an rpm manifest file and parse it into component dicts."""
    with open(manifest, 'r') as manifest_file:
        manifest_lines = manifest_file.readlines()
    return parse_rpm_output(manifest_lines)
def mock_environment(tmpdir, session=None, name=None, component=None,
                     version=None, release=None, source=None,
                     build_process_failed=False, docker_registry=True,
                     pulp_registries=0, blocksize=None, task_states=None,
                     additional_tags=None, has_config=None,
                     prefer_schema1_digest=True):
    """
    Build a mocked DockerTasker and DockerBuildWorkflow for koji tests.

    Sets up a stubbed builder (via setattr on X() placeholders) with a
    temporary Dockerfile, tag configuration, push configuration, an
    exported image archive, a build result and a pre-parsed rpm
    component list.

    :return: (tasker, workflow) tuple ready for plugin runs
    """
    if session is None:
        session = MockedClientSession('', task_states=None)
    if source is None:
        source = GitSource('git', 'git://hostname/path')

    if MOCK:
        mock_docker()
    tasker = DockerTasker()
    workflow = DockerBuildWorkflow(SOURCE, "test-image")
    base_image_id = '123456parent-id'
    setattr(workflow, '_base_image_inspect', {'Id': base_image_id})
    setattr(workflow, 'builder', X())
    setattr(workflow.builder, 'image_id', '123456imageid')
    setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='22'))
    setattr(workflow.builder, 'source', X())
    setattr(workflow.builder, 'built_image_info', {'ParentId': base_image_id})
    setattr(workflow.builder.source, 'dockerfile_path', None)
    setattr(workflow.builder.source, 'path', None)
    setattr(workflow, 'tag_conf', TagConf())

    with open(os.path.join(str(tmpdir), 'Dockerfile'), 'wt') as df:
        df.write('FROM base\n'
                 'LABEL BZComponent={component} com.redhat.component={component}\n'
                 'LABEL Version={version} version={version}\n'
                 'LABEL Release={release} release={release}\n'
                 .format(component=component, version=version,
                         release=release))
    setattr(workflow.builder, 'df_path', df.name)

    if name and version:
        workflow.tag_conf.add_unique_image('user/test-image:{v}-timestamp'
                                           .format(v=version))
    if name and version and release:
        workflow.tag_conf.add_primary_images(
            ["{0}:{1}-{2}".format(name, version, release),
             "{0}:{1}".format(name, version),
             "{0}:latest".format(name)])
    if additional_tags:
        workflow.tag_conf.add_primary_images(
            ["{0}:{1}".format(name, tag) for tag in additional_tags])

    flexmock(subprocess, Popen=fake_Popen)
    flexmock(koji, ClientSession=lambda hub, opts: session)
    flexmock(GitSource)
    setattr(workflow, 'source', source)
    setattr(workflow.source, 'lg', X())
    setattr(workflow.source.lg, 'commit_id', '123456')
    setattr(workflow, 'push_conf', PushConf())

    if docker_registry:
        docker_reg = workflow.push_conf.add_docker_registry('docker.example.com')
        for image in workflow.tag_conf.images:
            tag = image.to_str(registry=False)
            # Prefer the v1 digest only when pulp is involved and the
            # caller asked for schema-1 preference.
            if pulp_registries and prefer_schema1_digest:
                docker_reg.digests[tag] = ManifestDigest(v1=fake_digest(image),
                                                         v2='sha256:not-used')
            else:
                docker_reg.digests[tag] = ManifestDigest(v1='sha256:not-used',
                                                         v2=fake_digest(image))
        if has_config:
            docker_reg.config = {
                'config': {'architecture': LOCAL_ARCH},
                'container_config': {}
            }

    for pulp_registry in range(pulp_registries):
        workflow.push_conf.add_pulp_registry('env', 'pulp.example.com')

    with open(os.path.join(str(tmpdir), 'image.tar.xz'), 'wt') as fp:
        fp.write('x' * 2**12)
    setattr(workflow, 'exported_image_sequence',
            [{'path': fp.name, 'type': IMAGE_TYPE_DOCKER_ARCHIVE}])

    if build_process_failed:
        workflow.build_result = BuildResult(
            logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
            fail_reason="not built")
    else:
        workflow.build_result = BuildResult(
            logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
            image_id="id1234")
    workflow.prebuild_plugins_conf = {}

    # BUG FIX: the second entry was missing the ';' between the 24000
    # size field and the signature, merging two fields into one and
    # making it inconsistent with the first entry.
    workflow.image_components = parse_rpm_output([
        "name1;1.0;1;" + LOCAL_ARCH + ";0;2000;" + FAKE_SIGMD5.decode() +
        ";23000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abc;(none)",
        "name2;2.0;1;" + LOCAL_ARCH + ";0;3000;" + FAKE_SIGMD5.decode() +
        ";24000;"
        "RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abd;(none)",
    ])

    return tasker, workflow