def test_get_present_bundles_grpc_not_initialize(
    mock_gil, mock_copy, mock_run_cmd, mock_popen, mock_sleep, mock_remove, mock_time, tmpdir
):
    """Fail the request when the gRPC registry never starts serving within the retry budget."""
    # The cid file must exist so the container cleanup path can read the container id.
    with open(tmpdir.join('cidfile.txt'), 'w+') as cid_file:
        cid_file.write('container_id')

    # Every status probe returns empty output: the registry never initializes.
    mock_run_cmd.side_effect = [''] * 20
    mock_time.side_effect = list(range(1, 80))
    mock_gil.return_value = 'some-path'

    fake_process = mock.MagicMock()
    fake_process.poll.return_value = None
    mock_popen.return_value = fake_process

    expected_error = 'Index registry has not been initialized after 5 tries'
    with pytest.raises(IIBError, match=expected_error):
        build._get_present_bundles('quay.io/index-image:4.5', str(tmpdir))

    assert mock_run_cmd.call_count == 20
def test_get_present_bundles(mock_gil, mock_copy, mock_run_cmd, mock_popen, mock_sleep, tmpdir):
    """Bundles reported by the registry are parsed, deduplicated, and returned with pull specs."""
    with open(tmpdir.join('cidfile.txt'), 'w+') as cid_file:
        cid_file.write('container_id')
    mock_gil.return_value = 'some-path'

    # First call is the readiness probe; the second is the bundle listing
    # (package2 appears twice and must be collapsed to a single entry).
    mock_run_cmd.side_effect = [
        'api.Registry.ListBundles',
        '{"packageName": "package1", "version": "v1.0", "bundlePath":"bundle1"\n}'
        '\n{\n"packageName": "package2", "version": "v2.0", "bundlePath":"bundle2"}'
        '\n{\n"packageName": "package2", "version": "v2.0", "bundlePath":"bundle2"}',
    ]

    fake_process = mock.MagicMock()
    fake_process.stderr.read.return_value = 'address already in use'
    fake_process.poll.side_effect = [1, None]
    mock_popen.return_value = fake_process

    bundles, bundles_pull_spec = build._get_present_bundles('quay.io/index-image:4.5', str(tmpdir))

    assert bundles == [
        {'packageName': 'package1', 'version': 'v1.0', 'bundlePath': 'bundle1'},
        {'packageName': 'package2', 'version': 'v2.0', 'bundlePath': 'bundle2'},
    ]
    assert bundles_pull_spec == ['bundle1', 'bundle2']
    assert mock_run_cmd.call_count == 2
def test_get_present_bundles_grpc_delayed_initialize(
    mock_gil, mock_copy, mock_run_cmd, mock_popen, mock_sleep, mock_remove, mock_time, tmpdir
):
    """Bundles are still returned when the registry only starts serving after several probes."""
    with open(tmpdir.join('cidfile.txt'), 'w+') as cid_file:
        cid_file.write('container_id')
    mock_time.side_effect = [0.5 * tick for tick in range(1, 80)]
    mock_gil.return_value = 'some-path'

    # Six empty probes before the registry responds, then the bundle listing.
    mock_run_cmd.side_effect = [
        '',
        '',
        '',
        '',
        '',
        '',
        'api.Registry.ListBundles',
        '{"packageName": "package1", "version": "v1.0", "bundlePath": "bundle1"\n}'
        '\n{\n"packageName": "package2", "version": "v2.0", "bundlePath": "bundle2"}'
        '\n{\n"packageName": "package2", "version": "v2.0", "bundlePath": "bundle2"}',
    ]

    fake_process = mock.MagicMock()
    fake_process.poll.return_value = None
    mock_popen.return_value = fake_process

    bundles, bundles_pull_spec = build._get_present_bundles('quay.io/index-image:4.5', str(tmpdir))

    assert bundles == [
        {'packageName': 'package1', 'version': 'v1.0', 'bundlePath': 'bundle1'},
        {'packageName': 'package2', 'version': 'v2.0', 'bundlePath': 'bundle2'},
    ]
    assert bundles_pull_spec == ['bundle1', 'bundle2']
    assert mock_run_cmd.call_count == 8
def test_get_present_bundles(moc_osfi, mock_run_cmd, tmpdir):
    """Bundle entries returned by opm are deduplicated and their pull specs collected."""
    grpc_mock = mock.MagicMock()
    moc_osfi.return_value = (50051, grpc_mock)

    # The listing contains package2 twice; only one copy should survive.
    mock_run_cmd.return_value = (
        '{"packageName": "package1", "version": "v1.0", "bundlePath":"bundle1"\n}'
        '\n{\n"packageName": "package2", "version": "v2.0", "bundlePath":"bundle2"}'
        '\n{\n"packageName": "package2", "version": "v2.0", "bundlePath":"bundle2"}'
    )

    bundles, bundles_pull_spec = build._get_present_bundles('quay.io/index-image:4.5', str(tmpdir))

    assert bundles == [
        {'packageName': 'package1', 'version': 'v1.0', 'bundlePath': 'bundle1'},
        {'packageName': 'package2', 'version': 'v2.0', 'bundlePath': 'bundle2'},
    ]
    assert bundles_pull_spec == ['bundle1', 'bundle2']
    mock_run_cmd.assert_called_once()
def test_get_no_present_bundles(mock_gil, mock_copy, mock_run_cmd, mock_popen, mock_sleep, tmpdir):
    """An empty registry listing yields empty bundle and pull-spec lists."""
    with open(tmpdir.join('cidfile.txt'), 'w+') as cid_file:
        cid_file.write('container_id')
    mock_gil.return_value = 'some-path'

    # The registry initializes on the first probe but reports no bundles.
    mock_run_cmd.side_effect = ['api.Registry.ListBundles', '']

    fake_process = mock.MagicMock()
    fake_process.stderr.read.return_value = 'address already in use'
    fake_process.poll.side_effect = [1, None]
    mock_popen.return_value = fake_process

    bundles, bundles_pull_spec = build._get_present_bundles('quay.io/index-image:4.5', str(tmpdir))

    assert bundles == []
    assert bundles_pull_spec == []
    assert mock_run_cmd.call_count == 2
def test_get_no_present_bundles(moc_osfi, mock_run_cmd, tmpdir):
    """An empty opm listing yields empty bundle and pull-spec lists."""
    grpc_mock = mock.MagicMock()
    moc_osfi.return_value = (50051, grpc_mock)
    mock_run_cmd.return_value = ''

    bundles, bundles_pull_spec = build._get_present_bundles('quay.io/index-image:4.5', str(tmpdir))

    assert bundles == []
    assert bundles_pull_spec == []
    mock_run_cmd.assert_called_once()
def handle_merge_request(
    source_from_index,
    deprecation_list,
    request_id,
    binary_image=None,
    target_index=None,
    overwrite_target_index=False,
    overwrite_target_index_token=None,
    distribution_scope=None,
    binary_image_config=None,
):
    """
    Coordinate the work needed to merge old (N) index image with new (N+1) index image.

    :param str source_from_index: pull specification to be used as the base for building the new
        index image.
    :param str target_index: pull specification of content stage index image for the
        corresponding target index image.
    :param list deprecation_list: list of deprecated bundles for the target index image.
    :param int request_id: the ID of the IIB build request.
    :param str binary_image: the pull specification of the container image where the opm binary
        gets copied from.
    :param bool overwrite_target_index: if True, overwrite the input ``target_index`` with the
        built index image.
    :param str overwrite_target_index_token: the token used for overwriting the input
        ``target_index`` image. This is required for non-privileged users to use
        ``overwrite_target_index``. The format of the token must be in the format "user:password".
    :param str distribution_scope: the scope for distribution of the index image, defaults to
        ``None``.
    :param binary_image_config: configuration forwarded to ``_prepare_request_for_build`` to
        help resolve the binary image.
    :raises IIBError: if the index image merge fails.
    """
    _cleanup()
    # Resolve images/arches/distribution scope for this request before any building starts.
    prebuild_info = _prepare_request_for_build(
        request_id,
        binary_image,
        overwrite_from_index_token=overwrite_target_index_token,
        source_from_index=source_from_index,
        target_index=target_index,
        distribution_scope=distribution_scope,
        binary_image_config=binary_image_config,
    )
    _update_index_image_build_state(request_id, prebuild_info)
    with tempfile.TemporaryDirectory(prefix='iib-') as temp_dir:
        set_request_state(request_id, 'in_progress', 'Getting bundles present in the index images')
        log.info('Getting bundles present in the source index image')
        source_index_bundles = _get_present_bundles(source_from_index, temp_dir)

        # target_index is optional; without it there is nothing to diff against.
        target_index_bundles = []
        if target_index:
            log.info('Getting bundles present in the target index image')
            target_index_bundles = _get_present_bundles(target_index, temp_dir)

        arches = list(prebuild_info['arches'])
        # Prefer amd64 for the single-arch intermediate work when it is available.
        arch = 'amd64' if 'amd64' in arches else arches[0]

        # Add bundles present in the target but missing from the source into the new index.
        missing_bundles = _add_bundles_missing_in_source(
            source_index_bundles,
            target_index_bundles,
            temp_dir,
            prebuild_info['binary_image'],
            source_from_index,
            request_id,
            arch,
            prebuild_info['target_ocp_version'],
            overwrite_target_index_token,
            distribution_scope=prebuild_info['distribution_scope'],
        )

        set_request_state(request_id, 'in_progress', 'Deprecating bundles in the deprecation list')
        log.info('Deprecating bundles in the deprecation list')
        # Deprecation candidates come from both the source bundles and the newly added ones.
        intermediate_bundles = source_index_bundles + missing_bundles
        deprecate_bundles = _get_bundles_from_deprecation_list(
            intermediate_bundles, deprecation_list
        )
        intermediate_image_name = _get_external_arch_pull_spec(
            request_id, arch, include_transport=False
        )
        if deprecate_bundles:
            _deprecate_bundles(
                deprecate_bundles,
                temp_dir,
                prebuild_info['binary_image'],
                intermediate_image_name,
                overwrite_target_index_token,
            )

        # Build and push a per-arch image for every requested architecture.
        for arch in sorted(prebuild_info['arches']):
            _build_image(temp_dir, 'index.Dockerfile', request_id, arch)
            _push_image(request_id, arch)

        output_pull_spec = _create_and_push_manifest_list(request_id, prebuild_info['arches'])
        _update_index_image_pull_spec(
            output_pull_spec,
            request_id,
            prebuild_info['arches'],
            target_index,
            overwrite_target_index,
            overwrite_target_index_token,
            prebuild_info['target_index_resolved'],
        )
        set_request_state(
            request_id, 'complete', 'The index image was successfully cleaned and updated.'
        )
def handle_merge_request(
    source_from_index,
    deprecation_list,
    request_id,
    binary_image=None,
    target_index=None,
    overwrite_target_index=False,
    overwrite_target_index_token=None,
    distribution_scope=None,
    binary_image_config=None,
):
    """
    Coordinate the work needed to merge old (N) index image with new (N+1) index image.

    :param str source_from_index: pull specification to be used as the base for building the new
        index image.
    :param str target_index: pull specification of content stage index image for the
        corresponding target index image.
    :param list deprecation_list: list of deprecated bundles for the target index image.
    :param int request_id: the ID of the IIB build request.
    :param str binary_image: the pull specification of the container image where the opm binary
        gets copied from.
    :param bool overwrite_target_index: if True, overwrite the input ``target_index`` with the
        built index image.
    :param str overwrite_target_index_token: the token used for overwriting the input
        ``target_index`` image. This is required to use ``overwrite_target_index``. The format of
        the token must be in the format "user:password".
    :param str distribution_scope: the scope for distribution of the index image, defaults to
        ``None``.
    :param binary_image_config: configuration forwarded via ``RequestConfigMerge`` to help
        resolve the binary image.
    :raises IIBError: if the index image merge fails.
    """
    _cleanup()
    # All request parameters are bundled into a RequestConfigMerge for resolution.
    prebuild_info = prepare_request_for_build(
        request_id,
        RequestConfigMerge(
            _binary_image=binary_image,
            overwrite_target_index_token=overwrite_target_index_token,
            source_from_index=source_from_index,
            target_index=target_index,
            distribution_scope=distribution_scope,
            binary_image_config=binary_image_config,
        ),
    )
    _update_index_image_build_state(request_id, prebuild_info)
    source_from_index_resolved = prebuild_info['source_from_index_resolved']
    target_index_resolved = prebuild_info['target_index_resolved']
    with tempfile.TemporaryDirectory(prefix='iib-') as temp_dir:
        set_request_state(request_id, 'in_progress', 'Getting bundles present in the index images')
        log.info('Getting bundles present in the source index image')
        # The registry token is set so private source/target indexes can be inspected.
        with set_registry_token(overwrite_target_index_token, source_from_index):
            source_index_bundles, source_index_bundles_pull_spec = _get_present_bundles(
                source_from_index_resolved, temp_dir
            )
            # target_index is optional; without it there is nothing to diff against.
            target_index_bundles = []
            if target_index:
                log.info('Getting bundles present in the target index image')
                target_index_bundles, _ = _get_present_bundles(target_index_resolved, temp_dir)

        arches = list(prebuild_info['arches'])
        # Prefer amd64 for the single-arch intermediate work when it is available.
        arch = 'amd64' if 'amd64' in arches else arches[0]

        missing_bundles, invalid_version_bundles = _add_bundles_missing_in_source(
            source_index_bundles,
            target_index_bundles,
            temp_dir,
            prebuild_info['binary_image'],
            source_from_index_resolved,
            request_id,
            arch,
            prebuild_info['target_ocp_version'],
            overwrite_target_index_token,
            distribution_scope=prebuild_info['distribution_scope'],
        )

        set_request_state(request_id, 'in_progress', 'Deprecating bundles in the deprecation list')
        log.info('Deprecating bundles in the deprecation list')
        intermediate_bundles = [
            bundle['bundlePath'] for bundle in missing_bundles
        ] + source_index_bundles_pull_spec
        deprecation_bundles = get_bundles_from_deprecation_list(
            intermediate_bundles, deprecation_list
        )
        # We do not need to pass the invalid_version_bundles through the
        # get_bundles_from_deprecation_list function because we already know
        # they are present in the newly created index.
        deprecation_bundles = deprecation_bundles + [
            bundle['bundlePath'] for bundle in invalid_version_bundles
        ]
        intermediate_image_name = _get_external_arch_pull_spec(
            request_id, arch, include_transport=False
        )
        if deprecation_bundles:
            deprecate_bundles(
                deprecation_bundles,
                temp_dir,
                prebuild_info['binary_image'],
                intermediate_image_name,
                overwrite_target_index_token,
            )

        # Build and push a per-arch image for every requested architecture.
        for arch in sorted(prebuild_info['arches']):
            _build_image(temp_dir, 'index.Dockerfile', request_id, arch)
            _push_image(request_id, arch)

        # If the container-tool podman is used in the opm commands above, opm will create temporary
        # files and directories without the write permission. This will cause the context manager
        # to fail to delete these files. Adjust the file modes to avoid this error.
        chmod_recursively(
            temp_dir,
            dir_mode=(stat.S_IRWXU | stat.S_IRWXG),
            file_mode=(stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP),
        )

        output_pull_spec = _create_and_push_manifest_list(request_id, prebuild_info['arches'])
        _update_index_image_pull_spec(
            output_pull_spec,
            request_id,
            prebuild_info['arches'],
            target_index,
            overwrite_target_index,
            overwrite_target_index_token,
            target_index_resolved,
        )
        set_request_state(
            request_id, 'complete', 'The index image was successfully cleaned and updated.'
        )
def handle_merge_request(
    source_from_index,
    deprecation_list,
    request_id,
    binary_image=None,
    target_index=None,
    overwrite_target_index=False,
    overwrite_target_index_token=None,
    distribution_scope=None,
    binary_image_config=None,
    build_tags=None,
):
    """
    Coordinate the work needed to merge old (N) index image with new (N+1) index image.

    :param str source_from_index: pull specification to be used as the base for building the new
        index image.
    :param str target_index: pull specification of content stage index image for the
        corresponding target index image.
    :param list deprecation_list: list of deprecated bundles for the target index image.
    :param int request_id: the ID of the IIB build request.
    :param str binary_image: the pull specification of the container image where the opm binary
        gets copied from.
    :param bool overwrite_target_index: if True, overwrite the input ``target_index`` with the
        built index image.
    :param str overwrite_target_index_token: the token used for overwriting the input
        ``target_index`` image. This is required to use ``overwrite_target_index``. The format of
        the token must be in the format "user:password".
    :param str distribution_scope: the scope for distribution of the index image, defaults to
        ``None``.
    :param binary_image_config: configuration forwarded via ``RequestConfigMerge`` to help
        resolve the binary image.
    :param build_tags: list of extra tags to use for the intermediate index image.
    :raises IIBError: if the index image merge fails.
    """
    _cleanup()
    prebuild_info = prepare_request_for_build(
        request_id,
        RequestConfigMerge(
            _binary_image=binary_image,
            overwrite_target_index_token=overwrite_target_index_token,
            source_from_index=source_from_index,
            target_index=target_index,
            distribution_scope=distribution_scope,
            binary_image_config=binary_image_config,
        ),
    )
    _update_index_image_build_state(request_id, prebuild_info)
    source_from_index_resolved = prebuild_info['source_from_index_resolved']
    target_index_resolved = prebuild_info['target_index_resolved']
    dockerfile_name = 'index.Dockerfile'
    with tempfile.TemporaryDirectory(prefix='iib-') as temp_dir:
        # Detect whether each index is a File-Based Catalog (FBC) or SQLite index;
        # the token context allows inspecting private images.
        with set_registry_token(overwrite_target_index_token, source_from_index):
            source_fbc = is_image_fbc(source_from_index_resolved)
            target_fbc = is_image_fbc(target_index_resolved)

        # do not remove - logging requested by stakeholders
        if source_fbc:
            log.info("Processing source index image as File-Based Catalog image")
        if target_fbc:
            log.info("Processing target index image as File-Based Catalog image")

        # An FBC source cannot be merged into a SQLite target; fail fast.
        if source_fbc and not target_fbc:
            err_msg = (
                'Cannot merge source File-Based Catalog index image into target SQLite index image.'
            )
            log.error(err_msg)
            raise IIBError(err_msg)

        set_request_state(request_id, 'in_progress', 'Getting bundles present in the index images')
        log.info('Getting bundles present in the source index image')
        with set_registry_token(overwrite_target_index_token, source_from_index):
            source_index_bundles, source_index_bundles_pull_spec = _get_present_bundles(
                source_from_index_resolved, temp_dir
            )
            # target_index is optional; without it there is nothing to diff against.
            target_index_bundles = []
            if target_index:
                log.info('Getting bundles present in the target index image')
                target_index_bundles, _ = _get_present_bundles(target_index_resolved, temp_dir)

        arches = list(prebuild_info['arches'])
        # Deterministically pick one arch for the single-arch intermediate work.
        arch = sorted(arches)[0]

        missing_bundles, invalid_version_bundles = _add_bundles_missing_in_source(
            source_index_bundles,
            target_index_bundles,
            temp_dir,
            prebuild_info['binary_image'],
            source_from_index_resolved,
            request_id,
            arch,
            prebuild_info['target_ocp_version'],
            overwrite_target_index_token,
            distribution_scope=prebuild_info['distribution_scope'],
        )
        missing_bundle_paths = [bundle['bundlePath'] for bundle in missing_bundles]
        if missing_bundle_paths:
            add_max_ocp_version_property(missing_bundle_paths, temp_dir)
        set_request_state(request_id, 'in_progress', 'Deprecating bundles in the deprecation list')
        log.info('Deprecating bundles in the deprecation list')
        intermediate_bundles = missing_bundle_paths + source_index_bundles_pull_spec
        deprecation_bundles = get_bundles_from_deprecation_list(
            intermediate_bundles, deprecation_list
        )
        # We do not need to pass the invalid_version_bundles through the
        # get_bundles_from_deprecation_list function because we already know
        # they are present in the newly created index.
        deprecation_bundles = deprecation_bundles + [
            bundle['bundlePath'] for bundle in invalid_version_bundles
        ]

        if deprecation_bundles:
            intermediate_image_name = _get_external_arch_pull_spec(
                request_id, arch, include_transport=False
            )
            # we can check if source index is FBC or not because intermediate_image
            # will be always the same type because it is built
            # from source index image in _add_bundles_missing_in_source()
            if source_fbc:
                deprecate_bundles_fbc(
                    bundles=deprecation_bundles,
                    base_dir=temp_dir,
                    binary_image=prebuild_info['binary_image'],
                    from_index=intermediate_image_name,
                )
            else:
                # opm can only deprecate a bundle image on an existing index image. Build and
                # push a temporary index image to satisfy this requirement. Any arch will do.
                # NOTE: we cannot use local builds because opm commands fails,
                # index image has to be pushed to registry
                _build_image(temp_dir, 'index.Dockerfile', request_id, arch)
                _push_image(request_id, arch)
                deprecate_bundles(
                    bundles=deprecation_bundles,
                    base_dir=temp_dir,
                    binary_image=prebuild_info['binary_image'],
                    from_index=intermediate_image_name,
                    overwrite_target_index_token=overwrite_target_index_token,
                )

        if target_fbc:
            index_db_file = os.path.join(temp_dir, get_worker_config()['temp_index_db_path'])
            # make sure FBC is generated right before build
            fbc_dir = opm_migrate(index_db=index_db_file, base_dir=temp_dir)
            if not source_fbc:
                # when source image is not FBC, but final image should be an FBC image
                # we have to generate Dockerfile for FBC (with hidden index.db)
                dockerfile_path = os.path.join(temp_dir, dockerfile_name)
                if os.path.isfile(dockerfile_path):
                    log.info('Removing previously generated dockerfile.')
                    os.remove(dockerfile_path)
                opm_generate_dockerfile(
                    fbc_dir=fbc_dir,
                    base_dir=temp_dir,
                    index_db=index_db_file,
                    binary_image=prebuild_info['binary_image'],
                    dockerfile_name=dockerfile_name,
                )

        # Stamp the delivery metadata labels onto the generated Dockerfile.
        _add_label_to_index(
            'com.redhat.index.delivery.version',
            prebuild_info['target_ocp_version'],
            temp_dir,
            dockerfile_name,
        )
        _add_label_to_index(
            'com.redhat.index.delivery.distribution_scope',
            prebuild_info['distribution_scope'],
            temp_dir,
            dockerfile_name,
        )

        # Build and push a per-arch image for every requested architecture.
        for arch in sorted(prebuild_info['arches']):
            _build_image(temp_dir, dockerfile_name, request_id, arch)
            _push_image(request_id, arch)

        # If the container-tool podman is used in the opm commands above, opm will create temporary
        # files and directories without the write permission. This will cause the context manager
        # to fail to delete these files. Adjust the file modes to avoid this error.
        chmod_recursively(
            temp_dir,
            dir_mode=(stat.S_IRWXU | stat.S_IRWXG),
            file_mode=(stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP),
        )

        output_pull_spec = _create_and_push_manifest_list(
            request_id, prebuild_info['arches'], build_tags
        )
        _update_index_image_pull_spec(
            output_pull_spec,
            request_id,
            prebuild_info['arches'],
            target_index,
            overwrite_target_index,
            overwrite_target_index_token,
            target_index_resolved,
        )
        set_request_state(
            request_id, 'complete', 'The index image was successfully cleaned and updated.'
        )