    def __init__(self, pyxis=None):
        super().__init__()
        if pyxis:
            self._pyxis = pyxis
        else:
            if not conf.pyxis_server_url:
                raise ValueError("'PYXIS_SERVER_URL' parameter should be set")
            self._pyxis = Pyxis(conf.pyxis_server_url)

        if not conf.freshmaker_root_url or "://" not in conf.freshmaker_root_url:
            raise ValueError(
                "'FRESHMAKER_ROOT_URL' parameter should be set to "
                "a valid URL")
Example #4
class TestQueryPyxis(helpers.FreshmakerTestCase):
    def setUp(self):
        super().setUp()

        self.patcher = helpers.Patcher('freshmaker.pyxis.')

        self.fake_server_url = 'https://pyxis.localhost/'
        self.px = Pyxis(self.fake_server_url)
        self.response = create_autospec(requests.Response)
        self.response.status_code = HTTPStatus.OK
        self.bad_requests_response = {
            "detail": [
                "Unable to parse the filter from URL.",
                "Please verify the 'Field Name' in the RSQL Expression.",
                "Please visit the following end-point for more details:",
                "    /v1/docs/filtering-language"
            ],
            "status":
            400,
            "title":
            "Bad Request",
            "type":
            "about:blank"
        }

        self.empty_response_page = {
            "data": [],
            "page": 0,
            "page_size": 100,
            "total": 0
        }

        self.indices = [{
            "_id": "1",
            "created_by": "meteor",
            "creation_date": "2020-01-01T09:32:31.692000+00:00",
            "last_update_date": "2020-01-01T09:32:31.692000+00:00",
            "last_updated_by": "meteor",
            "ocp_version": "4.5",
            "organization": "org",
            "path": "path/to/registry:v4.5"
        }, {
            "_id": "2",
            "created_by": "meteor",
            "creation_date": "2020-01-01T09:32:38.486000+00:00",
            "last_update_date": "2020-01-01T09:32:38.486000+00:00",
            "last_updated_by": "meteor",
            "ocp_version": "4.6",
            "organization": "org",
            "path": "path/to/registry:v4.6"
        }, {
            "_id": "2",
            "created_by": "meteor",
            "creation_date": "2020-01-01T09:32:38.486000+00:00",
            "last_update_date": "2020-01-01T09:32:38.486000+00:00",
            "last_updated_by": "meteor",
            "ocp_version": "4.6",
            "organization": "org",
            "path": ""
        }]

        self.bundles = [
            {
                "channel_name":
                "streams-1.5.x",
                "related_images": [{
                    "image": "registry/amq7/amq-streams-r-operator@sha256:111",
                    "name": "strimzi-cluster-operator",
                    "digest": "sha256:111"
                }, {
                    "image": "registry/amq7/amq-streams-kafka-24-r@sha256:222",
                    "name": "strimzi-kafka-24",
                    "digest": "sha256:222"
                }, {
                    "image": "registry/amq7/amq-streams-kafka-25-r@sha256:333",
                    "name": "strimzi-kafka-25",
                    "digest": "sha256:333"
                }, {
                    "image": "registry/amq7/amq-streams-bridge-r@sha256:444",
                    "name": "strimzi-bridge",
                    "digest": "sha256:444"
                }],
                "version":
                "1.5.3"
            },
            {
                "channel_name":
                "streams-1.5.x",
                "related_images": [{
                    "image": "registry/amq7/amq-streams-r-operator@sha256:555",
                    "name": "strimzi-cluster-operator",
                    "digest": "sha256:555"
                }, {
                    "image": "registry/amq7/amq-streams-kafka-24-r@sha256:666",
                    "name": "strimzi-kafka-24",
                    "digest": "sha256:666"
                }, {
                    "image": "registry/amq7/amq-streams-kafka-25-r@sha256:777",
                    "name": "strimzi-kafka-25",
                    "digest": "sha256:777"
                }, {
                    "image": "registry/amq7/amq-streams-bridge-r@sha256:888",
                    "name": "strimzi-bridge",
                    "digest": "sha256:888"
                }],
                "version":
                "1.5.4"
            },
            {
                "channel_name":
                "stable",
                "related_images": [{
                    "image": "registry/amq7/amq--operator@sha256:999",
                    "name": "strimzi-cluster-operator",
                    "digest": "sha256:999"
                }, {
                    "image": "registry/amq7/kafka-24-r@sha256:aaa",
                    "name": "strimzi-kafka-24",
                    "digest": "sha256:aaa"
                }, {
                    "image": "registry/amq7/kafka-25-r@sha256:bbb",
                    "name": "strimzi-kafka-25",
                    "digest": "sha256:bbb"
                }, {
                    "image": "registry/amq7/amq-streams-bridge-r@sha256:ccc",
                    "name": "strimzi-bridge",
                    "digest": "sha256:ccc"
                }],
                "version":
                "1.5.3"
            },
            {
                "channel_name":
                "stable",
                "related_images": [{
                    "image": "registry/tracing/j-operator:1.13.2",
                    "name": "j-1.13.2-annotation",
                    "digest": "sha256:fff"
                }, {
                    "image": "registry/tracing/j-operator:1.13.2",
                    "name": "j-operator",
                    "digest": "sha256:ffff"
                }],
                "version":
                "1.5.2"
            },
            {
                "channel_name":
                "quay-v3.3",
                "related_images": [{
                    "image": "registry/quay/quay-operator@sha256:ddd",
                    "name": "quay-operator-annotation",
                    "digest": "sha256:ddd"
                }, {
                    "image":
                    "registry/quay/quay-security-r-operator@sha256:eee",
                    "name": "container-security-operator",
                    "digest": "sha256:eee"
                }],
                "version":
                "3.3.1"
            },
        ]

        self.images = [{
            "brew": {
                "build": "s2i-1-2",
                "completion_date": "2020-08-12T11:31:39+00:00",
                "nvra": "s2i-1-2.ppc64le",
                "package": "s2i-core-container"
            },
            "repositories": [{
                "manifest_list_digest": "sha256:1111",
                "published": False,
                "registry": "reg1",
                "repository": "repo1",
                "tags": [{
                    "name": "tag0"
                }]
            }, {
                "manifest_list_digest": "sha256:1112",
                "published": True,
                "registry": "reg2",
                "repository": "repo2",
                "tags": [{
                    "name": "tag1"
                }, {
                    "name": "tag2"
                }]
            }]
        }, {
            "brew": {
                "build": "s2i-1-2",
                "completion_date": "2020-08-12T11:31:39+00:00",
                "nvra": "s2i-1-2.s390x",
                "package": "s2i-core-container"
            },
            "repositories": [{
                "manifest_list_digest": "sha256:2222",
                "published": True,
                "registry": "reg2",
                "repository": "repo2",
                "tags": [{
                    "name": "tag2"
                }]
            }]
        }, {
            "brew": {
                "build": "s2i-1-2",
                "completion_date": "2020-08-12T11:31:39+00:00",
                "nvra": "s2i-1-2.amd64",
                "package": "s2i-core-container"
            },
            "repositories": [{
                "manifest_list_digest": "sha256:3333",
                "published": True,
                "registry": "reg3",
                "repository": "repo3",
                "tags": [{
                    "name": "latest"
                }]
            }]
        }, {
            "brew": {
                "build": "s2i-1-2",
                "completion_date": "2020-08-12T11:31:39+00:00",
                "nvra": "s2i-1-2.arm64",
                "package": "s2i-core-container"
            },
            "repositories": [{
                "manifest_list_digest": "sha256:4444",
                "published": True,
                "registry": "reg4",
                "repository": "repo4",
                "tags": [{
                    "name": "tag1"
                }]
            }]
        }]

    def tearDown(self):
        super().tearDown()
        self.patcher.unpatch_all()

    @staticmethod
    def copy_call_args(mock):
        """
        Copy the args of a Mock to another Mock so that call args can be
        checked even when the mock is called with mutable args that are
        mutated between calls
        """
        new_mock = Mock()

        def side_effect(*args, **kwargs):
            args = deepcopy(args)
            kwargs = deepcopy(kwargs)
            return new_mock(*args, **kwargs)

        mock.side_effect = side_effect
        return new_mock
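
    def test_copy_call_args_with_mutable_args(self):
        # A minimal usage sketch (not in the original tests) of why
        # copy_call_args is needed: a plain Mock only stores references to
        # its args, so mutating a dict between calls rewrites the recorded
        # history, while the snapshot mock keeps deep copies.
        mock = Mock()
        snapshot = self.copy_call_args(mock)

        params = {'page': 0}
        mock(params)
        params['page'] = 1  # mutate the same dict between calls
        mock(params)

        # The snapshot saw a deep copy of the args of each call ...
        snapshot.assert_has_calls([call({'page': 0}), call({'page': 1})])
        # ... while the plain mock recorded the same, mutated dict twice.
        mock.assert_has_calls([call({'page': 1}), call({'page': 1})])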

    @patch('freshmaker.pyxis.HTTPKerberosAuth')
    @patch('freshmaker.pyxis.requests.get')
    def test_make_request(self, get, auth):
        get.return_value = self.response
        test_params = {'key1': 'val1'}
        self.px._make_request('test', test_params)

        get_url = self.fake_server_url + 'v1/test'
        self.response.json.assert_called_once()
        test_params['page_size'] = "100"
        get.assert_called_once_with(get_url,
                                    params=test_params,
                                    auth=auth(),
                                    timeout=conf.net_timeout)

    @patch('freshmaker.pyxis.HTTPKerberosAuth')
    @patch('freshmaker.pyxis.requests.get')
    def test_make_request_error(self, get, auth):
        get.return_value = self.response
        self.response.ok = False
        self.response.json.side_effect = ValueError
        self.response.text = 'test message'
        self.response.request = Mock()
        self.response.request.url = 'test/url'

        with self.assertRaises(PyxisRequestError, msg='test message'):
            self.px._make_request('test', {})

    @patch('freshmaker.pyxis.HTTPKerberosAuth')
    @patch('freshmaker.pyxis.Pyxis._make_request')
    def test_pagination(self, request, auth):
        my_request = self.copy_call_args(request)
        my_request.side_effect = [{
            "page": 0,
            "data": ["fake_data1"]
        }, {
            "page": 1,
            "data": ["fake_data2"]
        }, {
            "page": 2,
            "data": []
        }]
        test_params = {'include': ['total', 'field1']}
        entity = 'test'
        auth.return_value = 1
        self.px._pagination(entity, test_params)

        self.assertEqual(request.call_count, 3)
        default_params = {'page_size': '100', 'include': ['total', 'field1']}
        calls = [
            call('test', params={
                **default_params, 'page': 0
            }),
            call('test', params={
                **default_params, 'page': 1
            }),
            call('test', params={
                **default_params, 'page': 2
            })
        ]
        my_request.assert_has_calls(calls)

    @patch.object(conf, 'pyxis_index_image_organization', new='org')
    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_get_operator_indices(self, page):
        self.px.get_operator_indices()
        page.assert_called_once_with('operators/indices',
                                     {'filter': 'organization==org'})

    @patch.object(conf,
                  "product_pages_api_url",
                  new="http://pp.example.com/api")
    @patch("freshmaker.pyxis.Pyxis._pagination")
    def test_get_operator_indices_with_unreleased_filtered_out(self, page):
        pp_mock_data = [{
            "url":
            "http://pp.example.com/api/releases/openshift-4.5/schedule-tasks",
            "json": [{
                "name": "GA",
                "date_finish": "2020-02-05"
            }]
        }, {
            "url":
            "http://pp.example.com/api/releases/openshift-4.6/schedule-tasks",
            "json": [{
                "name": "GA",
                "date_finish": "2020-05-23"
            }]
        }, {
            "url":
            "http://pp.example.com/api/releases/openshift-4.8/schedule-tasks",
            "json": [{
                "name": "GA",
                "date_finish": "2021-08-12"
            }]
        }]
        page.return_value = self.indices + [{
            "_id": "3",
            "created_by": "meteor",
            "creation_date": "2020-11-01T08:23:28.253000+00:00",
            "last_update_date": "2020-11-01T08:23:28.253000+00:00",
            "last_updated_by": "meteor",
            "ocp_version": "4.8",
            "organization": "org",
            "path": ""
        }]
        now = datetime(year=2020, month=12, day=15, hour=0, minute=0, second=0)

        with requests_mock.Mocker() as http:
            for data in pp_mock_data:
                http.get(data["url"], json=data["json"])

            with freeze_time(now):
                indices = self.px.get_operator_indices()

        assert len(indices) == 3
        assert "4.8" not in [i["ocp_version"] for i in indices]

    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_get_bundles_per_index_image(self, page):
        page_copy = self.copy_call_args(page)
        page_copy.side_effect = [self.bundles[0:2], self.bundles[2:]]
        out = self.px._get_bundles_per_index_image(self.indices)

        expected_out = {
            "path/to/registry:v4.5": self.bundles[0:2],
            "path/to/registry:v4.6": self.bundles[2:]
        }

        self.assertEqual(out, expected_out)
        page.assert_has_calls([
            call(
                'operators/bundles', {
                    'include':
                    'data.channel_name,data.version,'
                    'data.related_images,data.bundle_path_digest,'
                    'data.bundle_path',
                    'filter':
                    'source_index_container_path==path/to/registry:v4.5'
                }),
            call(
                'operators/bundles', {
                    'include':
                    'data.channel_name,data.version,'
                    'data.related_images,data.bundle_path_digest,'
                    'data.bundle_path',
                    'filter':
                    'source_index_container_path==path/to/registry:v4.6'
                })
        ])

    @patch('freshmaker.pyxis.Pyxis._get_bundles_per_index_image')
    def test_get_latest_bundles(self, get_bundles_per_indices):
        get_bundles_per_indices.return_value = {
            "path/to/registry:v4.5": self.bundles[0:2],
            "path/to/registry:v4.6": self.bundles[2:]
        }

        out = self.px.get_latest_bundles(self.indices)
        # we expect bundles 0 and 3 to be filtered out because of their older versions
        expected_out = self.bundles[1:3] + self.bundles[4:]
        self.assertEqual(out, expected_out)
        get_bundles_per_indices.assert_called_once()

    @patch('freshmaker.pyxis.Pyxis._get_bundles_per_index_image')
    def test_get_latest_bundles_invalid_version(self, get_bundles_per_indices):
        # set invalid version
        for bundle in self.bundles:
            bundle['version'] = "InvalidVersion"
        get_bundles_per_indices.return_value = {
            "path/to/registry:v4.5": self.bundles[0:2],
            "path/to/registry:v4.6": self.bundles[2:]
        }

        with self.assertLogs("freshmaker", level="WARNING"):
            bundles = self.px.get_latest_bundles(self.indices)
            self.assertEqual(bundles, [])
            get_bundles_per_indices.assert_called_once()

    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_get_digests_by_nvrs(self, page):
        page.return_value = self.images
        digests = self.px.get_digests_by_nvrs(['s2i-1-2'])

        expected_digests = {
            'sha256:1112', 'sha256:4444', 'sha256:3333', 'sha256:2222'
        }
        self.assertEqual(digests, expected_digests)
        page.assert_called_once_with(
            'images/nvr/s2i-1-2', {'include': 'data.brew,data.repositories'})

    def test_filter_bundles_by_related_image_digests(self):
        digests = {'sha256:111', 'sha256:bbb', 'sha256:ddd'}
        new_bundles = self.px.filter_bundles_by_related_image_digests(
            digests, self.bundles)

        expected_bundles = [self.bundles[0], self.bundles[2], self.bundles[4]]
        self.assertListEqual(new_bundles, expected_bundles)

    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_get_images_by_digests(self, page):
        my_pagination = self.copy_call_args(page)
        my_pagination.return_value = [self.images[0], self.images[1]]
        images = self.px.get_images_by_digests(['sha256:111', 'sha256:222'])

        my_pagination.assert_called_once_with(
            'images', {
                'include':
                'data.brew,data.repositories',
                'filter':
                'repositories.manifest_list_digest=in='
                '(sha256:111,sha256:222)'
            })
        self.assertEqual(page.call_count, 1)
        self.assertEqual(self.images[0:2], images)

    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_add_repositories_info(self, page):
        reg_repo_info = {
            ('reg1', 'rep1'): {
                'nvrs': {'nvr1', 'nvr2'}
            },
            ('reg1', 'rep2'): {
                'nvrs': {'nvr3'}
            },
            ('reg2', 'rep1'): {
                'nvrs': {'nvr4', 'nvr5'}
            },
            ('reg3', 'rep3'): {
                'nvrs': {'nvr6', 'nvr7'}
            },
        }
        page.return_value = [{
            'auto_rebuild_tags': ['tag1', 'tag2'],
            'registry': 'reg1',
            'repository': 'rep1'
        }, {
            'auto_rebuild_tags': ['latest'],
            'registry': 'reg1',
            'repository': 'rep2'
        }, {
            'auto_rebuild_tags': ['tag1', 'tag2'],
            'registry': 'reg2',
            'repository': 'rep1'
        }, {
            'registry': 'reg3',
            'repository': 'rep3'
        }]

        self.px._add_repositories_info(reg_repo_info)

        params = {
            'include':
            'data.auto_rebuild_tags,data.registry,data.repository',
            'filter':
            '(registry==reg1;repository==rep1),'
            '(registry==reg1;repository==rep2),'
            '(registry==reg2;repository==rep1),'
            '(registry==reg3;repository==rep3)'
        }
        page.assert_called_with('repositories', params)

        expected_info = {
            ('reg1', 'rep1'): {
                'nvrs': {'nvr1', 'nvr2'},
                'auto_rebuild_tags': {'tag1', 'tag2'}
            },
            ('reg1', 'rep2'): {
                'nvrs': {'nvr3'},
                'auto_rebuild_tags': {'latest'}
            },
            ('reg2', 'rep1'): {
                'nvrs': {'nvr4', 'nvr5'},
                'auto_rebuild_tags': {'tag1', 'tag2'}
            },
        }
        self.assertEqual(reg_repo_info, expected_info)

    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_filter_auto_rebuild_nvrs(self, page):
        my_page = self.copy_call_args(page)
        reg_repo_info = {
            ('reg1', 'rep1'): {
                'nvrs': {'nvr1', 'nvr2'},
                'auto_rebuild_tags': ['tag1', 'tag2']
            },
            ('reg1', 'rep2'): {
                'nvrs': {'nvr3'},
                'auto_rebuild_tags': ['latest']
            },
            ('reg2', 'rep1'): {
                'nvrs': {'nvr4', 'nvr5'},
                'auto_rebuild_tags': ['tag1', 'tag2']
            },
            ('reg3', 'rep3'): {
                'auto_rebuild_tags': ['tag1', 'tag2']
            }
        }
        my_page.side_effect = [[{
            'brew': {
                'build': 'nvr1'
            }
        }, {
            'brew': {
                'build': 'nvr2'
            }
        }], [], [], [{
            'brew': {
                'build': 'nvr4'
            }
        }], [{
            'brew': {
                'build': 'nvr5'
            }
        }], []]

        ret = self.px._filter_auto_rebuild_nvrs(reg_repo_info)
        expected = {'nvr1', 'nvr2', 'nvr4', 'nvr5'}
        self.assertEqual(ret, expected)
        self.assertEqual(my_page.call_count, 5)
        # use .join() over the set because set iteration order is not guaranteed
        my_page.assert_has_calls([
            call(
                'repositories/registry/reg1/repository/rep1/tag/tag1', {
                    'include': 'data.brew.build',
                    'filter': f'brew.build=in=({",".join({"nvr1", "nvr2"})})'
                }),
            call(
                'repositories/registry/reg1/repository/rep1/tag/tag2', {
                    'include': 'data.brew.build',
                    'filter': f'brew.build=in=({",".join({"nvr1", "nvr2"})})'
                }),
            call('repositories/registry/reg1/repository/rep2/tag/latest', {
                'include': 'data.brew.build',
                'filter': 'brew.build=in=(nvr3)'
            }),
            call(
                'repositories/registry/reg2/repository/rep1/tag/tag1', {
                    'include': 'data.brew.build',
                    'filter': f'brew.build=in=({",".join({"nvr4","nvr5"})})'
                }),
            call(
                'repositories/registry/reg2/repository/rep1/tag/tag2', {
                    'include': 'data.brew.build',
                    'filter': f'brew.build=in=({",".join({"nvr4","nvr5"})})'
                }),
        ])

    @patch('freshmaker.pyxis.Pyxis._filter_auto_rebuild_nvrs')
    @patch('freshmaker.pyxis.Pyxis._add_repositories_info')
    def test_get_auto_rebuild_tagged_images(self, info, tag_filter):
        tag_filter.return_value = {'nvr1', 'nvr2', 'nvr4', 'nvr5'}
        # change nvr for the second image to have more variations to test
        self.images[1]['brew']['build'] = 'nvr1'

        ret = self.px.get_auto_rebuild_tagged_images(self.images)

        info.assert_called_once_with({
            ('reg1', 'repo1'): {
                'nvrs': {'s2i-1-2'}
            },
            ('reg2', 'repo2'): {
                'nvrs': {'s2i-1-2', 'nvr1'}
            },
            ('reg3', 'repo3'): {
                'nvrs': {'s2i-1-2'}
            },
            ('reg4', 'repo4'): {
                'nvrs': {'s2i-1-2'}
            },
        })

        expected_ret = {'nvr1', 'nvr2', 'nvr4', 'nvr5'}
        self.assertEqual(ret, expected_ret)

    @patch('freshmaker.pyxis.Pyxis._filter_auto_rebuild_nvrs')
    @patch('freshmaker.pyxis.Pyxis._add_repositories_info')
    def test_no_nvr_and_repos(self, info, tag_filter):
        del self.images[0]['brew']['build']
        del self.images[1]['repositories']
        self.images[1]['brew']['build'] = 'nvr1'

        with self.assertLogs('freshmaker', level='WARNING') as log:
            self.px.get_auto_rebuild_tagged_images(self.images)
            self.assertTrue('One of bundle images doesn\'t have brew.build' in
                            log.output[0])
            self.assertTrue('Bundle image nvr1 doesn\'t have repositories set'
                            in log.output[1])
class HandleBotasAdvisory(ContainerBuildHandler):
    """
    Handles an event created when an advisory filed by BOTAS transitions
    to the SHIPPED_LIVE state.
    """
    name = "HandleBotasAdvisory"

    def __init__(self, pyxis=None):
        super().__init__()
        if pyxis:
            self._pyxis = pyxis
        else:
            if not conf.pyxis_server_url:
                raise ValueError("'PYXIS_SERVER_URL' parameter should be set")
            self._pyxis = Pyxis(conf.pyxis_server_url)

        if not conf.freshmaker_root_url or "://" not in conf.freshmaker_root_url:
            raise ValueError(
                "'FRESHMAKER_ROOT_URL' parameter should be set to "
                "a valid URL")

    def can_handle(self, event):
        if (isinstance(event, BotasErrataShippedEvent)
                and 'docker' in event.advisory.content_types):
            return True

        return False

    def handle(self, event):
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Mapping of original build nvrs to rebuilt nvrs in advisory
        nvrs_mapping = self._create_original_to_rebuilt_nvrs_map()

        original_nvrs = nvrs_mapping.keys()
        self.log_info(
            "Orignial nvrs of build in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))

        # Get image manifest_list_digest for all original images, manifest_list_digest is used
        # in pullspecs in bundle's related images
        original_digests_by_nvr = {}
        original_nvrs_by_digest = {}
        for nvr in original_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                original_digests_by_nvr[nvr] = digest
                original_nvrs_by_digest[digest] = nvr
            else:
                log.warning(
                    f"Image manifest_list_digest not found for original image {nvr} in Pyxis, "
                    "skip this image")

        if not original_digests_by_nvr:
            msg = f"None of the original images have digests in Pyxis: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Get image manifest_list_digest for all rebuilt images, manifest_list_digest is used
        # in pullspecs of bundle's related images
        rebuilt_digests_by_nvr = {}
        rebuilt_nvrs = nvrs_mapping.values()
        for nvr in rebuilt_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                rebuilt_digests_by_nvr[nvr] = digest
            else:
                log.warning(
                    f"Image manifest_list_digest not found for rebuilt image {nvr} in Pyxis, "
                    "skip this image")

        if not rebuilt_digests_by_nvr:
            msg = f"None of the rebuilt images have digests in Pyxis: {','.join(rebuilt_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        # A mapping of digests to bundle metadata. This metadata is used
        # for the CSV metadata updates.
        bundle_mds_by_digest = {}

        # get bundle digests for original images
        bundle_digests_by_related_nvr = {}
        for image_nvr, image_digest in original_digests_by_nvr.items():
            bundles = self._pyxis.get_bundles_by_related_image_digest(
                image_digest, all_bundles)
            if not bundles:
                log.info(
                    f"No latest bundle image with the related image of {image_nvr}"
                )
                continue

            for bundle in bundles:
                bundle_digest = bundle['bundle_path_digest']
                bundle_mds_by_digest[bundle_digest] = bundle
                bundle_digests_by_related_nvr.setdefault(
                    image_nvr, []).append(bundle_digest)

        if not bundle_digests_by_related_nvr:
            msg = "None of the original images have related bundles, skip."
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Mapping of bundle digest to bundle data
        # {
        #     digest: {
        #         "images": [image_amd64, image_aarch64],
        #         "nvr": NVR,
        #         "auto_rebuild": True/False,
        #         "osbs_pinning": True/False,
        #         "pullspecs": [...],
        #     }
        # }
        bundles_by_digest = {}
        default_bundle_data = {
            'images': [],
            'nvr': None,
            'auto_rebuild': False,
            'osbs_pinning': False,
            # CSV modifications for the rebuilt bundle image
            'pullspecs': [],
            'append': {},
            'update': {},
        }

        # Get images for each bundle digest; a bundle digest can have
        # multiple images with different arches.
        for digest in bundle_mds_by_digest:
            bundles = self._pyxis.get_images_by_digest(digest)
            # If no bundle image found, just skip this bundle digest
            if not bundles:
                continue

            bundles_by_digest.setdefault(digest,
                                         copy.deepcopy(default_bundle_data))
            bundles_by_digest[digest]['nvr'] = bundles[0]['brew']['build']
            bundles_by_digest[digest]['images'] = bundles

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)

        # For each bundle, check whether it should be rebuilt by comparing the
        # repository's auto_rebuild_tags with the bundle's tags
        for digest, bundle_data in bundles_by_digest.items():
            bundle_nvr = bundle_data['nvr']

            # Images are for different arches; just check against the first image
            image = bundle_data['images'][0]
            if self.image_has_auto_rebuild_tag(image):
                bundle_data['auto_rebuild'] = True

            # Fetch buildinfo
            buildinfo = koji_api.get_build(bundle_nvr)
            related_images = (buildinfo.get('extra', {}).get('image', {}).get(
                'operator_manifests', {}).get('related_images', {}))
            bundle_data['osbs_pinning'] = related_images.get(
                'created_by_osbs', False)
            # Save the original pullspecs
            bundle_data['pullspecs'] = related_images.get('pullspecs', [])

        # Digests of bundles to be rebuilt
        to_rebuild_digests = set()

        # Now for each bundle, replace the original digest with rebuilt
        # digest (override pullspecs)
        for digest, bundle_data in bundles_by_digest.items():
            # Override pullspecs only when auto_rebuild is enabled and OSBS-pinning
            # mechanism is used.
            if not (bundle_data['auto_rebuild']
                    and bundle_data['osbs_pinning']):
                continue

            csv_name = bundle_mds_by_digest[digest]['csv_name']
            version = bundle_mds_by_digest[digest]['version']
            bundle_data.update(self._get_csv_updates(csv_name, version))

            for pullspec in bundle_data['pullspecs']:
                # A pullspec item example:
                # {
                #   'new': 'registry.example.io/repo/example-operator@sha256:<sha256-value>'
                #   'original': 'registry.example.io/repo/example-operator:v2.2.0',
                #   'pinned': True
                # }

                # A pullspec path is in the format "registry/repository@digest"
                pullspec_elems = pullspec.get('new').split('@')
                old_digest = pullspec_elems[1]

                if old_digest not in original_nvrs_by_digest:
                    # This related image is not one of the original images
                    continue

                # This related image is one of our original images
                old_nvr = original_nvrs_by_digest[old_digest]
                new_nvr = nvrs_mapping[old_nvr]
                new_digest = rebuilt_digests_by_nvr[new_nvr]

                # Replace the old digest with new digest
                pullspec_elems[1] = new_digest
                new_pullspec = '@'.join(pullspec_elems)
                pullspec['new'] = new_pullspec
                # Always set pinned to True when it was replaced by Freshmaker
                # since it indicates that the pullspec was modified from the
                # original pullspec
                pullspec['pinned'] = True

                # Once a pullspec in this bundle has been overridden, add this
                # bundle to the rebuild list
                to_rebuild_digests.add(digest)

        if not to_rebuild_digests:
            msg = f"No bundle images to rebuild for advisory {event.advisory.name}"
            self.log_info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            return []

        builds = self._prepare_builds(db_event, bundles_by_digest,
                                      to_rebuild_digests)

        # Reset context to db_event.
        self.set_context(db_event)

        self.start_to_build_images(builds)
        msg = f"Advisory {db_event.search_key}: Rebuilding " \
              f"{len(db_event.builds.all())} bundle images."
        db_event.transition(EventState.BUILDING, msg)

        return []

    @classmethod
    def _get_csv_updates(cls, csv_name, version):
        """
        Determine the CSV updates required for the bundle image.

        :param str csv_name: the name field in the bundle's ClusterServiceVersion file
        :param str version: the version of the bundle image being rebuilt
        :return: a dictionary of the CSV updates needed
        :rtype: dict
        """
        csv_modifications = {}
        # Make sure that OLM will skip the version being rebuilt when upgrading to the rebuilt
        # version
        csv_modifications['append'] = {
            'spec': {
                'skips': [version],
            }
        }

        new_version, fm_suffix = cls._get_rebuild_bundle_version(version)
        new_csv_name = cls._get_csv_name(csv_name, version, new_version,
                                         fm_suffix)
        csv_modifications['update'] = {
            'metadata': {
                # Update the name of the CSV to something that uniquely
                # identifies the rebuild
                'name': new_csv_name,
                # Declare that this rebuild is a substitute for the bundle being rebuilt
                'substitutes-for': version,
            },
            'spec': {
                # Update the version of the rebuild to be unique and a newer
                # version than the version of the bundle being rebuilt
                'version': new_version,
            }
        }

        return csv_modifications
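
    # Illustrative output sketch (hypothetical names; timestamp borrowed from
    # the docstring below): for csv_name="example-operator.v1.2.3" and
    # version="1.2.3" this returns roughly:
    #
    #     {
    #         'append': {'spec': {'skips': ['1.2.3']}},
    #         'update': {
    #             'metadata': {
    #                 'name': 'example-operator.v1.2.3+0.1616457250.patched',
    #                 'substitutes-for': '1.2.3',
    #             },
    #             'spec': {'version': '1.2.3+0.1616457250.patched'},
    #         },
    #     }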

    @classmethod
    def _get_rebuild_bundle_version(cls, version):
        """
        Get a bundle version for the Freshmaker rebuild of the bundle image.

        Examples:
            1.2.3 => 1.2.3+0.$timestamp.patched (no build ID and not a rebuild)
            1.2.3+48273 => 1.2.3+48273.0.$timestamp.patched (build ID and not a rebuild)
            1.2.3+48273.0.1616457250.patched => 1.2.3+48273.0.$timestamp.patched (build ID and a rebuild)

        :param str version: the version of the bundle image being rebuilt
        :return: a tuple of the bundle version of the Freshmaker rebuild of the bundle image and
            the suffix that was added by Freshmaker
        :rtype: tuple(str, str)
        """
        parsed_version = semver.VersionInfo.parse(version)
        # Strip off the microseconds of the timestamp
        timestamp = int(datetime.utcnow().timestamp())
        new_fm_suffix = f'0.{timestamp}.patched'
        if parsed_version.build:
            # Check if the bundle was a Freshmaker rebuild
            fm_suffix_search = re.search(r'(?P<fm_suffix>0\.\d+\.patched)$',
                                         parsed_version.build)
            if fm_suffix_search:
                fm_suffix = fm_suffix_search.groupdict()['fm_suffix']
                # Get the build without the Freshmaker suffix. This may include a build ID
                # from the original build before Freshmaker rebuilt it or be empty.
                build_wo_fm_suffix = parsed_version.build[:-len(fm_suffix)]
                new_build = f"{build_wo_fm_suffix}{new_fm_suffix}"
            else:
                # This was not previously rebuilt by Freshmaker so just append the suffix
                # to the existing build ID with '.' separating it.
                new_build = f"{parsed_version.build}.{new_fm_suffix}"
        else:
            # If there is no existing build ID, make the Freshmaker suffix the build ID
            new_build = new_fm_suffix

        new_version = str(parsed_version.replace(build=new_build))

        return new_version, new_fm_suffix

    @staticmethod
    def _get_csv_name(csv_name, version, rebuild_version, fm_suffix):
        """
        Get a bundle CSV name for the Freshmaker rebuild of the bundle image.

        :param str csv_name: the name of the ClusterServiceVersion (CSV) file of the bundle image
        :param str version: the version of the bundle image being rebuilt
        :param str rebuild_version: the new version being assigned by Freshmaker for the rebuild
        :param str fm_suffix: the portion of rebuild_version that was generated by Freshmaker
        :return: the bundle ClusterServiceVersion (CSV) name of the Freshmaker rebuild of the bundle
            image
        :rtype: str
        """
        if version in csv_name:
            return csv_name.replace(version, rebuild_version)
        else:
            return f'{csv_name}.{fm_suffix}'
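
    # Behavior sketch (hypothetical names): with version "1.2.3" and
    # rebuild_version "1.2.3+0.1616457250.patched", a CSV name embedding the
    # version is rewritten in place:
    #
    #     "example-operator.v1.2.3" -> "example-operator.v1.2.3+0.1616457250.patched"
    #
    # while a CSV name that does not embed the version gets the Freshmaker
    # suffix appended:
    #
    #     "example-operator" -> "example-operator.0.1616457250.patched"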

    def get_published_original_nvr(self, rebuilt_nvr):
        """
        Search for an original build that has been built and published to a
        repository, and get its original_nvr.

        :param str rebuilt_nvr: rebuilt NVR to look build by
        :rtype: str or None
        :return: original NVR from the first published FM build for given NVR
        """
        original_nvr = None
        # there should be at most one matching artifact build in the database;
        # otherwise one_or_none() raises an error
        artifact_build = db.session.query(ArtifactBuild).filter(
            ArtifactBuild.rebuilt_nvr == rebuilt_nvr,
            ArtifactBuild.type == ArtifactType.IMAGE.value,
        ).one_or_none()
        # recursively search for original artifact build
        if artifact_build is not None:
            original_nvr = artifact_build.original_nvr

            # check if image is published
            request_params = {'include': 'data.repositories', 'page_size': 1}
            images = self._pyxis._pagination(f'images/nvr/{original_nvr}',
                                             request_params)
            if not images:
                return None
            # stop recursion if the image is published in some repo
            if any(repo['published']
                   for repo in images[0].get('repositories')):
                return original_nvr

            next_nvr = self.get_published_original_nvr(original_nvr)
            if next_nvr is not None:
                original_nvr = next_nvr

        return original_nvr
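
    # Recursion sketch (hypothetical NVRs): if foo-1-3 was rebuilt from
    # foo-1-2, which was itself rebuilt from foo-1-1, and only foo-1-1 is
    # published, get_published_original_nvr("foo-1-3") walks the
    # rebuilt_nvr -> original_nvr records back and returns "foo-1-1".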

    def image_has_auto_rebuild_tag(self, image):
        """ Check if image has a tag enabled for auto rebuild.

        :param dict image: Dict representation of an image entity in Pyxis.
        :rtype: bool
        :return: True if image has a tag enabled for auto rebuild in repository, otherwise False.
        """
        for repo in image['repositories']:
            # Skip unpublished repository
            if not repo['published']:
                continue

            auto_rebuild_tags = self._pyxis.get_auto_rebuild_tags(
                repo['registry'], repo['repository'])
            tags = [t['name'] for t in repo.get('tags', [])]
            if set(auto_rebuild_tags) & set(tags):
                return True
        return False
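
    # Minimal data sketch (hypothetical values): an image entity like
    #
    #     {"repositories": [{"published": True,
    #                        "registry": "reg", "repository": "repo",
    #                        "tags": [{"name": "latest"}]}]}
    #
    # has an auto rebuild tag if get_auto_rebuild_tags("reg", "repo")
    # returns a list containing "latest".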

    def _create_original_to_rebuilt_nvrs_map(self):
        """
        Create a mapping of original build NVRs to rebuilt NVRs in the
        advisory, including NVRs of builds from the blocking advisories.

        :rtype: dict
        :return: map of the original NVRs as keys and rebuilt NVRs as values
        """
        nvrs_mapping = {}

        # Get builds from all blocking advisories
        blocking_advisories_builds = \
            Errata().get_blocking_advisories_builds(self.event.advisory.errata_id)
        # Get builds NVRs from the advisory attached to the message/event and
        # then get original NVR for every build
        for product_info in self.event.advisory.builds.values():
            for build in product_info['builds']:
                # For every shipped NVR, search for the first build that
                # triggered the chain of rebuilds to get its original NVR
                original_nvr = self.get_published_original_nvr(build['nvr'])
                if original_nvr is None:
                    continue
                nvrs_mapping[original_nvr] = build['nvr']
                build_nvr = parse_nvr(build['nvr'])

                # Check builds from blocking advisories and add all of those
                # with matching package names and versions to the mapping
                for block_build in blocking_advisories_builds:
                    block_build_nvr = parse_nvr(block_build)
                    if block_build_nvr['name'] == build_nvr['name'] and \
                            block_build_nvr['version'] == build_nvr['version']:
                        nvrs_mapping[block_build] = build['nvr']
        return nvrs_mapping
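
    # Shape sketch (hypothetical NVRs): the returned mapping pairs each
    # original NVR with the shipped rebuilt NVR, e.g.
    #
    #     {
    #         "foo-1-1": "foo-1-2",    # original from the advisory's build
    #         "foo-1-1.1": "foo-1-2",  # same-name/version build from a blocker
    #     }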

    def _prepare_builds(self, db_event, bundles_by_digest, to_rebuild_digests):
        """
        Prepare a models.ArtifactBuild instance for every bundle that will
        be rebuilt.

        :param models.Event db_event: database event that will contain builds
        :param dict bundles_by_digest: mapping of bundle digest to bundle data
        :param list to_rebuild_digests: digests of bundles to rebuild
        :return: builds that are already in the database and ready to be
            submitted to Brew
        :rtype: list
        """
        builds = []
        csv_mod_url = conf.freshmaker_root_url + "/api/2/pullspec_overrides/{}"
        for digest in to_rebuild_digests:
            bundle = bundles_by_digest[digest]
            # Reset context to db_event for each iteration before
            # the ArtifactBuild is created.
            self.set_context(db_event)

            rebuild_reason = RebuildReason.DIRECTLY_AFFECTED.value
            bundle_name = koji.parse_NVR(bundle["nvr"])["name"]

            build = self.record_build(db_event,
                                      bundle_name,
                                      ArtifactType.IMAGE,
                                      state=ArtifactBuildState.PLANNED.value,
                                      original_nvr=bundle["nvr"],
                                      rebuild_reason=rebuild_reason)

            # Set context to particular build so logging shows this build
            # in case of error.
            self.set_context(build)

            build.transition(ArtifactBuildState.PLANNED.value, "")

            additional_data = ContainerImage.get_additional_data_from_koji(
                bundle["nvr"])
            build.build_args = json.dumps({
                "repository":
                additional_data["repository"],
                "commit":
                additional_data["commit"],
                "target":
                additional_data["target"],
                "branch":
                additional_data["git_branch"],
                "arches":
                additional_data["arches"],
                "operator_csv_modifications_url":
                csv_mod_url.format(build.id),
            })
            build.bundle_pullspec_overrides = {
                "append": bundle["append"],
                "pullspecs": bundle["pullspecs"],
                "update": bundle["update"],
            }

            db.session.commit()
            builds.append(build)
        return builds
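
# A minimal usage sketch (not part of the source): the `pyxis` parameter of
# __init__ above exists so tests can inject a stub client instead of relying
# on conf.pyxis_server_url, e.g.
#
#     from unittest.mock import create_autospec
#     from freshmaker.pyxis import Pyxis
#
#     fake_pyxis = create_autospec(Pyxis)
#     handler = HandleBotasAdvisory(pyxis=fake_pyxis)
#
# With no argument, the handler constructs a real Pyxis client from
# conf.pyxis_server_url.
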
class HandleBotasAdvisory(ContainerBuildHandler):
    """
    Handles an event created when an advisory filed by BOTAS transitions
    to the SHIPPED_LIVE state.
    """
    name = "HandleBotasAdvisory"
    # This prefix should be added to the event reason when skipping the
    # event, because Release Driver checks the event's reason for certain
    # prefixes to determine whether there was an error in bundle processing.
    _no_bundle_prefix = "No bundles to rebuild: "

    def __init__(self, pyxis=None):
        super().__init__()
        if pyxis:
            self._pyxis = pyxis
        else:
            if not conf.pyxis_server_url:
                raise ValueError("'PYXIS_SERVER_URL' parameter should be set")
            self._pyxis = Pyxis(conf.pyxis_server_url)

        if not conf.freshmaker_root_url or "://" not in conf.freshmaker_root_url:
            raise ValueError(
                "'FRESHMAKER_ROOT_URL' parameter should be set to "
                "a valid URL")
        # Currently processed event
        self.event = None

    def can_handle(self, event):
        if (isinstance(event, BotasErrataShippedEvent)
                and 'docker' in event.advisory.content_types):
            return True
        # This handler can handle manual bundle rebuilds too
        if isinstance(event, ManualBundleRebuild):
            return True

        return False

    def handle(self, event):
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        if isinstance(event, ManualBundleRebuild):
            bundles_to_rebuild = self._handle_manual_rebuild(db_event)
        else:
            bundles_to_rebuild = self._handle_auto_rebuild(db_event)

        if not bundles_to_rebuild:
            return []

        builds = self._prepare_builds(db_event, bundles_to_rebuild)

        # Reset context to db_event.
        self.set_context(db_event)

        self.start_to_build_images(builds)
        msg = f"Advisory {db_event.search_key}: Rebuilding " \
              f"{len(db_event.builds.all())} bundle images."
        db_event.transition(EventState.BUILDING, msg)

        return []

    def _handle_auto_rebuild(self, db_event):
        """
        Handle auto rebuild for an advisory created by BOTAS.

        :param db_event: database event that represents the rebuild event
        :rtype: list
        :return: list of bundles that should be rebuilt
        """
        # Mapping of original build nvrs to rebuilt nvrs in advisory
        nvrs_mapping = self._create_original_to_rebuilt_nvrs_map()

        original_nvrs = nvrs_mapping.keys()
        self.log_info(
            "Orignial nvrs of build in the advisory #{0} are: {1}".format(
                self.event.advisory.errata_id, " ".join(original_nvrs)))

        # Get image manifest_list_digest for all original images; the
        # manifest_list_digest is used in the pullspecs of the bundle's
        # related images
        original_digests_by_nvr = {}
        original_nvrs_by_digest = {}
        for nvr in original_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                original_digests_by_nvr[nvr] = digest
                original_nvrs_by_digest[digest] = nvr
            else:
                log.warning(
                    f"Image manifest_list_digest not found for original image {nvr} in Pyxis, "
                    "skip this image")

        if not original_digests_by_nvr:
            msg = f"None of the original images have digests in Pyxis: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Get image manifest_list_digest for all rebuilt images; the
        # manifest_list_digest is used in the pullspecs of the bundle's
        # related images
        rebuilt_digests_by_nvr = {}
        rebuilt_nvrs = nvrs_mapping.values()
        for nvr in rebuilt_nvrs:
            # Don't require that the manifest list digest be published in this
            # case because there's a delay between when an advisory is shipped
            # and when the published repositories entry is populated
            digest = self._pyxis.get_manifest_list_digest_by_nvr(
                nvr, must_be_published=False)
            if digest:
                rebuilt_digests_by_nvr[nvr] = digest
            else:
                log.warning(
                    f"Image manifest_list_digest not found for rebuilt image {nvr} in Pyxis, "
                    "skip this image")

        if not rebuilt_digests_by_nvr:
            msg = f"None of the rebuilt images have digests in Pyxis: {','.join(rebuilt_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)
        self.log_debug(
            "There are %d bundles that are latest in a channel in the found index images",
            len(all_bundles),
        )

        # A mapping of digests to bundle metadata. This metadata is used
        # for the CSV metadata updates.
        bundle_mds_by_digest = {}

        # get bundle digests for original images
        bundle_digests_by_related_nvr = {}
        for image_nvr, image_digest in original_digests_by_nvr.items():
            bundles = self._pyxis.get_bundles_by_related_image_digest(
                image_digest, all_bundles)
            if not bundles:
                log.info(
                    f"No latest bundle image with the related image of {image_nvr}"
                )
                continue

            for bundle in bundles:
                bundle_digest = bundle['bundle_path_digest']
                bundle_mds_by_digest[bundle_digest] = bundle
                bundle_digests_by_related_nvr.setdefault(
                    image_nvr, []).append(bundle_digest)

        if not bundle_digests_by_related_nvr:
            msg = "None of the original images have related bundles, skip."
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []
        self.log_info("Found %d bundles with relevant related images",
                      len(bundle_digests_by_related_nvr))

        # Mapping of bundle digest to bundle data
        # {
        #     digest: {
        #         "images": [image_amd64, image_aarch64],
        #         "nvr": NVR,
        #         "auto_rebuild": True/False,
        #         "osbs_pinning": True/False,
        #         "pullspecs": [...],
        #     }
        # }
        bundles_by_digest = {}
        default_bundle_data = {
            'images': [],
            'nvr': None,
            'auto_rebuild': False,
            'osbs_pinning': False,
            # CSV modifications for the rebuilt bundle image
            'pullspec_replacements': [],
            'update': {},
        }

        # Get images for each bundle digest; a bundle digest can have multiple
        # images with different arches.
        for digest in bundle_mds_by_digest:
            bundles = self._pyxis.get_images_by_digest(digest)
            # If no bundle image found, just skip this bundle digest
            if not bundles:
                self.log_warn(
                    'The bundle digest %r was not found in Pyxis. Skipping.',
                    digest)
                continue

            bundles_by_digest.setdefault(digest,
                                         copy.deepcopy(default_bundle_data))
            bundles_by_digest[digest]['nvr'] = bundles[0]['brew']['build']
            bundles_by_digest[digest]['images'] = bundles

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)

        # For each bundle, check whether it should be rebuilt by comparing the
        # auto_rebuild_tags of the repository with the bundle's tags
        for digest, bundle_data in bundles_by_digest.items():
            bundle_nvr = bundle_data['nvr']

            # Images are for different arches; just check against the first image
            image = bundle_data['images'][0]
            if self.image_has_auto_rebuild_tag(image):
                bundle_data['auto_rebuild'] = True

            # Fetch buildinfo
            buildinfo = koji_api.get_build(bundle_nvr)
            related_images = (
                buildinfo.get('extra', {})
                .get('image', {})
                .get('operator_manifests', {})
                .get('related_images', {})
            )
            bundle_data['osbs_pinning'] = related_images.get(
                'created_by_osbs', False)
            # Save the original pullspecs
            bundle_data['pullspec_replacements'] = related_images.get(
                'pullspecs', [])

        # Digests of bundles to be rebuilt
        to_rebuild_digests = set()

        # Now for each bundle, replace the original digest with rebuilt
        # digest (override pullspecs)
        for digest, bundle_data in bundles_by_digest.items():
            # Override pullspecs only when auto_rebuild is enabled and OSBS-pinning
            # mechanism is used.
            if not (bundle_data['auto_rebuild']
                    and bundle_data['osbs_pinning']):
                self.log_info(
                    'The bundle %r does not have auto-rebuild tags (%r) and/or OSBS pinning (%r)',
                    bundle_data['nvr'],
                    bundle_data['auto_rebuild'],
                    bundle_data['osbs_pinning'],
                )
                continue

            csv_name = bundle_mds_by_digest[digest]['csv_name']
            version = bundle_mds_by_digest[digest]['version']
            bundle_data.update(self._get_csv_updates(csv_name, version))

            for pullspec in bundle_data['pullspec_replacements']:
                # A pullspec item example:
                # {
                #   'new': 'registry.example.io/repo/example-operator@sha256:<sha256-value>',
                #   'original': 'registry.example.io/repo/example-operator:v2.2.0',
                #   'pinned': True,
                #   # used internally during manual rebuilds; it's the old pullspec that was replaced
                #   '_old': 'registry.example.io/repo/example-operator@sha256:<previous-sha256-value>',
                # }

                # A pullspec path is in the format "registry/repository@digest"
                pullspec_elems = pullspec.get('new').split('@')
                old_digest = pullspec_elems[1]

                if old_digest not in original_nvrs_by_digest:
                    # This related image is not one of the original images
                    continue

                # This related image is one of our original images
                old_nvr = original_nvrs_by_digest[old_digest]
                new_nvr = nvrs_mapping[old_nvr]
                new_digest = rebuilt_digests_by_nvr[new_nvr]

                # save pullspec that image had before rebuild
                pullspec['_old'] = pullspec.get('new')

                # Replace the old digest with new digest
                pullspec_elems[1] = new_digest
                new_pullspec = '@'.join(pullspec_elems)
                pullspec['new'] = new_pullspec
                # Always set pinned to True when it was replaced by Freshmaker
                # since it indicates that the pullspec was modified from the
                # original pullspec
                pullspec['pinned'] = True

                # Once a pullspec in this bundle has been overridden, add this
                # bundle to the rebuild list
                self.log_info(
                    'Changing pullspec %r to %r in the bundle %r',
                    pullspec['_old'],
                    pullspec['new'],
                    bundle_data['nvr'],
                )
                to_rebuild_digests.add(digest)

        if not to_rebuild_digests:
            msg = self._no_bundle_prefix + "No bundle images to rebuild for " \
                                           f"advisory {self.event.advisory.name}"
            self.log_info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            db.session.commit()
            return []

        bundles_to_rebuild = list(
            map(lambda x: bundles_by_digest[x], to_rebuild_digests))
        return bundles_to_rebuild
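
    # A worked illustration of the digest swap above (all NVRs and digests
    # are hypothetical): given
    #     nvrs_mapping = {"foo-operator-container-1-1": "foo-operator-container-1-2"}
    #     original_nvrs_by_digest = {"sha256:aaa": "foo-operator-container-1-1"}
    #     rebuilt_digests_by_nvr = {"foo-operator-container-1-2": "sha256:bbb"}
    # the pullspec "registry/repo/foo-operator@sha256:aaa" is rewritten to
    # "registry/repo/foo-operator@sha256:bbb", '_old' keeps the previous value,
    # 'pinned' is set to True, and the containing bundle digest is added to
    # to_rebuild_digests.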

    def _handle_manual_rebuild(self, db_event):
        """
        Handle manual rebuild submitted by Release Driver for an advisory created by Botas

        :param db_event: database event that represents a rebuild event
        :rtype: list
        :return: list of bundles that should be rebuilt
        """
        old_to_new_pullspec_map = self._get_pullspecs_mapping()

        if not old_to_new_pullspec_map:
            msg = self._no_bundle_prefix + 'None of the bundle images have ' \
                                           'applicable pullspecs to replace'
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)
        rebuild_nvr_to_pullspecs_map = dict()
        # compare replaced pullspecs with pullspecs in 'container_images' and
        # map the NVR of each bundle that should be rebuilt to its pullspecs
        for container_image_nvr in self.event.container_images:
            artifact_build = db.session.query(ArtifactBuild).filter(
                ArtifactBuild.rebuilt_nvr == container_image_nvr,
                ArtifactBuild.type == ArtifactType.IMAGE.value,
            ).one_or_none()
            pullspecs = []
            # Try to find the build in the FM database; if it's not there, check Brew
            if artifact_build:
                pullspecs = artifact_build.bundle_pullspec_overrides[
                    "pullspec_replacements"]
            else:
                # Fetch buildinfo from Koji
                buildinfo = koji_api.get_build(container_image_nvr)
                # Get the original pullspecs
                pullspecs = (
                    buildinfo.get('extra', {})
                    .get('image', {})
                    .get('operator_manifests', {})
                    .get('related_images', {})
                    .get('pullspecs', [])
                )

            for pullspec in pullspecs:
                if pullspec.get('new') not in old_to_new_pullspec_map:
                    continue
                # use newer pullspecs in the image
                pullspec['new'] = old_to_new_pullspec_map[pullspec['new']]
                rebuild_nvr_to_pullspecs_map[container_image_nvr] = pullspecs

        if not rebuild_nvr_to_pullspecs_map:
            msg = self._no_bundle_prefix + 'None of the container images have ' \
                                           'applicable pullspecs from the input bundle images'
            log.info(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # list with metadata about every bundle to rebuild
        to_rebuild_bundles = []
        # fill 'update' and 'pullspec_replacements' fields for bundles to rebuild
        for nvr, pullspecs in rebuild_nvr_to_pullspecs_map.items():
            bundle_digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if bundle_digest is not None:
                bundles = self._pyxis.get_bundles_by_digest(bundle_digest)
                temp_bundle = bundles[0]
                csv_updates = (self._get_csv_updates(temp_bundle['csv_name'],
                                                     temp_bundle['version']))
                to_rebuild_bundles.append({
                    'nvr': nvr,
                    'update': csv_updates['update'],
                    'pullspec_replacements': pullspecs,
                })
            else:
                log.warning('Can\'t find manifest_list_digest for bundle '
                            f'"{nvr}" in Pyxis')

        if not to_rebuild_bundles:
            msg = 'Can\'t find digests for any of the bundles to rebuild'
            log.warning(msg)
            db_event.transition(EventState.FAILED, msg)
            return []

        return to_rebuild_bundles

    def _get_pullspecs_mapping(self):
        """
        Get map of all replaced pullspecs from 'bundle_images' provided in an event.

        :rtype: dict
        :return: map of all '_old' pullspecs that were replaced by 'new'
            pullspecs in previous Freshmaker rebuilds
        """
        old_to_new_pullspec_map = dict()
        for bundle_nvr in self.event.bundle_images:
            artifact_build = db.session.query(ArtifactBuild).filter(
                ArtifactBuild.rebuilt_nvr == bundle_nvr,
                ArtifactBuild.type == ArtifactType.IMAGE.value,
            ).one_or_none()
            if artifact_build is None:
                log.warning(
                    f'Can\'t find build for a bundle image "{bundle_nvr}"')
                continue
            pullspec_overrides = artifact_build.bundle_pullspec_overrides
            for pullspec in pullspec_overrides['pullspec_replacements']:
                old_pullspec = pullspec.get('_old', None)
                if old_pullspec is None:
                    continue
                old_to_new_pullspec_map[old_pullspec] = pullspec['new']

        return old_to_new_pullspec_map
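
    # Shape of the returned mapping, sketched with hypothetical pullspecs:
    #     {
    #         "registry/repo/op@sha256:<old-digest>": "registry/repo/op@sha256:<new-digest>",
    #     }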

    @classmethod
    def _get_csv_updates(cls, csv_name, version):
        """
        Determine the CSV updates required for the bundle image.

        :param str csv_name: the name field in the bundle's ClusterServiceVersion file
        :param str version: the version of the bundle image being rebuilt
        :return: a dictionary of the CSV updates needed
        :rtype: dict
        """
        csv_modifications = {}
        new_version, fm_suffix = cls._get_rebuild_bundle_version(version)
        new_csv_name = cls._get_csv_name(csv_name, version, new_version,
                                         fm_suffix)
        csv_modifications['update'] = {
            'metadata': {
                # Update the name of the CSV to something that uniquely
                # identifies the rebuild
                'name': new_csv_name,
                # Declare that this rebuild is a substitute of the bundle being rebuilt
                'annotations': {
                    'olm.substitutesFor': version
                }
            },
            'spec': {
                # Update the version of the rebuild to be unique and a newer
                # version than that of the bundle being rebuilt
                'version': new_version,
            }
        }

        return csv_modifications
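
    # For illustration, a call such as
    # _get_csv_updates("example-operator.v1.2.3", "1.2.3") (hypothetical CSV
    # name and version) would yield roughly:
    #     {
    #         "update": {
    #             "metadata": {
    #                 "name": "example-operator.v1.2.3-0.<timestamp>.p",
    #                 "annotations": {"olm.substitutesFor": "1.2.3"},
    #             },
    #             "spec": {"version": "1.2.3+0.<timestamp>.p"},
    #         }
    #     }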

    @classmethod
    def _get_rebuild_bundle_version(cls, version):
        """
        Get a bundle version for the Freshmaker rebuild of the bundle image.

        Examples:
            1.2.3 => 1.2.3+0.$timestamp.p (no build ID and not a rebuild)
            1.2.3+48273 => 1.2.3+48273.0.$timestamp.p (build ID and not a rebuild)
            1.2.3+48273.0.1616457250.p => 1.2.3+48273.0.$timestamp.p (build ID and a rebuild)

        :param str version: the version of the bundle image being rebuilt
        :return: a tuple of the bundle version of the Freshmaker rebuild of the bundle image and
            the suffix that was added by Freshmaker
        :rtype: tuple(str, str)
        """
        parsed_version = semver.VersionInfo.parse(version)
        # Strip off the microseconds of the timestamp
        timestamp = int(datetime.utcnow().timestamp())
        new_fm_suffix = f'0.{timestamp}.p'
        if parsed_version.build:
            # Check if the bundle was a Freshmaker rebuild. Include .patched
            # for backwards compatibility with the old suffix.
            fm_suffix_search = re.search(
                r'(?P<fm_suffix>0\.\d+\.(?:p|patched))$', parsed_version.build)
            if fm_suffix_search:
                fm_suffix = fm_suffix_search.groupdict()['fm_suffix']
                # Get the build without the Freshmaker suffix. This may include a build ID
                # from the original build before Freshmaker rebuilt it or be empty.
                build_wo_fm_suffix = parsed_version.build[:-len(fm_suffix)]
                new_build = f"{build_wo_fm_suffix}{new_fm_suffix}"
            else:
                # This was not previously rebuilt by Freshmaker so just append the suffix
                # to the existing build ID with '.' separating it.
                new_build = f"{parsed_version.build}.{new_fm_suffix}"
        else:
            # If there is no existing build ID, make the Freshmaker suffix the build ID
            new_build = new_fm_suffix

        # Don't use the replace method in order to support semver 2.8.1
        new_version_dict = parsed_version._asdict()
        new_version_dict["build"] = new_build
        new_version = str(semver.VersionInfo(**new_version_dict))

        return new_version, new_fm_suffix
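
    # A minimal sketch of the semver mechanics relied on above (the timestamp
    # is hypothetical): parsing keeps the build metadata separate, so only the
    # build component needs to be rewritten.
    #     >>> v = semver.VersionInfo.parse("1.2.3+48273")
    #     >>> v.build
    #     '48273'
    #     >>> str(semver.VersionInfo(**{**v._asdict(), "build": "48273.0.1616457250.p"}))
    #     '1.2.3+48273.0.1616457250.p'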

    @staticmethod
    def _get_csv_name(csv_name, version, rebuild_version, fm_suffix):
        """
        Get a bundle CSV name for the Freshmaker rebuild of the bundle image.

        :param str csv_name: the name of the ClusterServiceVersion (CSV) file of the bundle image
        :param str version: the version of the bundle image being rebuilt
        :param str rebuild_version: the new version being assigned by Freshmaker for the rebuild
        :param str fm_suffix: the portion of rebuild_version that was generated by Freshmaker
        :return: the bundle ClusterServiceVersion (CSV) name of the Freshmaker rebuild of the bundle
            image
        :rtype: str
        """
        # The CSV name must be in the format of a valid DNS name, which means the + from the
        # build ID must be replaced. In the event this was a previous Freshmaker rebuild, version
        # may have a build ID that would be the DNS safe version in the CSV name.
        dns_safe_version = version.replace('+', '-')
        if dns_safe_version in csv_name:
            dns_safe_rebuild_version = rebuild_version.replace('+', '-')
            return csv_name.replace(dns_safe_version, dns_safe_rebuild_version)
        else:
            return f'{csv_name}.{fm_suffix}'
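
    # e.g. (hypothetical values):
    #     _get_csv_name("op.v1.2.3-48273", "1.2.3+48273",
    #                   "1.2.3+48273.0.1616457250.p", "0.1616457250.p")
    # returns "op.v1.2.3-48273.0.1616457250.p", because the DNS-safe form of
    # the old version ("1.2.3-48273") is found inside the CSV name and replaced.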

    def get_published_original_nvr(self, rebuilt_nvr):
        """
        Search for an original build, that has been built and published to a
            repository, and get original_nvr from it

        :param str rebuilt_nvr: rebuilt NVR to look build by
        :rtype: str or None
        :return: original NVR from the first published FM build for given NVR
        """
        original_nvr = None
        # there should be at most one matching artifact build in the database,
        # otherwise an error is raised
        artifact_build = db.session.query(ArtifactBuild).filter(
            ArtifactBuild.rebuilt_nvr == rebuilt_nvr,
            ArtifactBuild.type == ArtifactType.IMAGE.value,
        ).one_or_none()
        # recursively search for original artifact build
        if artifact_build is not None:
            original_nvr = artifact_build.original_nvr

            # check if image is published
            request_params = {'include': 'data.repositories', 'page_size': 1}
            images = self._pyxis._pagination(f'images/nvr/{original_nvr}',
                                             request_params)
            if not images:
                return None
            # stop recursion if the image is published in some repo
            if any(repo['published']
                   for repo in images[0].get('repositories')):
                return original_nvr

            next_nvr = self.get_published_original_nvr(original_nvr)
            if next_nvr is not None:
                original_nvr = next_nvr

        return original_nvr
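
    # A sketch of the recursion above (hypothetical NVR chain): if rebuilds
    # were chained as original-1-1 -> rebuilt-1-1.1 -> rebuilt-1-1.2, then
    # get_published_original_nvr("rebuilt-1-1.2") walks back through the
    # ArtifactBuild records until it reaches the first NVR whose image is
    # published in some repository, e.g. "original-1-1".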

    def image_has_auto_rebuild_tag(self, image):
        """ Check if image has a tag enabled for auto rebuild.

        :param dict image: Dict representation of an image entity in Pyxis.
        :rtype: bool
        :return: True if image has a tag enabled for auto rebuild in repository, otherwise False.
        """
        for repo in image['repositories']:
            # Skip unpublished repository
            if not repo['published']:
                continue

            auto_rebuild_tags = self._pyxis.get_auto_rebuild_tags(
                repo['registry'], repo['repository'])
            tags = [t['name'] for t in repo.get('tags', [])]
            if set(auto_rebuild_tags) & set(tags):
                return True

        # It'd be more efficient to do this check first, but the exceptions are edge cases
        # (e.g. testing) and it's best to not use it unless absolutely necessary
        nvr = image['brew']['build']
        parsed_nvr = parse_nvr(nvr)
        nv = f'{parsed_nvr["name"]}-{parsed_nvr["version"]}'
        if nv in conf.bundle_autorebuild_tag_exceptions:
            self.log_info(
                'The bundle %r has an exception for being tagged with an auto-rebuild tag',
                nvr)
            return True

        return False
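
    # e.g. (hypothetical tags): for a published repo whose auto_rebuild_tags
    # are ["latest", "v4.6"] and an image tagged [{"name": "v4.6"}], the
    # intersection {"v4.6"} is non-empty, so the method returns True.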

    def _create_original_to_rebuilt_nvrs_map(self):
        """
        Create a mapping of original build NVRs to rebuilt NVRs in the advisory,
        including NVRs of the builds from the blocking advisories

        :rtype: dict
        :return: map of the original NVRs as keys and rebuilt NVRs as values
        """
        nvrs_mapping = {}

        # Get builds from all blocking advisories
        blocking_advisories_builds = \
            Errata().get_blocking_advisories_builds(self.event.advisory.errata_id)
        # Get build NVRs from the advisory attached to the message/event and
        # then get the original NVR for every build
        for product_info in self.event.advisory.builds.values():
            for build in product_info['builds']:
                # Each build is a single key/value pair, and the key is the build NVR
                build_nvr = next(iter(build))

                # Search for the first build that triggered the chain of rebuilds
                # for every shipped NVR to get original NVR from it
                original_nvr = self.get_published_original_nvr(build_nvr)
                if original_nvr is None:
                    continue
                nvrs_mapping[original_nvr] = build_nvr
                parsed_build_nvr = parse_nvr(build_nvr)

                # Check builds from blocking advisories and add to the mapping
                # all of them, that have overlapping package names
                for block_build in blocking_advisories_builds:
                    block_build_nvr = parse_nvr(block_build)
                    if block_build_nvr['name'] == parsed_build_nvr['name'] and \
                            block_build_nvr['version'] == parsed_build_nvr['version']:
                        nvrs_mapping[block_build] = build_nvr
        return nvrs_mapping
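
    # Shape of the returned mapping (hypothetical NVRs); entries may come from
    # the advisory itself or from a blocking advisory whose build shares the
    # same package name and version:
    #     {"foo-container-1-1": "foo-container-1-2",
    #      "bar-container-2-3": "bar-container-2-4"}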

    def _prepare_builds(self, db_event, to_rebuild_bundles):
        """
        Prepare models.ArtifactBuild instance for every bundle that will be
        rebuilt

        :param models.Event db_event: database event that will contain builds
        :param list to_rebuild_bundles: bundles to rebuild
        :return: builds that are already in the database and ready to be submitted to Brew
        :rtype: list
        """
        builds = []
        csv_mod_url = conf.freshmaker_root_url + "/api/2/pullspec_overrides/{}"
        for bundle in to_rebuild_bundles:
            # Reset context to db_event for each iteration before
            # the ArtifactBuild is created.
            self.set_context(db_event)

            rebuild_reason = RebuildReason.DIRECTLY_AFFECTED.value
            bundle_name = koji.parse_NVR(bundle["nvr"])["name"]

            build = self.record_build(db_event,
                                      bundle_name,
                                      ArtifactType.IMAGE,
                                      state=ArtifactBuildState.PLANNED.value,
                                      original_nvr=bundle["nvr"],
                                      rebuild_reason=rebuild_reason)

            # Set context to particular build so logging shows this build
            # in case of error.
            self.set_context(build)

            build.transition(ArtifactBuildState.PLANNED.value, "")

            additional_data = ContainerImage.get_additional_data_from_koji(
                bundle["nvr"])
            build.build_args = json.dumps({
                "repository": additional_data["repository"],
                "commit": additional_data["commit"],
                "target": additional_data["target"],
                "branch": additional_data["git_branch"],
                "arches": additional_data["arches"],
                # The build system always enforces that bundle images build from
                # "scratch", so there is no parent image. See:
                # https://osbs.readthedocs.io/en/latest/users.html?#operator-manifest-bundle-builds
                "original_parent": None,
                "operator_csv_modifications_url": csv_mod_url.format(build.id),
            })
            build.bundle_pullspec_overrides = {
                "pullspec_replacements": bundle["pullspec_replacements"],
                "update": bundle["update"],
            }

            db.session.commit()
            builds.append(build)
        return builds
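
# A minimal usage sketch for the handler above (the server URL and event are
# hypothetical; in Freshmaker the handler is normally driven by the event
# processing loop rather than being called directly):
#
#     handler = HandleBotasAdvisory(pyxis=Pyxis("https://pyxis.example.com/"))
#     if handler.can_handle(event):
#         builds = handler.handle(event)
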
class HandleBotasAdvisory(ContainerBuildHandler):
    """
    Handles an event created when an advisory filed by BOTAS transitions
    to the SHIPPED_LIVE state
    """
    name = "HandleBotasAdvisory"

    def __init__(self, pyxis=None):
        super().__init__()
        if pyxis:
            self._pyxis = pyxis
        else:
            if not conf.pyxis_server_url:
                raise ValueError("'pyxis_server_url' parameter should be set")
            self._pyxis = Pyxis(conf.pyxis_server_url)

    def can_handle(self, event):
        if (isinstance(event, BotasErrataShippedEvent) and
                'docker' in event.advisory.content_types):
            return True

        return False

    def handle(self, event):
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Get build NVRs from the advisory attached to the message/event and
        # then get the original NVR for every build

        # Mapping of original build nvrs to rebuilt nvrs in advisory
        nvrs_mapping = {}
        for product_info in event.advisory.builds.values():
            for build in product_info['builds']:
                # Search for the first build that triggered the chain of rebuilds
                # for every shipped NVR to get original NVR from it
                original_nvr = self.get_published_original_nvr(build['nvr'])
                if original_nvr is None:
                    continue
                nvrs_mapping[original_nvr] = build['nvr']

        original_nvrs = nvrs_mapping.keys()
        self.log_info(
            "Orignial nvrs of build in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))

        # Get image manifest_list_digest for all original images; the
        # manifest_list_digest is used in the pullspecs of the bundle's
        # related images
        original_digests_by_nvr = {}
        original_nvrs_by_digest = {}
        for nvr in original_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                original_digests_by_nvr[nvr] = digest
                original_nvrs_by_digest[digest] = nvr
            else:
                log.warning(
                    f"Image manifest_list_digest not found for original image {nvr} in Pyxis, "
                    "skip this image"
                )

        if not original_digests_by_nvr:
            msg = f"None of the original images have digests in Pyxis: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Get image manifest_list_digest for all rebuilt images; the
        # manifest_list_digest is used in the pullspecs of the bundle's
        # related images
        rebuilt_digests_by_nvr = {}
        rebuilt_nvrs = nvrs_mapping.values()
        for nvr in rebuilt_nvrs:
            digest = self._pyxis.get_manifest_list_digest_by_nvr(nvr)
            if digest:
                rebuilt_digests_by_nvr[nvr] = digest
            else:
                log.warning(
                    f"Image manifest_list_digest not found for rebuilt image {nvr} in Pyxis, "
                    "skip this image"
                )

        if not rebuilt_digests_by_nvr:
            msg = f"None of the rebuilt images have digests in Pyxis: {','.join(rebuilt_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        # A set of unique bundle digests
        bundle_digests = set()

        # get bundle digests for original images
        bundle_digests_by_related_nvr = {}
        for image_nvr, image_digest in original_digests_by_nvr.items():
            bundles = self._pyxis.get_bundles_by_related_image_digest(
                image_digest, all_bundles
            )
            if not bundles:
                log.info(f"No latest bundle image with the related image of {image_nvr}")
                continue

            for bundle in bundles:
                bundle_digest = bundle['bundle_path_digest']
                bundle_digests.add(bundle_digest)
                bundle_digests_by_related_nvr.setdefault(image_nvr, []).append(bundle_digest)

        if not bundle_digests_by_related_nvr:
            msg = "None of the original images have related bundles, skip."
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        # Mapping of bundle digest to bundle data
        # {
        #     digest: {
        #         "images": [image_amd64, image_aarch64],
        #         "nvr": NVR,
        #         "auto_rebuild": True/False,
        #         "osbs_pinning": True/False,
        #         "pullspecs": [...],
        #     }
        # }
        bundles_by_digest = {}
        default_bundle_data = {
            'images': [],
            'nvr': None,
            'auto_rebuild': False,
            'osbs_pinning': False,
            'pullspecs': [],
        }

        # Get images for each bundle digest; a bundle digest can have multiple
        # images with different arches.
        for digest in bundle_digests:
            bundles = self._pyxis.get_images_by_digest(digest)
            # If no bundle image found, just skip this bundle digest
            if not bundles:
                continue

            bundles_by_digest.setdefault(digest, copy.deepcopy(default_bundle_data))
            bundles_by_digest[digest]['nvr'] = bundles[0]['brew']['build']
            bundles_by_digest[digest]['images'] = bundles

        # Unauthenticated koji session to fetch build info of bundles
        koji_api = KojiService(conf.koji_profile)

        # For each bundle, check whether it should be rebuilt by comparing the
        # auto_rebuild_tags of the repository with the bundle's tags
        for digest, bundle_data in bundles_by_digest.items():
            bundle_nvr = bundle_data['nvr']

            # Images are for different arches; just check against the first image
            image = bundle_data['images'][0]
            if self.image_has_auto_rebuild_tag(image):
                bundle_data['auto_rebuild'] = True

            # Fetch buildinfo
            buildinfo = koji_api.get_build(bundle_nvr)
            related_images = (
                buildinfo.get("extra", {})
                .get("image", {})
                .get("operator_manifests", {})
                .get("related_images", {})
            )
            bundle_data['osbs_pinning'] = related_images.get('created_by_osbs', False)
            # Save the original pullspecs
            bundle_data['pullspecs'] = related_images.get('pullspecs', [])

        # Digests of bundles to be rebuilt
        to_rebuild_digests = set()

        # Now for each bundle, replace the original digest with rebuilt
        # digest (override pullspecs)
        for digest, bundle_data in bundles_by_digest.items():
            # Override pullspecs only when auto_rebuild is enabled and OSBS-pinning
            # mechanism is used.
            if not (bundle_data['auto_rebuild'] and bundle_data['osbs_pinning']):
                continue

            for pullspec in bundle_data['pullspecs']:
                # A pullspec item example:
                # {
                #   'new': 'registry.example.io/repo/example-operator@sha256:<sha256-value>',
                #   'original': 'registry.example.io/repo/example-operator:v2.2.0',
                #   'pinned': True
                # }

                # If related image is not pinned by OSBS, skip
                if not pullspec.get('pinned', False):
                    continue

                # A pullspec path is in the format "registry/repository@digest"
                pullspec_elems = pullspec.get('new').split('@')
                old_digest = pullspec_elems[1]

                if old_digest not in original_nvrs_by_digest:
                    # This related image is not one of the original images
                    continue

                # This related image is one of our original images
                old_nvr = original_nvrs_by_digest[old_digest]
                new_nvr = nvrs_mapping[old_nvr]
                new_digest = rebuilt_digests_by_nvr[new_nvr]

                # Replace the old digest with new digest
                pullspec_elems[1] = new_digest
                new_pullspec = '@'.join(pullspec_elems)
                pullspec['new'] = new_pullspec

                # Once a pullspec in this bundle has been overridden, add this
                # bundle to the rebuild list
                to_rebuild_digests.add(digest)

        # Skip this event because we can't proceed with processing it yet.
        # TODO
        # Now that we have the bundle images' NVRs we can proceed with rebuilding them
        msg = f"Skipping the rebuild of {len(to_rebuild_digests)} bundle images " \
              "due to being blocked on further implementation for now."
        db_event.transition(EventState.SKIPPED, msg)
        return []

    def get_published_original_nvr(self, rebuilt_nvr):
        """
        Search for an original build, that has been built and published to a
            repository, and get original_nvr from it

        :param str rebuilt_nvr: rebuilt NVR to look build by
        :rtype: str or None
        :return: original NVR from the first published FM build for given NVR
        """
        original_nvr = None
        # there should be at most one matching artifact build in the database,
        # otherwise an error is raised
        artifact_build = db.session.query(ArtifactBuild).filter(
            ArtifactBuild.rebuilt_nvr == rebuilt_nvr,
            ArtifactBuild.type == ArtifactType.IMAGE.value,
        ).one_or_none()
        # recursively search for original artifact build
        if artifact_build is not None:
            original_nvr = artifact_build.original_nvr

            # check if image is published
            request_params = {'include': 'data.repositories',
                              'page_size': 1}
            images = self._pyxis._pagination(f'images/nvr/{original_nvr}',
                                             request_params)
            if not images:
                return None
            # stop recursion if the image is published in some repo
            if any(repo['published'] for repo in images[0].get('repositories')):
                return original_nvr

            next_nvr = self.get_published_original_nvr(original_nvr)
            if next_nvr is not None:
                original_nvr = next_nvr

        return original_nvr

    def image_has_auto_rebuild_tag(self, image):
        """ Check if image has a tag enabled for auto rebuild.

        :param dict image: Dict representation of an image entity in Pyxis.
        :rtype: bool
        :return: True if image has a tag enabled for auto rebuild in repository, otherwise False.
        """
        for repo in image['repositories']:
            # Skip unpublished repository
            if not repo['published']:
                continue

            auto_rebuild_tags = self._pyxis.get_auto_rebuild_tags(
                repo['registry'], repo['repository']
            )
            tags = [t['name'] for t in repo.get('tags', [])]
            if set(auto_rebuild_tags) & set(tags):
                return True
        return False
class HandleBotasAdvisory(ContainerBuildHandler):
    """
    Handles an event created when an advisory filed by BOTAS transitions
    to the SHIPPED_LIVE state
    """
    name = "HandleBotasAdvisory"

    def __init__(self, pyxis=None):
        super().__init__()
        if pyxis:
            self._pyxis = pyxis
        else:
            if not conf.pyxis_server_url:
                raise ValueError("'pyxis_server_url' parameter should be set")
            self._pyxis = Pyxis(conf.pyxis_server_url)

    def can_handle(self, event):
        if (isinstance(event, BotasErrataShippedEvent)
                and 'docker' in event.advisory.content_types):
            return True

        return False

    def handle(self, event):
        if event.dry_run:
            self.force_dry_run()
        self.event = event

        db_event = Event.get_or_create_from_event(db.session, event)

        self.set_context(db_event)

        # Check if event is allowed by internal policies
        if not self.event.is_allowed(self):
            msg = ("This image rebuild is not allowed by internal policy. "
                   f"message_id: {event.msg_id}")
            db_event.transition(EventState.SKIPPED, msg)
            self.log_info(msg)
            return []

        # Get build NVRs from the advisory attached to the message/event and
        # then get the original NVR for every build
        original_nvrs = set()
        for product_info in event.advisory.builds.values():
            for build in product_info['builds']:
                # Search for the first build that triggered the chain of rebuilds
                # for every shipped NVR to get original NVR from it
                original_nvr = self.get_published_original_nvr(build['nvr'])
                if original_nvr is None:
                    continue
                original_nvrs.add(original_nvr)

        self.log_info(
            "Orignial nvrs of build in the advisory #{0} are: {1}".format(
                event.advisory.errata_id, " ".join(original_nvrs)))
        # Get images by nvrs and then get their digests
        original_images_digests = self._pyxis.get_digests_by_nvrs(
            original_nvrs)
        if not original_images_digests:
            msg = f"There are no digests for NVRs: {','.join(original_nvrs)}"
            log.warning(msg)
            db_event.transition(EventState.SKIPPED, msg)
            return []

        index_images = self._pyxis.get_operator_indices()
        # get latest bundle images per channel per index image filtered
        # by the highest semantic version
        all_bundles = self._pyxis.get_latest_bundles(index_images)

        bundles = self._pyxis.filter_bundles_by_related_image_digests(
            original_images_digests, all_bundles)
        bundle_digests = set()
        for bundle in bundles:
            if not bundle.get('bundle_path_digest'):
                log.warning("Bundle %s doesn't have 'bundle_path_digests' set",
                            bundle['bundle_path'])
                continue
            bundle_digests.add(bundle['bundle_path_digest'])
        bundle_images = self._pyxis.get_images_by_digests(bundle_digests)

        # Filter image nvrs that don't have or never had auto_rebuild tag
        # in repos, where image is published
        auto_rebuild_nvrs = self._pyxis.get_auto_rebuild_tagged_images(
            bundle_images)

        # get NVRs only of those bundles, which have OSBS pinning
        bundles_nvrs = self._filter_bundles_by_pinned_related_images(
            auto_rebuild_nvrs)

        # Skip this event because we can't proceed with processing it yet.
        # TODO
        # Now that we have the bundle images' NVRs we can proceed with rebuilding them
        msg = f"Skipping the rebuild of {len(bundles_nvrs)} bundle images " \
              "due to being blocked on further implementation for now."
        db_event.transition(EventState.SKIPPED, msg)
        return []

    def _filter_bundles_by_pinned_related_images(self, bundle_image_nvrs):
        """
        If the digests were not pinned by OSBS, the bundle image nvr
        will be filtered out.

        There is no need in checking pinning for every of related images,
        because we already know that digest points to the manifest list,
        because of previous filtering.

        :param set bundle_image_nvrs: NVRs of operator bundles
        :return: list of NVRs of bundle images that have at least one
            original related image that was rebuilt
        """
        ret_bundle_images_nvrs = set()
        with koji_service(conf.koji_profile,
                          log,
                          dry_run=self.dry_run,
                          login=False) as session:
            for nvr in bundle_image_nvrs:
                build = session.get_build(nvr)
                if not build:
                    log.error("Could not find the build %s in Koji", nvr)
                    continue
                related_images = (
                    build.get("build", {})
                    .get("extra", {})
                    .get("image", {})
                    .get("operator_manifests", {})
                    .get("related_images", {})
                )

                # Skip the bundle if the related images section was not populated by OSBS
                if related_images.get("created_by_osbs") is not True:
                    continue
                ret_bundle_images_nvrs.add(nvr)
        return ret_bundle_images_nvrs
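
    # e.g. (hypothetical buildinfo): a bundle whose Koji build metadata has
    # extra.image.operator_manifests.related_images.created_by_osbs == True is
    # kept; bundles whose related images were not pinned by OSBS are dropped.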

    def get_published_original_nvr(self, rebuilt_nvr):
        """
        Search for an original build, that has been built and published to a
            repository, and get original_nvr from it

        :param str rebuilt_nvr: rebuilt NVR to look build by
        :rtype: str or None
        :return: original NVR from the first published FM build for given NVR
        """
        original_nvr = None
        # there should be at most one matching artifact build in the database,
        # otherwise an error is raised
        artifact_build = db.session.query(ArtifactBuild).filter(
            ArtifactBuild.rebuilt_nvr == rebuilt_nvr,
            ArtifactBuild.type == ArtifactType.IMAGE.value,
        ).one_or_none()
        # recursively search for original artifact build
        if artifact_build is not None:
            original_nvr = artifact_build.original_nvr

            # check if image is published
            request_params = {'include': 'data.repositories', 'page_size': 1}
            images = self._pyxis._pagination(f'images/nvr/{original_nvr}',
                                             request_params)
            if not images:
                return None
            # stop recursion if the image is published in some repo
            if any(repo['published']
                   for repo in images[0].get('repositories')):
                return original_nvr

            next_nvr = self.get_published_original_nvr(original_nvr)
            if next_nvr is not None:
                original_nvr = next_nvr

        return original_nvr
Exemple #9
class TestQueryPyxis(helpers.FreshmakerTestCase):
    def setUp(self):
        super().setUp()

        self.patcher = helpers.Patcher(
            'freshmaker.pyxis.')

        self.fake_server_url = 'https://pyxis.localhost/'
        self.px = Pyxis(self.fake_server_url)
        self.response = create_autospec(requests.Response)
        self.response.status_code = HTTPStatus.OK
        self.bad_requests_response = {
            "detail": [
                "Unable to parse the filter from URL.",
                "Please verify the 'Field Name' in the RSQL Expression.",
                "Please visit the following end-point for more details:",
                "    /v1/docs/filtering-language"
            ],
            "status": 400,
            "title": "Bad Request",
            "type": "about:blank"
        }

        self.empty_response_page = {
            "data": [],
            "page": 0,
            "page_size": 100,
            "total": 0
        }

        self.indices = [
            {
                "_id": "1",
                "created_by": "meteor",
                "creation_date": "2020-01-01T09:32:31.692000+00:00",
                "last_update_date": "2020-01-01T09:32:31.692000+00:00",
                "last_updated_by": "meteor",
                "ocp_version": "4.5",
                "organization": "org",
                "path": "path/to/registry:v4.5"
            },
            {
                "_id": "2",
                "created_by": "meteor",
                "creation_date": "2020-01-01T09:32:38.486000+00:00",
                "last_update_date": "2020-01-01T09:32:38.486000+00:00",
                "last_updated_by": "meteor",
                "ocp_version": "4.6",
                "organization": "org",
                "path": "path/to/registry:v4.6"
            },
            {
                "_id": "2",
                "created_by": "meteor",
                "creation_date": "2020-01-01T09:32:38.486000+00:00",
                "last_update_date": "2020-01-01T09:32:38.486000+00:00",
                "last_updated_by": "meteor",
                "ocp_version": "4.6",
                "organization": "org",
                "path": ""
            }
        ]

        self.bundles = [
            {
                "channel_name": "streams-1.5.x",
                "related_images": [
                    {
                        "image": "registry/amq7/amq-streams-r-operator@sha256:111",
                        "name": "strimzi-cluster-operator",
                        "digest": "sha256:111"
                    },
                    {
                        "image": "registry/amq7/amq-streams-kafka-24-r@sha256:222",
                        "name": "strimzi-kafka-24",
                        "digest": "sha256:222"
                    },
                    {
                        "image": "registry/amq7/amq-streams-kafka-25-r@sha256:333",
                        "name": "strimzi-kafka-25",
                        "digest": "sha256:333"
                    },
                    {
                        "image": "registry/amq7/amq-streams-bridge-r@sha256:444",
                        "name": "strimzi-bridge",
                        "digest": "sha256:444"
                    }
                ],
                "version": "1.5.3"
            },
            {
                "channel_name": "streams-1.5.x",
                "related_images": [
                    {
                        "image": "registry/amq7/amq-streams-r-operator@sha256:555",
                        "name": "strimzi-cluster-operator",
                        "digest": "sha256:555"
                    },
                    {
                        "image": "registry/amq7/amq-streams-kafka-24-r@sha256:666",
                        "name": "strimzi-kafka-24",
                        "digest": "sha256:666"
                    },
                    {
                        "image": "registry/amq7/amq-streams-kafka-25-r@sha256:777",
                        "name": "strimzi-kafka-25",
                        "digest": "sha256:777"
                    },
                    {
                        "image": "registry/amq7/amq-streams-bridge-r@sha256:888",
                        "name": "strimzi-bridge",
                        "digest": "sha256:888"
                    }
                ],
                "version": "1.5.4"
            },
            {
                "channel_name": "stable",
                "related_images": [
                    {
                        "image": "registry/amq7/amq--operator@sha256:999",
                        "name": "strimzi-cluster-operator",
                        "digest": "sha256:999"
                    },
                    {
                        "image": "registry/amq7/kafka-24-r@sha256:aaa",
                        "name": "strimzi-kafka-24",
                        "digest": "sha256:aaa"
                    },
                    {
                        "image": "registry/amq7/kafka-25-r@sha256:bbb",
                        "name": "strimzi-kafka-25",
                        "digest": "sha256:bbb"
                    },
                    {
                        "image": "registry/amq7/amq-streams-bridge-r@sha256:ccc",
                        "name": "strimzi-bridge",
                        "digest": "sha256:ccc"
                    }
                ],
                "version": "1.5.3"
            },
            {
                "channel_name": "stable",
                "related_images": [
                    {
                        "image": "registry/tracing/j-operator:1.13.2",
                        "name": "j-1.13.2-annotation",
                        "digest": "sha256:fff"
                    },
                    {
                        "image": "registry/tracing/j-operator:1.13.2",
                        "name": "j-operator",
                        "digest": "sha256:ffff"
                    }
                ],
                "version": "1.5.2"
            },
            {
                "channel_name": "quay-v3.3",
                "related_images": [
                    {
                        "image": "registry/quay/quay-operator@sha256:ddd",
                        "name": "quay-operator-annotation",
                        "digest": "sha256:ddd"
                    },
                    {
                        "image": "registry/quay/quay-security-r-operator@sha256:eee",
                        "name": "container-security-operator",
                        "digest": "sha256:eee"
                    }
                ],
                "version": "3.3.1"
            },
        ]

        self.images = [
            {
                "brew": {
                    "build": "s2i-1-2",
                    "completion_date": "2020-08-12T11:31:39+00:00",
                    "nvra": "s2i-1-2.ppc64le",
                    "package": "s2i-core-container"
                },
                "repositories": [
                    {
                        "manifest_list_digest": "sha256:1111",
                        "published": False,
                        "registry": "reg1",
                        "repository": "repo1",
                        "tags": [{"name": "tag0"}]
                    },
                    {
                        "manifest_list_digest": "sha256:1112",
                        "published": True,
                        "registry": "reg2",
                        "repository": "repo2",
                        "tags": [{"name": "tag1"}, {"name": "tag2"}]
                    }
                ]
            },
            {
                "brew": {
                    "build": "s2i-1-2",
                    "completion_date": "2020-08-12T11:31:39+00:00",
                    "nvra": "s2i-1-2.s390x",
                    "package": "s2i-core-container"
                },
                "repositories": [
                    {
                        "manifest_list_digest": "sha256:2222",
                        "published": True,
                        "registry": "reg2",
                        "repository": "repo2",
                        "tags": [{"name": "tag2"}]
                    }
                ]
            },
            {
                "brew": {
                    "build": "s2i-1-2",
                    "completion_date": "2020-08-12T11:31:39+00:00",
                    "nvra": "s2i-1-2.amd64",
                    "package": "s2i-core-container"
                },
                "repositories": [
                    {
                        "manifest_list_digest": "sha256:3333",
                        "published": True,
                        "registry": "reg3",
                        "repository": "repo3",
                        "tags": [{"name": "latest"}]
                    }
                ]
            },
            {
                "brew": {
                    "build": "s2i-1-2",
                    "completion_date": "2020-08-12T11:31:39+00:00",
                    "nvra": "s2i-1-2.arm64",
                    "package": "s2i-core-container"
                },
                "repositories": [
                    {
                        "manifest_list_digest": "sha256:4444",
                        "published": True,
                        "registry": "reg4",
                        "repository": "repo4",
                        "tags": [{"name": "tag1"}]
                    }
                ]
            }
        ]

    def tearDown(self):
        super().tearDown()
        self.patcher.unpatch_all()

    @staticmethod
    def copy_call_args(mock):
        """
        Copy args of Mock to another Mock so we can check call args if we call
        mock with mutable args and change it between calls
        """
        new_mock = Mock()

        def side_effect(*args, **kwargs):
            args = deepcopy(args)
            kwargs = deepcopy(kwargs)
            return new_mock(*args, **kwargs)
        mock.side_effect = side_effect
        return new_mock
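
    # Usage sketch: wrapping a patched mock with copy_call_args lets a test
    # assert on deep copies of mutable call args, e.g.
    #     my_request = self.copy_call_args(request)
    #     ...run code that calls the mock and then mutates the params...
    #     my_request.assert_has_calls(expected_calls)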

    @patch('freshmaker.pyxis.HTTPKerberosAuth')
    @patch('freshmaker.pyxis.requests.get')
    def test_make_request(self, get, auth):
        get.return_value = self.response
        test_params = {'key1': 'val1'}
        self.px._make_request('test', test_params)

        get_url = self.fake_server_url + 'v1/test'
        self.response.json.assert_called_once()
        test_params['page_size'] = "100"
        get.assert_called_once_with(get_url, params=test_params, auth=auth(),
                                    timeout=conf.net_timeout)

    @patch('freshmaker.pyxis.HTTPKerberosAuth')
    @patch('freshmaker.pyxis.requests.get')
    def test_make_request_error(self, get, auth):
        get.return_value = self.response
        self.response.ok = False
        self.response.json.side_effect = ValueError
        self.response.text = 'test message'
        self.response.request = Mock()
        self.response.request.url = 'test/url'

        # assertRaises' "msg" argument is only a failure message, not a check
        # on the exception; assert on the exception text explicitly instead
        # (assuming PyxisRequestError carries the response body in its message).
        with self.assertRaises(PyxisRequestError) as ctx:
            self.px._make_request('test', {})
        self.assertIn('test message', str(ctx.exception))

    @patch('freshmaker.pyxis.HTTPKerberosAuth')
    @patch('freshmaker.pyxis.Pyxis._make_request')
    def test_pagination(self, request, auth):
        my_request = self.copy_call_args(request)
        my_request.side_effect = [
            {"page": 0, "data": ["fake_data1"]},
            {"page": 1, "data": ["fake_data2"]},
            {"page": 2, "data": []}
        ]
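        # The three queued pages above exercise _pagination's expected stop
        # condition: it should keep fetching successive pages until one comes
        # back with an empty "data" list.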
        test_params = {'include': ['total', 'field1']}
        entity = 'test'
        auth.return_value = 1
        self.px._pagination(entity, test_params)

        self.assertEqual(request.call_count, 3)
        default_params = {'page_size': '100', 'include': ['total', 'field1']}
        calls = [call('test', params={**default_params, 'page': 0}),
                 call('test', params={**default_params, 'page': 1}),
                 call('test', params={**default_params, 'page': 2})
                 ]
        my_request.assert_has_calls(calls)

    @patch.object(conf, 'pyxis_index_image_organization', new='org')
    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_get_operator_indices(self, page):
        self.px.get_operator_indices()
        page.assert_called_once_with(
            'operators/indices', {'filter': 'organization==org'})

    @patch.object(conf, "product_pages_api_url", new="http://pp.example.com/api")
    @patch("freshmaker.pyxis.Pyxis._pagination")
    def test_get_operator_indices_with_unreleased_filtered_out(self, page):
        pp_mock_data = [
            {
                "url": "http://pp.example.com/api/releases/openshift-4.5/schedule-tasks",
                "json": [{"name": "GA", "date_finish": "2020-02-05"}]
            },
            {
                "url": "http://pp.example.com/api/releases/openshift-4.6/schedule-tasks",
                "json": [{"name": "GA", "date_finish": "2020-05-23"}]
            },
            {
                "url": "http://pp.example.com/api/releases/openshift-4.8/schedule-tasks",
                "json": [{"name": "GA", "date_finish": "2021-08-12"}]
            }
        ]
        page.return_value = self.indices + [
            {
                "_id": "3",
                "created_by": "meteor",
                "creation_date": "2020-11-01T08:23:28.253000+00:00",
                "last_update_date": "2020-11-01T08:23:28.253000+00:00",
                "last_updated_by": "meteor",
                "ocp_version": "4.8",
                "organization": "org",
                "path": ""
            }
        ]
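        # Freeze time before the 4.8 GA date from Product Pages (2021-08-12)
        # so that index is expected to be filtered out as unreleased.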
        now = datetime(year=2020, month=12, day=15, hour=0, minute=0, second=0)

        with requests_mock.Mocker() as http:
            for data in pp_mock_data:
                http.get(data["url"], json=data["json"])

            with freeze_time(now):
                indices = self.px.get_operator_indices()

        self.assertEqual(len(indices), 3)
        self.assertNotIn("4.8", [i["ocp_version"] for i in indices])

    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_get_latest_bundles(self, page):
        page_copy = self.copy_call_args(page)
        # A bundle whose "version" is not valid semver should be ignored
        bad_version_bundle = {
            "channel_name": "test-v2.3",
            "related_images": [
                {
                    "image": "registry/quay/quay-operator@sha256:ddd",
                    "name": "quay-operator-annotation",
                    "digest": "sha256:ddd"
                },
                {
                    "image": "registry/quay/quay-security-r-operator@sha256:eee",
                    "name": "container-security-operator",
                    "digest": "sha256:eee"
                }
            ],
            "version": "version_me"
        }
        page_copy.side_effect = [self.bundles[:3] + [bad_version_bundle], []]

        out = self.px.get_latest_bundles(self.indices)
        expected_out = self.bundles[:3]

        self.assertEqual(out, expected_out)
        page_copy.assert_has_calls([
            call('operators/bundles',
                 {'include': 'data.channel_name,data.version,'
                             'data.related_images,data.bundle_path_digest,'
                             'data.bundle_path,data.csv_name',
                  'filter': 'latest_in_channel==true%20and%20'
                            'source_index_container_path==path/to/registry:v4.5'}),
            call('operators/bundles',
                 {'include': 'data.channel_name,data.version,'
                             'data.related_images,data.bundle_path_digest,'
                             'data.bundle_path,data.csv_name',
                  'filter': 'latest_in_channel==true%20and%20'
                            'source_index_container_path==path/to/registry:v4.6'}),
        ])

    @patch('freshmaker.pyxis.Pyxis._pagination')
    def test_get_manifest_list_digest_by_nvr(self, page):
        page.return_value = self.images
        digest = self.px.get_manifest_list_digest_by_nvr('s2i-1-2')

        expected_digest = 'sha256:1112'
        self.assertEqual(digest, expected_digest)
        page.assert_called_once_with(
            'images/nvr/s2i-1-2',
            {'include': 'data.brew,data.repositories'}
        )

    def test_get_bundles_by_related_image_digest(self):
        digest = 'sha256:111'
        new_bundles = self.px.get_bundles_by_related_image_digest(
            digest, self.bundles)
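        # Only bundles whose related_images reference the digest should match;
        # in the fixture data that is only the first bundle.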

        expected_bundles = [self.bundles[0]]
        self.assertListEqual(new_bundles, expected_bundles)

    @patch('freshmaker.pyxis.requests.get')
    def test_get_images_by_digest(self, mock_get):
        image_1 = {
            'brew': {
                'build': 'foo-operator-2.1-2',
                'nvra': 'foo-operator-2.1-2.amd64',
                'package': 'foo',
            },
            'repositories': [
                {
                    'content_advisory_ids': [],
                    'manifest_list_digest': 'sha256:12345',
                    'manifest_schema2_digest': 'sha256:23456',
                    'published': True,
                    'registry': 'registry.example.com',
                    'repository': 'foo/foo-operator-bundle',
                    'tags': [{'name': '2'}, {'name': '2.1'}],
                }
            ],
        }
        fake_responses = [Mock(ok=True), Mock(ok=True)]
        fake_responses[0].json.return_value = {'data': [image_1]}
        fake_responses[1].json.return_value = {'data': []}
        mock_get.side_effect = fake_responses
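        # Like the paginated endpoints above, get_images_by_digest is expected
        # to keep requesting pages until it receives an empty "data" list,
        # hence the two queued responses.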

        digest = 'sha256:23456'
        images = self.px.get_images_by_digest(digest)
        self.assertListEqual(images, [image_1])

    @patch('freshmaker.pyxis.requests.get')
    def test_get_auto_rebuild_tags(self, mock_get):
        mock_get.return_value = Mock(ok=True)
        mock_get.return_value.json.return_value = {
            '_links': {},
            'auto_rebuild_tags': [
                '2.3',
                'latest'
            ]
        }

        tags = self.px.get_auto_rebuild_tags('registry.example.com', 'foo/foo-operator-bundle')
        self.assertListEqual(tags, ['2.3', 'latest'])