def run(self):
    """Copy every tag missing from the ECR mirror from the upstream image.

    Bails out early when a previous step flagged an error. Copy failures
    are logged per-tag and do not abort the remaining tags.
    """
    if self.error:
        return

    destination = Image(self.ecr_uri, username=self.ecr_username,
                        password=self.ecr_password)
    source = Image(
        self.instance["mirror"]["url"],
        username=self.image_username,
        password=self.image_password,
    )

    LOG.debug("[checking %s -> %s]", source, destination)
    for tag in source:
        # Only tags absent from the mirror get copied.
        if tag in destination:
            continue
        try:
            self.skopeo_cli.copy(
                src_image=source[tag],
                src_creds=self.image_auth,
                dst_image=destination[tag],
                dest_creds=self.ecr_auth,
            )
        except SkopeoCmdError as details:
            LOG.error("[%s]", details)
def process_sync_tasks(self):
    """Build the per-organization list of tag-sync tasks.

    Compares every mirror tag against the quay.io repository: missing tags
    are always scheduled; a manifest-level (deep) comparison only happens
    outside dry-run mode and at most once per deep-sync interval.

    :return: mapping of org name -> list of task dicts with
             'mirror_url' and 'image_url' keys
    """
    eight_hours = 28800  # 60 * 60 * 8
    # The expensive manifest comparison only runs every `eight_hours`.
    is_deep_sync = self._is_deep_sync(interval=eight_hours)

    summary = self.process_repos_query()

    sync_tasks = defaultdict(list)
    for org, data in summary.items():
        for item in data:
            image = Image(f'quay.io/{org}/{item["name"]}')
            image_mirror = Image(item['mirror'])
            for tag in image_mirror:
                upstream = image_mirror[tag]
                downstream = image[tag]
                if tag not in image:
                    # BUGFIX: message previously read "out off sync";
                    # now consistent with the deep-check message below.
                    _LOG.debug('Image %s and mirror %s are out of sync',
                               downstream, upstream)
                    sync_tasks[org].append({'mirror_url': str(upstream),
                                            'image_url': str(downstream)})
                    continue

                # Deep (slow) check only in non dry-run mode
                if self.dry_run:
                    _LOG.debug('Image %s and mirror %s are in sync',
                               downstream, upstream)
                    continue

                # Deep (slow) check only from time to time
                if not is_deep_sync:
                    _LOG.debug('Image %s and mirror %s are in sync',
                               downstream, upstream)
                    continue

                try:
                    if downstream == upstream:
                        _LOG.debug('Image %s and mirror %s are in sync',
                                   downstream, upstream)
                        continue
                except ImageComparisonError as details:
                    _LOG.error('[%s]', details)
                    continue

                _LOG.debug('Image %s and mirror %s are out of sync',
                           downstream, upstream)
                sync_tasks[org].append({'mirror_url': str(upstream),
                                        'image_url': str(downstream)})

    return sync_tasks
def test_no_cache(self, should_cache, getter):
    """With response_cache=None the manifest is fetched directly and never cached."""
    r = requests.Response()
    r.status_code = 200
    r.headers['Docker-Content-Digest'] = 'sha256:asha'
    r._content = b'{"key": "value"}'
    # FIX: dropped a pointless f-string prefix (no placeholders).
    i = Image("quay.io/foo/bar:latest", response_cache=None)
    getter.return_value = r
    m = i._get_manifest()
    assert m == r
    getter.assert_called_once_with(
        "https://quay.io/v2/foo/bar/manifests/latest")
    should_cache.assert_not_called()
def test_empty_cache_should_not_cache(self, should_cache, getter):
    """When should_cache() returns False the manifest is fetched but the cache stays empty."""
    should_cache.return_value = False
    # FIX: dropped a pointless f-string prefix (no placeholders).
    i = Image("quay.io/foo/bar:latest", response_cache={})
    r = requests.Response()
    r.status_code = 200
    r.headers['Docker-Content-Digest'] = 'sha256:asha'
    r._content = b'{"key": "value"}'
    getter.return_value = r
    m = i._get_manifest()
    getter.assert_any_call("https://quay.io/v2/foo/bar/manifests/latest",
                           requests.head)
    getter.assert_any_call("https://quay.io/v2/foo/bar/manifests/latest")
    assert m == r
    assert i.response_cache == {}
def _is_image_there(self, image):
    """Return True when `image` exists in one of the configured registries.

    Looks up the credentials matching the image's registry in
    self.registry_creds and probes the registry for the image.
    """
    image_obj = Image(image)

    for registry, creds in self.registry_creds['auths'].items():
        # FIX: urlparse() only populates `netloc` when the URL carries a
        # scheme separator; a bare hostname key (e.g. "quay.io") would be
        # parsed entirely as `path` and never match. Mirrors the sibling
        # implementation of this method elsewhere in the codebase.
        if '//' not in registry:
            registry = '//' + registry

        # Getting the credentials for the image_obj
        registry_obj = urlparse(registry)
        if registry_obj.netloc != image_obj.registry:
            continue
        image_obj.auth = (creds['username'], creds['password'])

        # Checking if the image is already
        # in the registry
        if image_obj:
            return True

    return False
def process_repos_query(self):
    """Group mirror-enabled quay repo items by organization.

    Items whose mirror would publish a docker.io/library image through a
    public quay repository are rejected (logged and skipped).

    :return: mapping of org name -> list of {'name', 'mirror', 'server_url'}
    """
    result = self.gqlapi.query(self.QUAY_REPOS_QUERY)

    summary = defaultdict(list)
    for app in result['apps']:
        repos = app.get('quayRepos')
        if repos is None:
            continue
        for repo in repos:
            org = repo['org']['name']
            server_url = repo['org'].get('serverUrl') or 'quay.io'
            for item in repo['items']:
                if item['mirror'] is None:
                    continue
                mirror_image = Image(item['mirror']['url'])
                library_to_public_repo = (
                    mirror_image.registry == 'docker.io'
                    and mirror_image.repository == 'library'
                    and item['public'])
                if library_to_public_repo:
                    _LOG.error(
                        "Image %s can't be mirrored to a public "
                        "quay repository.", mirror_image)
                    continue
                summary[org].append({'name': item["name"],
                                     'mirror': item['mirror'],
                                     'server_url': server_url})
    return summary
def process_repos_query():
    """Collect mirror-enabled quay repo items from app-interface, keyed by OrgKey.

    Exits the process when a docker.io/library image would be mirrored
    into a public quay repository.

    :return: mapping of OrgKey -> list of {'name', 'mirror', 'server_url'}
    """
    summary = defaultdict(list)

    for app in queries.get_quay_repos():
        repos = app.get('quayRepos')
        if repos is None:
            continue
        for repo in repos:
            org_info = repo['org']
            org = org_info['name']
            instance = org_info['instance']['name']
            server_url = org_info['instance']['url']
            org_key = OrgKey(instance, org)
            for item in repo['items']:
                if item['mirror'] is None:
                    continue
                mirror_image = Image(item['mirror']['url'])
                library_to_public_repo = (
                    mirror_image.registry == 'docker.io'
                    and mirror_image.repository == 'library'
                    and item['public'])
                if library_to_public_repo:
                    _LOG.error("Image %s can't be mirrored to a public "
                               "quay repository.", mirror_image)
                    sys.exit(ExitCodes.ERROR)
                summary[org_key].append({'name': item["name"],
                                         'mirror': item['mirror'],
                                         'server_url': server_url})
    return summary
def test_parser(self, image, expected_struct):
    """Every parsed component must match the expected structure."""
    parsed = Image(image)
    for attr in ('scheme', 'registry', 'repository', 'image', 'tag'):
        assert getattr(parsed, attr) == expected_struct[attr]
def _check_images(self, options):
    """Validate all images collected from the resource.

    Flags (and logs) images that fall outside image_patterns, do not
    exist, or cannot even be parsed/queried.

    :return: True when at least one image failed a check (error flag)
    """
    saas_file_name = options['saas_file_name']
    resource_template_name = options['resource_template_name']
    html_url = options['html_url']
    resource = options['resource']
    image_auth = options['image_auth']
    image_patterns = options['image_patterns']
    error_prefix = \
        f"[{saas_file_name}/{resource_template_name}] {html_url}:"

    if image_auth:
        username, password = image_auth['user'], image_auth['token']
    else:
        username, password = None, None

    error = False
    for image in images_iter if (images_iter := self._collect_images(resource)) or True else ():
        matches = (not image_patterns
                   or any(image.startswith(p) for p in image_patterns))
        if not matches:
            error = True
            logging.error(
                f"{error_prefix} Image is not in imagePatterns: {image}")
        try:
            # Truthiness of the Image performs the existence probe, so it
            # stays inside the try block.
            valid = Image(image, username=username, password=password)
            if not valid:
                error = True
                logging.error(
                    f"{error_prefix} Image does not exist: {image}")
                continue
        except Exception:
            error = True
            logging.error(f"{error_prefix} Image is invalid: {image}")
            continue
    return error
def test_getitem(self):
    """Indexing an Image by tag yields a new Image sharing cache and auth token."""
    base = Image("quay.io/foo/bar:latest", response_cache={},
                 auth_token="atoken")
    derived = base['current']
    assert derived.response_cache is base.response_cache
    assert derived.auth_token is base.auth_token
    assert derived.tag == 'current'
def _is_image_there(self, image):
    """Return True when `image` exists in one of the configured registries."""
    target = Image(image)

    for registry, creds in self.registry_creds["auths"].items():
        # urlparse() needs the "//" separator to populate netloc; bare
        # hostname keys would otherwise land entirely in `path`.
        if "//" not in registry:
            registry = "//" + registry
        if urlparse(registry).netloc != target.registry:
            continue

        # Attach this registry's credentials and probe for the image.
        target.auth = (creds["username"], creds["password"])
        if target:
            return True

    return False
def test_parser(self, image, expected_struct):
    """Parsed components match the expectation; digest only checked when given."""
    parsed = Image(image)
    for attr in ('scheme', 'registry', 'repository', 'image'):
        assert getattr(parsed, attr) == expected_struct[attr]
    assert parsed.tag == expected_struct.get('tag')

    expected_digest = expected_struct.get('digest')
    # Condition this to avoid the network.
    if expected_digest:
        assert parsed.digest == expected_digest
def test_username_and_password_ok(self, getauth, parseauth):
    """A 200 response with basic auth never triggers token acquisition."""
    response = requests.Response()
    response.status_code = 200
    method = MagicMock(return_value=response)
    i = Image("quay.io/foo/bar:latest", username="******",
              password="******")
    i._request_get.__wrapped__(i, "http://www.google.com", method=method)
    method.assert_called_once()
    args, kwargs = method.call_args_list[0]
    assert args == ('http://www.google.com', )
    assert 'Authorization' not in kwargs['headers']
    assert kwargs['auth'] == i.auth
    getauth.assert_not_called()
    parseauth.assert_not_called()
def _check_images(self, resource):
    """Return True (error) when any image collected from the resource is missing."""
    if self.image_auth:
        username, password = self.image_auth['user'], self.image_auth['token']
    else:
        username, password = None, None

    error = False
    for image in self._collect_images(resource):
        if Image(image, username=username, password=password):
            continue
        error = True
        logging.error(f"Image does not exist: {image}")
    return error
def test_persistent_failure(self, getauth, parseauth):
    """A 401 that persists even after re-authentication raises HTTPError."""
    r = requests.Response()
    r.status_code = 401
    r.headers['Www-Authenticate'] = 'something something'
    method = MagicMock(return_value=r)
    # FIX: removed an unused 200 Response that was never wired into the
    # mock (dead local left over from a copy of the happy-path test).
    i = Image("quay.io/foo/bar:latest", username="******",
              password="******")
    getauth.return_value = "anauthtoken"
    parseauth.return_value = "aparsedauth"
    with pytest.raises(requests.exceptions.HTTPError):
        i._request_get.__wrapped__(i, "http://www.google.com",
                                   method=method)
    getauth.assert_called_once()
    parseauth.assert_called_once()
def _check_image(image, image_patterns, image_auth, error_prefix):
    """Validate a single image against the allowed patterns and the registry.

    :return: True when the image failed any check (error flag)
    """
    error = False

    pattern_ok = (not image_patterns
                  or any(image.startswith(p) for p in image_patterns))
    if not pattern_ok:
        error = True
        logging.error(
            f"{error_prefix} Image is not in imagePatterns: {image}")

    try:
        # Truthiness of the Image performs the existence probe, so it
        # stays inside the try block.
        valid = Image(image, **image_auth)
        if not valid:
            error = True
            logging.error(f"{error_prefix} Image does not exist: {image}")
    except Exception as e:
        error = True
        logging.error(f"{error_prefix} Image is invalid: {image}. " +
                      f"details: {str(e)}")

    return error
def test_username_and_password_reauthenticate(self, getauth, parseauth):
    """A 401 followed by a 200 triggers exactly one re-authentication."""
    first = requests.Response()
    first.status_code = 401
    first.headers['Www-Authenticate'] = 'something something'
    second = requests.Response()
    second.status_code = 200
    method = MagicMock(side_effect=[first, second])
    # FIX: removed an unused extra 200 Response that was never wired into
    # the mock (dead local).
    i = Image("quay.io/foo/bar:latest", username="******",
              password="******")
    getauth.return_value = "anauthtoken"
    parseauth.return_value = "aparsedauth"
    i._request_get.__wrapped__(i, "http://www.google.com", method=method)
    parseauth.assert_called_once_with('something something')
    assert method.call_count == 2
    assert i.auth_token == 'anauthtoken'
def process_sync_tasks(self): eight_hours = 28800 # 60 * 60 * 8 is_deep_sync = self._is_deep_sync(interval=eight_hours) summary = defaultdict(list) self.process_org_mirrors(summary) sync_tasks = defaultdict(list) for org_key, data in summary.items(): org = self.quay_api_store[org_key] org_name = org_key.org_name server_url = org["url"] username = org["push_token"]["user"] password = org["push_token"]["token"] for item in data: image = Image( f'{server_url}/{org_name}/{item["name"]}', username=username, password=password, ) mirror_url = item["mirror"]["url"] mirror_username = None mirror_password = None mirror_creds = None if item["mirror"].get("username") and item["mirror"].get("token"): mirror_username = item["mirror"]["username"] mirror_password = item["mirror"]["token"] mirror_creds = f"{mirror_username}:{mirror_password}" image_mirror = Image( mirror_url, username=mirror_username, password=mirror_password ) for tag in image_mirror: upstream = image_mirror[tag] downstream = image[tag] if tag not in image: _LOG.debug( "Image %s and mirror %s are out of sync", downstream, upstream, ) task = { "mirror_url": str(upstream), "mirror_creds": mirror_creds, "image_url": str(downstream), } sync_tasks[org_key].append(task) continue # Deep (slow) check only in non dry-run mode if self.dry_run: _LOG.debug( "Image %s and mirror %s are in sync", downstream, upstream ) continue # Deep (slow) check only from time to time if not is_deep_sync: _LOG.debug( "Image %s and mirror %s are in sync", downstream, upstream ) continue try: if downstream == upstream: _LOG.debug( "Image %s and mirror %s are in sync", downstream, upstream, ) continue except ImageComparisonError as details: _LOG.error("[%s]", details) continue _LOG.debug( "Image %s and mirror %s are out of sync", downstream, upstream ) sync_tasks[org_key].append( { "mirror_url": str(upstream), "mirror_creds": mirror_creds, "image_url": str(downstream), } ) return sync_tasks
def _process_template(self, options):
    """Render the resources for a single saas-file resource template target.

    Supports two providers: 'openshift-template' (fetches the template,
    consolidates parameters, resolves IMAGE_TAG / REPO_DIGEST /
    IMAGE_DIGEST, and processes it locally via `oc process`) and
    'directory' (fetches raw resources from a directory).

    :param options: dict with saas_file_name, resource_template_name,
                    image_auth, url, path, provider, target, github and,
                    for openshift-template, hash_length and parameters
    :return: (resources, html_url, target_promotion) — (None, None, None)
             on any fetch/parameter error
    """
    saas_file_name = options['saas_file_name']
    resource_template_name = options['resource_template_name']
    image_auth = options['image_auth']
    url = options['url']
    path = options['path']
    provider = options['provider']
    target = options['target']
    github = options['github']
    target_ref = target['ref']
    target_promotion = target.get('promotion') or {}

    resources = None
    html_url = None
    commit_sha = None

    if provider == 'openshift-template':
        hash_length = options['hash_length']
        parameters = options['parameters']
        environment = target['namespace']['environment']
        environment_parameters = self._collect_parameters(environment)
        target_parameters = self._collect_parameters(target)

        # Precedence (lowest to highest): environment < saas-file
        # parameters < target parameters.
        consolidated_parameters = {}
        consolidated_parameters.update(environment_parameters)
        consolidated_parameters.update(parameters)
        consolidated_parameters.update(target_parameters)

        # Expand ${KEY} references between string parameters (single
        # pass over each key).
        for replace_key, replace_value in consolidated_parameters.items():
            if not isinstance(replace_value, str):
                continue
            replace_pattern = '${' + replace_key + '}'
            for k, v in consolidated_parameters.items():
                if not isinstance(v, str):
                    continue
                if replace_pattern in v:
                    consolidated_parameters[k] = \
                        v.replace(replace_pattern, replace_value)

        get_file_contents_options = {
            'url': url,
            'path': path,
            'ref': target_ref,
            'github': github
        }
        try:
            template, html_url, commit_sha = \
                self._get_file_contents(get_file_contents_options)
        except Exception as e:
            logging.error(f"[{url}/{path}:{target_ref}] " +
                          f"error fetching template: {str(e)}")
            return None, None, None

        # add IMAGE_TAG only if it is unspecified
        image_tag = consolidated_parameters.get('IMAGE_TAG')
        if not image_tag:
            sha_substring = commit_sha[:hash_length]
            # IMAGE_TAG takes one of two forms:
            # - If saas file attribute 'use_channel_in_image_tag' is true,
            #   it is {CHANNEL}-{SHA}
            # - Otherwise it is just {SHA}
            if self._get_saas_file_attribute("use_channel_in_image_tag"):
                try:
                    channel = consolidated_parameters["CHANNEL"]
                except KeyError:
                    logging.error(
                        f"[{saas_file_name}/{resource_template_name}] " +
                        f"{html_url}: CHANNEL is required when " +
                        "'use_channel_in_image_tag' is true.")
                    return None, None, None
                image_tag = f"{channel}-{sha_substring}"
            else:
                image_tag = sha_substring
            consolidated_parameters['IMAGE_TAG'] = image_tag

        # This relies on IMAGE_TAG already being calculated.
        need_repo_digest = self._parameter_value_needed(
            "REPO_DIGEST", consolidated_parameters, template)
        need_image_digest = self._parameter_value_needed(
            "IMAGE_DIGEST", consolidated_parameters, template)
        if need_repo_digest or need_image_digest:
            try:
                logging.debug("Generating REPO_DIGEST.")
                registry_image = consolidated_parameters["REGISTRY_IMG"]
            except KeyError as e:
                logging.error(
                    f"[{saas_file_name}/{resource_template_name}] " +
                    f"{html_url}: error generating REPO_DIGEST. " +
                    "Is REGISTRY_IMG missing? " +
                    f"{str(e)}")
                return None, None, None
            try:
                # Resolving digests hits the registry over the network.
                image_uri = f"{registry_image}:{image_tag}"
                img = Image(image_uri, **image_auth)
                if need_repo_digest:
                    consolidated_parameters["REPO_DIGEST"] = img.url_digest
                if need_image_digest:
                    consolidated_parameters["IMAGE_DIGEST"] = img.digest
            except (rqexc.ConnectionError, rqexc.HTTPError) as e:
                logging.error(
                    f"[{saas_file_name}/{resource_template_name}] " +
                    f"{html_url}: error generating REPO_DIGEST for " +
                    f"{image_uri}: {str(e)}")
                return None, None, None

        # local=True: template processing happens client-side; the
        # server/token values are placeholders.
        oc = OC('server', 'token', local=True)
        try:
            resources = oc.process(template, consolidated_parameters)
        except StatusCodeError as e:
            logging.error(
                f"[{saas_file_name}/{resource_template_name}] " +
                f"{html_url}: error processing template: {str(e)}")
    elif provider == 'directory':
        get_directory_contents_options = {
            'url': url,
            'path': path,
            'ref': target_ref,
            'github': github
        }
        try:
            resources, html_url, commit_sha = \
                self._get_directory_contents(
                    get_directory_contents_options)
        except Exception as e:
            logging.error(f"[{url}/{path}:{target_ref}] " +
                          f"error fetching directory: {str(e)}")
            return None, None, None
    else:
        # NOTE(review): on an unknown provider this logs but still falls
        # through and returns (None, html_url=None, promotion with
        # commit_sha=None) — presumably intentional; confirm with callers.
        logging.error(f"[{saas_file_name}/{resource_template_name}] " +
                      f"unknown provider: {provider}")

    target_promotion['commit_sha'] = commit_sha

    return resources, html_url, target_promotion
def test_no_tag(self):
    """Accessing url_tag on a by-digest image raises NoTagForImageByDigest."""
    by_digest = Image(f"quay.io/foo/bar@{A_SHA}")
    with pytest.raises(Exception) as excinfo:
        _ = by_digest.url_tag
    assert excinfo.typename == 'NoTagForImageByDigest'
def test_tag_override(self, image, tag, expected_image_url):
    """An explicit tag argument overrides the tag parsed from the URL."""
    overridden = Image(image, tag)
    assert str(overridden) == expected_image_url
def test_str(self, image, expected_image_url):
    """str() of an Image renders the canonical image URL."""
    parsed = Image(image)
    assert str(parsed) == expected_image_url
def process_sync_tasks(self):
    """Build the per-organization list of tag-sync tasks.

    Applies the mirror's tags/tagsExclude filters, then compares each
    remaining mirror tag against the downstream repository: missing tags
    are always scheduled; a manifest-level (deep) comparison only runs
    outside dry-run mode and at most once per deep-sync interval.

    :return: mapping of org name -> list of task dicts with
             'mirror_url', 'mirror_creds' and 'image_url' keys
    """
    eight_hours = 28800  # 60 * 60 * 8
    # The expensive manifest comparison only runs every `eight_hours`.
    is_deep_sync = self._is_deep_sync(interval=eight_hours)

    summary = self.process_repos_query()

    sync_tasks = defaultdict(list)
    for org, data in summary.items():
        for item in data:
            image = Image(f'{item["server_url"]}/{org}/{item["name"]}')

            mirror_url = item['mirror']['url']

            username = None
            password = None
            mirror_creds = None
            if item['mirror']['pullCredentials'] is not None:
                pull_credentials = item['mirror']['pullCredentials']
                raw_data = secret_reader.read_all(pull_credentials,
                                                 settings=self.settings)
                username = raw_data["user"]
                password = raw_data["token"]
                mirror_creds = f'{username}:{password}'

            image_mirror = Image(mirror_url, username=username,
                                 password=password)

            tags = item['mirror'].get('tags')
            tags_exclude = item['mirror'].get('tagsExclude')

            for tag in image_mirror:
                if not self.sync_tag(tags=tags,
                                     tags_exclude=tags_exclude,
                                     candidate=tag):
                    continue

                upstream = image_mirror[tag]
                downstream = image[tag]
                if tag not in image:
                    # BUGFIX: message previously read "out off sync";
                    # now consistent with the deep-check message below.
                    _LOG.debug('Image %s and mirror %s are out of sync',
                               downstream, upstream)
                    sync_tasks[org].append({'mirror_url': str(upstream),
                                            'mirror_creds': mirror_creds,
                                            'image_url': str(downstream)})
                    continue

                # Deep (slow) check only in non dry-run mode
                if self.dry_run:
                    _LOG.debug('Image %s and mirror %s are in sync',
                               downstream, upstream)
                    continue

                # Deep (slow) check only from time to time
                if not is_deep_sync:
                    _LOG.debug('Image %s and mirror %s are in sync',
                               downstream, upstream)
                    continue

                try:
                    if downstream == upstream:
                        _LOG.debug('Image %s and mirror %s are in sync',
                                   downstream, upstream)
                        continue
                except ImageComparisonError as details:
                    _LOG.error('[%s]', details)
                    continue

                _LOG.debug('Image %s and mirror %s are out of sync',
                           downstream, upstream)
                sync_tasks[org].append({'mirror_url': str(upstream),
                                        'mirror_creds': mirror_creds,
                                        'image_url': str(downstream)})

    return sync_tasks
def process_sync_tasks(self): eight_hours = 28800 # 60 * 60 * 8 is_deep_sync = self._is_deep_sync(interval=eight_hours) summary = defaultdict(list) self.process_org_mirrors(summary) sync_tasks = defaultdict(list) for org_key, data in summary.items(): org = self.quay_api_store[org_key] org_name = org_key.org_name server_url = org['url'] username = org['push_token']['user'] password = org['push_token']['token'] for item in data: image = Image(f'{server_url}/{org_name}/{item["name"]}', username=username, password=password) mirror_url = item['mirror']['url'] mirror_username = None mirror_password = None mirror_creds = None if item['mirror'].get('username') and \ item['mirror'].get('token'): mirror_username = item['mirror']['username'] mirror_password = item['mirror']['token'] mirror_creds = f'{mirror_username}:{mirror_password}' image_mirror = Image(mirror_url, username=mirror_username, password=mirror_password) for tag in image_mirror: upstream = image_mirror[tag] downstream = image[tag] if tag not in image: _LOG.debug('Image %s and mirror %s are out of sync', downstream, upstream) task = {'mirror_url': str(upstream), 'mirror_creds': mirror_creds, 'image_url': str(downstream)} sync_tasks[org_key].append(task) continue # Deep (slow) check only in non dry-run mode if self.dry_run: _LOG.debug('Image %s and mirror %s are in sync', downstream, upstream) continue # Deep (slow) check only from time to time if not is_deep_sync: _LOG.debug('Image %s and mirror %s are in sync', downstream, upstream) continue try: if downstream == upstream: _LOG.debug('Image %s and mirror %s are in sync', downstream, upstream) continue except ImageComparisonError as details: _LOG.error('[%s]', details) continue _LOG.debug('Image %s and mirror %s are out of sync', downstream, upstream) sync_tasks[org_key].append({'mirror_url': str(upstream), 'mirror_creds': mirror_creds, 'image_url': str(downstream)}) return sync_tasks