Example #1
def delete_repo_file(cls):
    for repo_class, _dummy in get_repo_file_classes():
        repo_file = repo_class()
        if os.path.exists(repo_file.path):
            os.unlink(repo_file.path)
    # When the repo is removed, also remove the override tracker
    WrittenOverrideCache.delete_cache()
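A minimal usage sketch for this helper (assuming it is exposed as a @classmethod on RepoActionInvoker, as the cls parameter suggests, and that the import path below is correct; both are assumptions, not shown by the snippet):

# Hypothetical usage: remove every subscription-manager generated repo file
# and the written-override tracker in one call. The import path is assumed.
from subscription_manager.repolib import RepoActionInvoker

RepoActionInvoker.delete_repo_file()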
Example #2
    def __init__(self, cache_only=False, apply_overrides=True):
        self.identity = inj.require(inj.IDENTITY)

        # These should probably move closer to their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)

        self.ent_source = ent_cert.EntitlementDirEntitlementSource()

        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()

        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        self.manage_repos = manage_repos_enabled()

        self.release = None
        self.overrides = {}
        self.override_supported = False
        try:
            self.override_supported = bool(self.identity.is_valid() and self.uep and self.uep.supports_resource('content_overrides'))
        except socket.error as e:
            # swallow the error to fix bz 1298327
            log.exception(e)
            pass
        self.written_overrides = WrittenOverrideCache()

        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            return

        # NOTE: if anything in the RepoActionInvoker init blocks, and it
        #       could, yum could still block. The closest thing to an
        #       event loop we have is the while True: sleep() in lock.py:Lock.acquire()

        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides.read_cache_only()

            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()

            if cache_only:
                status = override_cache.read_cache_only()
            else:
                status = override_cache.load_status(self.uep, self.identity.uuid)

            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][item['name']] = item['value']
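The nested mapping built by the final loop can be seen in isolation with a small, self-contained sketch (the contentLabel/name/value records below are invented for illustration; the real ones come from the override status cache):

# Fold a flat list of override records into {contentLabel: {name: value}},
# mirroring the loop above. Sample records are invented for illustration.
status = [
    {'contentLabel': 'awesomeos-x86_64', 'name': 'gpgcheck', 'value': '0'},
    {'contentLabel': 'awesomeos-x86_64', 'name': 'enabled', 'value': '1'},
    {'contentLabel': 'awesomeos-source', 'name': 'enabled', 'value': '0'},
]

overrides = {}
for item in status or []:
    overrides.setdefault(item['contentLabel'], {})[item['name']] = item['value']

# overrides == {'awesomeos-x86_64': {'gpgcheck': '0', 'enabled': '1'},
#               'awesomeos-source': {'enabled': '0'}}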
Example #3
    def __init__(self, cache_only=False, apply_overrides=True):
        self.identity = inj.require(inj.IDENTITY)

        # These should probably move closer to their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)

        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()

        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        if CFG.has_option('rhsm', 'manage_repos'):
            self.manage_repos = int(CFG.get('rhsm', 'manage_repos'))

        self.release = None
        self.overrides = {}
        self.override_supported = bool(
            self.identity.is_valid() and self.uep
            and self.uep.supports_resource('content_overrides'))
        self.written_overrides = WrittenOverrideCache()

        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            return

        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides._read_cache()
            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()
            if cache_only:
                status = override_cache._read_cache()
            else:
                status = override_cache.load_status(self.uep,
                                                    self.identity.uuid)

            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][
                    item['name']] = item['value']

        message = "Release API is not supported by the server. Using default."
        try:
            result = self.uep.getRelease(self.identity.uuid)
            self.release = result['releaseVer']
        except RemoteServerException as e:
            log.debug(message)
Example #4
    def delete_repo_file(cls):
        yum_repo_file = YumRepoFile()
        if os.path.exists(yum_repo_file.path):
            os.unlink(yum_repo_file.path)

        # if we have zypper repo, remove it too.
        if os.path.exists(ZYPPER_REPO_DIR):
            zypper_repo_file = ZypperRepoFile()
            if os.path.exists(zypper_repo_file.path):
                os.unlink(zypper_repo_file.path)

        # When the repo is removed, also remove the override tracker
        WrittenOverrideCache.delete_cache()
Example #5
    def __init__(self, cache_only=False, apply_overrides=True):
        self.identity = inj.require(inj.IDENTITY)

        # These should probably move closer to their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)

        self.ent_source = ent_cert.EntitlementDirEntitlementSource()

        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()

        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        self.manage_repos = manage_repos_enabled()

        self.release = None
        self.overrides = {}
        self.override_supported = False
        try:
            self.override_supported = bool(self.identity.is_valid() and self.uep and self.uep.supports_resource('content_overrides'))
        except socket.error as e:
            # swallow the error to fix bz 1298327
            log.exception(e)
            pass
        self.written_overrides = WrittenOverrideCache()

        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            return

        # NOTE: if anything in the RepoActionInvoker init blocks, and it
        #       could, yum could still block. The closest thing to an
        #       event loop we have is the while True: sleep() in lock.py:Lock.acquire()

        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides._read_cache()

            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()

            if cache_only:
                status = override_cache._read_cache()
            else:
                status = override_cache.load_status(self.uep, self.identity.uuid)

            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][item['name']] = item['value']
Example #6
    def __init__(self, cache_only=False, apply_overrides=True):
        self.identity = inj.require(inj.IDENTITY)

        # These should probably move closer to their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)

        self.ent_source = ent_cert.EntitlementDirEntitlementSource()

        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()

        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        if CFG.has_option('rhsm', 'manage_repos'):
            self.manage_repos = int(CFG.get('rhsm', 'manage_repos'))

        self.release = None
        self.overrides = {}
        self.override_supported = bool(self.identity.is_valid() and self.uep and self.uep.supports_resource('content_overrides'))
        self.written_overrides = WrittenOverrideCache()

        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            return

        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides._read_cache()
            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()
            if cache_only:
                status = override_cache._read_cache()
            else:
                status = override_cache.load_status(self.uep, self.identity.uuid)

            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][item['name']] = item['value']

        message = "Release API is not supported by the server. Using default."
        try:
            result = self.uep.getRelease(self.identity.uuid)
            self.release = result['releaseVer']
        except RemoteServerException as e:
            log.debug(message)
Example #7
class RepoUpdateActionCommand(object):
    """UpdateAction for yum repos.

    Update yum repos when triggered. Generates yum repo config
    based on:
        - entitlement certs
        - repo overrides
        - rhsm config
        - yum config
        - manual changes made to "redhat.repo".

    Returns a RepoActionReport.
    """
    def __init__(self, cache_only=False, apply_overrides=True):
        self.identity = inj.require(inj.IDENTITY)

        # These should probably move closer to their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)

        self.ent_source = ent_cert.EntitlementDirEntitlementSource()

        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()

        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        self.manage_repos = manage_repos_enabled()

        self.release = None
        self.overrides = {}
        self.override_supported = False
        try:
            self.override_supported = bool(
                self.identity.is_valid() and self.uep
                and self.uep.supports_resource('content_overrides'))
        except socket.error as e:
            # swallow the error to fix bz 1298327
            log.exception(e)
            pass
        self.written_overrides = WrittenOverrideCache()

        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            return

        # NOTE: if anything in the RepoActionInvoker init blocks, and it
        #       could, yum could still block. The closest thing to an
        #       event loop we have is the while True: sleep() in lock.py:Lock.acquire()

        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides._read_cache()

            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()

            if cache_only:
                status = override_cache._read_cache()
            else:
                status = override_cache.load_status(self.uep,
                                                    self.identity.uuid)

            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][
                    item['name']] = item['value']

    def perform(self):
        # Load the RepoFile from disk, this contains all our managed yum repo sections:
        repo_file = RepoFile()

        # the [rhsm] manage_repos can be overridden to disable generation of the
        # redhat.repo file:
        if not self.manage_repos:
            log.debug("manage_repos is 0, skipping generation of: %s" %
                      repo_file.path)
            if repo_file.exists():
                log.info("Removing %s due to manage_repos configuration." %
                         repo_file.path)
                RepoActionInvoker.delete_repo_file()
            return 0

        repo_file.read()
        valid = set()

        # Iterate content from entitlement certs, and create/delete each section
        # in the RepoFile as appropriate:
        for cont in self.get_unique_content():
            valid.add(cont.id)
            existing = repo_file.section(cont.id)
            if existing is None:
                repo_file.add(cont)
                self.report_add(cont)
            else:
                # Updates the existing repo with new content
                self.update_repo(existing, cont)
                repo_file.update(existing)
                self.report_update(existing)

        for section in repo_file.sections():
            if section not in valid:
                self.report_delete(section)
                repo_file.delete(section)

        # Write new RepoFile to disk:
        repo_file.write()
        if self.override_supported:
            # Update with the values we just wrote
            self.written_overrides.overrides = self.overrides
            self.written_overrides.write_cache()
        log.info("repos updated: %s" % self.report)
        return self.report

    def get_unique_content(self):
        # FIXME Shouldn't this skip all of the repo updating?
        if not self.manage_repos:
            return []

        # baseurl and ca_cert could be "CDNInfo" or
        # bundle with "ConnectionInfo" etc
        baseurl = CFG.get('rhsm', 'baseurl')
        ca_cert = CFG.get('rhsm', 'repo_ca_cert')

        content_list = self.get_all_content(baseurl, ca_cert)

        # assumes items in content_list are hashable
        return set(content_list)

    # Expose as public API for RepoActionInvoker.is_managed, since that
    # is used by openshift tooling.
    # See https://bugzilla.redhat.com/show_bug.cgi?id=1223038
    def matching_content(self):
        return model.find_content(self.ent_source, content_type="yum")

    def get_all_content(self, baseurl, ca_cert):
        matching_content = self.matching_content()
        content_list = []

        # avoid checking for release/etc if there is no matching_content
        if not matching_content:
            return content_list

        # wait until we know we have content before fetching
        # release. We could make YumReleaseverSource understand
        # cache_only as well.
        release_source = YumReleaseverSource()

        for content in matching_content:
            repo = Repo.from_ent_cert_content(content, baseurl, ca_cert,
                                              release_source)

            # overrides are yum repo only at the moment, but
            # content sources will likely need to learn how to
            # apply overrides as well, perhaps generically
            if self.override_supported and self.apply_overrides:
                repo = self._set_override_info(repo)

            content_list.append(repo)

        return content_list

    def _set_override_info(self, repo):
        # In the disconnected case, self.overrides will be an empty dict

        for name, value in self.overrides.get(repo.id, {}).items():
            repo[name] = value

        return repo

    def _is_overridden(self, repo, key):
        return key in self.overrides.get(repo.id, {})

    def _was_overridden(self, repo, key, value):
        written_value = self.written_overrides.overrides.get(repo.id,
                                                             {}).get(key)
        # Compare values as strings to avoid casting problems from io
        return written_value is not None and value is not None and str(
            written_value) == str(value)

    def _build_props(self, old_repo, new_repo):
        result = {}
        all_keys = list(old_repo.keys()) + list(new_repo.keys())
        for key in all_keys:
            result[key] = Repo.PROPERTIES.get(key, (1, None))
        return result

    def update_repo(self, old_repo, new_repo):
        """
        Checks an existing repo definition against a potentially updated
        version created from most recent entitlement certificates and
        configuration. Creates, updates, and removes properties as
        appropriate and returns the number of changes made. (if any)
        """
        changes_made = 0

        for key, (mutable, default) in self._build_props(old_repo,
                                                         new_repo).items():
            new_val = new_repo.get(key)

            # Mutable properties should be added if not currently defined,
            # otherwise left alone. However if we see that the property was overridden
            # but that override has since been removed, we need to revert to the default
            # value.
            if mutable and not self._is_overridden(old_repo, key) \
                    and not self._was_overridden(old_repo, key, old_repo.get(key)):
                if (new_val is not None) and (not old_repo.get(key)):
                    if old_repo.get(key) == new_val:
                        continue
                    old_repo[key] = new_val
                    changes_made += 1

            # Immutable properties should always be added/updated,
            # and removed if undefined in the new repo definition.
            else:
                if new_val is None or (str(new_val).strip() == ""):
                    # Immutable property should be removed:
                    if key in old_repo.keys():
                        del old_repo[key]
                        changes_made += 1
                    continue

                # Unchanged:
                if old_repo.get(key) == new_val:
                    continue

                old_repo[key] = new_val
                changes_made += 1

        return changes_made

    def report_update(self, repo):
        self.report.repo_updates.append(repo)

    def report_add(self, repo):
        self.report.repo_added.append(repo)

    def report_delete(self, section):
        self.report.repo_deleted.append(section)
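The mutable/immutable split in update_repo is the subtle part of this class. A simplified, self-contained sketch of the same merge rule over plain dicts may help; the hand-written props mapping stands in for Repo.PROPERTIES and the override checks are omitted, so treat it as an illustration of the rule rather than the real implementation:

# props maps key -> (mutable, default), standing in for Repo.PROPERTIES.
def merge_repo(old_repo, new_repo, props):
    changes = 0
    for key, (mutable, _default) in props.items():
        new_val = new_repo.get(key)
        if mutable:
            # Mutable keys (e.g. 'enabled') are only filled in when the user
            # has not already set a value of their own.
            if new_val is not None and not old_repo.get(key):
                if old_repo.get(key) != new_val:
                    old_repo[key] = new_val
                    changes += 1
        else:
            # Immutable keys (e.g. 'baseurl') always track the new definition
            # and are dropped when the new definition no longer provides them.
            if new_val is None or str(new_val).strip() == "":
                if key in old_repo:
                    del old_repo[key]
                    changes += 1
            elif old_repo.get(key) != new_val:
                old_repo[key] = new_val
                changes += 1
    return changes

old = {'enabled': '0', 'baseurl': 'https://old.example.com/repo'}
new = {'enabled': '1', 'baseurl': 'https://new.example.com/repo'}
props = {'enabled': (1, '1'), 'baseurl': (0, None)}
merge_repo(old, new, props)
# old == {'enabled': '0', 'baseurl': 'https://new.example.com/repo'}
# 'enabled' stays '0' (user setting kept); 'baseurl' follows the new definition.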
Example #8
def delete_repo_file(cls):
    repo_file = RepoFile()
    if os.path.exists(repo_file.path):
        os.unlink(repo_file.path)
    # When the repo is removed, also remove the override tracker
    WrittenOverrideCache.delete_cache()
Example #9
class RepoUpdateActionCommand(object):
    """UpdateAction for yum repos.

    Update yum repos when triggered. Generates yum repo config
    based on:
        - entitlement certs
        - repo overrides
        - rhsm config
        - yum config
        - manual changes made to "redhat.repo".

    Returns a RepoActionReport.
    """
    def __init__(self, cache_only=False, apply_overrides=True):
        self.identity = inj.require(inj.IDENTITY)

        # These should probably move closer to their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)

        self.ent_source = ent_cert.EntitlementDirEntitlementSource()

        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()

        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        if CFG.has_option('rhsm', 'manage_repos'):
            self.manage_repos = int(CFG.get('rhsm', 'manage_repos'))

        self.release = None
        self.overrides = {}
        self.override_supported = bool(self.identity.is_valid() and self.uep and self.uep.supports_resource('content_overrides'))
        self.written_overrides = WrittenOverrideCache()

        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            return

        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides._read_cache()
            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()
            if cache_only:
                status = override_cache._read_cache()
            else:
                status = override_cache.load_status(self.uep, self.identity.uuid)

            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][item['name']] = item['value']

    def perform(self):
        # Load the RepoFile from disk, this contains all our managed yum repo sections:
        repo_file = RepoFile()

        # the [rhsm] manage_repos can be overridden to disable generation of the
        # redhat.repo file:
        if not self.manage_repos:
            log.debug("manage_repos is 0, skipping generation of: %s" %
                    repo_file.path)
            if repo_file.exists():
                log.info("Removing %s due to manage_repos configuration." %
                        repo_file.path)
                RepoActionInvoker.delete_repo_file()
            return 0

        repo_file.read()
        valid = set()

        # Iterate content from entitlement certs, and create/delete each section
        # in the RepoFile as appropriate:
        for cont in self.get_unique_content():
            valid.add(cont.id)
            existing = repo_file.section(cont.id)
            if existing is None:
                repo_file.add(cont)
                self.report_add(cont)
            else:
                # Updates the existing repo with new content
                self.update_repo(existing, cont)
                repo_file.update(existing)
                self.report_update(existing)

        for section in repo_file.sections():
            if section not in valid:
                self.report_delete(section)
                repo_file.delete(section)

        # Write new RepoFile to disk:
        repo_file.write()
        if self.override_supported:
            # Update with the values we just wrote
            self.written_overrides.overrides = self.overrides
            self.written_overrides.write_cache()
        log.info("repos updated: %s" % self.report)
        return self.report

    def get_unique_content(self):
        # FIXME Shouldn't this skip all of the repo updating?
        if not self.manage_repos:
            return []

        # baseurl and ca_cert could be "CDNInfo" or
        # bundle with "ConnectionInfo" etc
        baseurl = CFG.get('rhsm', 'baseurl')
        ca_cert = CFG.get('rhsm', 'repo_ca_cert')

        content_list = self.get_all_content(baseurl, ca_cert)

        # assumes items in content_list are hashable
        return set(content_list)

    def get_all_content(self, baseurl, ca_cert):
        matching_content = model.find_content(self.ent_source,
                                              content_type="yum")

        content_list = []

        # avoid checking for release/etc if there is no matching_content
        if not matching_content:
            return content_list

        # wait until we know we have content before fetching
        # release. We could make YumReleaseverSource understand
        # cache_only as well.
        release_source = YumReleaseverSource()

        for content in matching_content:
            repo = Repo.from_ent_cert_content(content, baseurl, ca_cert,
                                              release_source)

            # overrides are yum repo only at the moment, but
            # content sources will likely need to learn how to
            # apply overrides as well, perhaps generically
            if self.override_supported and self.apply_overrides:
                repo = self._set_override_info(repo)

            content_list.append(repo)

        return content_list

    def _set_override_info(self, repo):
        # In the disconnected case, self.overrides will be an empty dict

        for name, value in self.overrides.get(repo.id, {}).items():
            repo[name] = value

        return repo

    def _is_overridden(self, repo, key):
        return key in self.overrides.get(repo.id, {})

    def _was_overridden(self, repo, key, value):
        written_value = self.written_overrides.overrides.get(repo.id, {}).get(key)
        # Compare values as strings to avoid casting problems from io
        return written_value is not None and value is not None and str(written_value) == str(value)

    def _build_props(self, old_repo, new_repo):
        result = {}
        all_keys = list(old_repo.keys()) + list(new_repo.keys())
        for key in all_keys:
            result[key] = Repo.PROPERTIES.get(key, (1, None))
        return result

    def update_repo(self, old_repo, new_repo):
        """
        Checks an existing repo definition against a potentially updated
        version created from most recent entitlement certificates and
        configuration. Creates, updates, and removes properties as
        appropriate and returns the number of changes made. (if any)
        """
        changes_made = 0

        for key, (mutable, default) in self._build_props(old_repo, new_repo).items():
            new_val = new_repo.get(key)

            # Mutable properties should be added if not currently defined,
            # otherwise left alone. However if we see that the property was overridden
            # but that override has since been removed, we need to revert to the default
            # value.
            if mutable and not self._is_overridden(old_repo, key) \
                    and not self._was_overridden(old_repo, key, old_repo.get(key)):
                if (new_val is not None) and (not old_repo.get(key)):
                    if old_repo.get(key) == new_val:
                        continue
                    old_repo[key] = new_val
                    changes_made += 1

            # Immutable properties should always be added/updated,
            # and removed if undefined in the new repo definition.
            else:
                if new_val is None or (str(new_val).strip() == ""):
                    # Immutable property should be removed:
                    if key in old_repo.keys():
                        del old_repo[key]
                        changes_made += 1
                    continue

                # Unchanged:
                if old_repo.get(key) == new_val:
                    continue

                old_repo[key] = new_val
                changes_made += 1

        return changes_made

    def report_update(self, repo):
        self.report.repo_updates.append(repo)

    def report_add(self, repo):
        self.report.repo_added.append(repo)

    def report_delete(self, section):
        self.report.repo_deleted.append(section)
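A hedged usage sketch for the class above; the dependency injection framework (inj) must already be configured, which normally happens inside subscription-manager itself, so treat this as an illustration rather than a supported entry point:

# Regenerate redhat.repo from locally cached data and inspect the report.
action = RepoUpdateActionCommand(cache_only=True, apply_overrides=True)
report = action.perform()
if report:  # perform() returns 0 when manage_repos is disabled
    print("added: %s, updated: %s, deleted: %s" %
          (report.repo_added, report.repo_updates, report.repo_deleted))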
Example #10
class RepoUpdateActionCommand(object):
    """UpdateAction for yum repos.

    Update yum repos when triggered. Generates yum repo config
    based on:
        - entitlement certs
        - repo overrides
        - rhsm config
        - yum config
        - manual changes made to "redhat.repo".

    If the system in question has a zypper repo directory, zypper repo config
    will also be generated.

    Returns a RepoActionReport.
    """
    def __init__(self, cache_only=False, apply_overrides=True):

        log.debug("Updating repo triggered with following attributes: cache_only=%s, apply_overrides=%s" %
                  (str(cache_only), str(apply_overrides)))

        self.identity = inj.require(inj.IDENTITY)

        # These should probably move closer to their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)

        self.ent_source = ent_cert.EntitlementDirEntitlementSource()

        self.cp_provider = inj.require(inj.CP_PROVIDER)

        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        self.manage_repos = manage_repos_enabled()

        self.release = None
        self.overrides = {}
        self.override_supported = False

        if not cache_only:
            self.uep = self.cp_provider.get_consumer_auth_cp()
        else:
            self.uep = None

        if self.identity.is_valid():
            supported_resources = ServerCache.get_supported_resources(self.identity.uuid, self.uep)
            self.override_supported = 'content_overrides' in supported_resources

        self.written_overrides = WrittenOverrideCache()

        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            log.debug("The system is not registered. Skipping refreshing data from server.")
            return

        # NOTE: if anything in the RepoActionInvoker init blocks, and it
        #       could, yum could still block. The closest thing to an
        #       event loop we have is the while True: sleep() in lock.py:Lock.acquire()

        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides.read_cache_only()

            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()

            if cache_only:
                status = override_cache.read_cache_only()
            else:
                status = override_cache.load_status(self.uep, self.identity.uuid)

            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][item['name']] = item['value']

    def perform(self):
        # the [rhsm] manage_repos can be overridden to disable generation of the
        # redhat.repo file:
        if not self.manage_repos:
            log.debug("manage_repos is 0, skipping generation of repo files")
            for repo_class, _dummy in get_repo_file_classes():
                repo_file = repo_class()
                if repo_file.exists():
                    log.info("Removing %s due to manage_repos configuration." %
                            repo_file.path)
            RepoActionInvoker.delete_repo_file()
            return 0

        repo_pairs = []
        for repo_class, server_val_repo_class in get_repo_file_classes():
            # Load the RepoFile from disk, this contains all our managed yum repo sections.
            # We want to instantiate late because having a long-lived instance creates the possibility
            # of reading the file twice.  Despite its name, the RepoFile object corresponds more to a
            # dictionary of config settings than a single file.  Doing a double read can result in comments
            # getting loaded twice.  A feedback loop can result, causing the repo file to grow at O(n^2).
            # See BZ 1658409
            repo_pairs.append((repo_class(), server_val_repo_class()))

        for repo_file, server_val_repo_file in repo_pairs:
            repo_file.read()
            server_val_repo_file.read()
        valid = set()

        # Iterate content from entitlement certs, and create/delete each section
        # in the RepoFile as appropriate:
        for cont in self.get_unique_content():
            valid.add(cont.id)

            for repo_file, server_value_repo_file in repo_pairs:
                if cont.content_type in repo_file.CONTENT_TYPES:
                    fixed_cont = repo_file.fix_content(cont)
                    existing = repo_file.section(fixed_cont.id)
                    server_value_repo = server_value_repo_file.section(fixed_cont.id)
                    if server_value_repo is None:
                        server_value_repo = fixed_cont
                        server_value_repo_file.add(fixed_cont)
                    if existing is None:
                        repo_file.add(fixed_cont)
                        self.report_add(fixed_cont)
                    else:
                        # Updates the existing repo with new content
                        self.update_repo(existing, fixed_cont, server_value_repo)
                        repo_file.update(existing)
                        server_value_repo_file.update(server_value_repo)
                        self.report_update(existing)

        for repo_file, server_value_repo_file in repo_pairs:
            for section in server_value_repo_file.sections():
                if section not in valid:
                    self.report_delete(section)
                    server_value_repo_file.delete(section)
                    repo_file.delete(section)

            # Write new RepoFile(s) to disk:
            server_value_repo_file.write()
            repo_file.write()

        if self.override_supported:
            # Update with the values we just wrote
            self.written_overrides.overrides = self.overrides
            self.written_overrides.write_cache()
        log.info("repos updated: %s" % self.report)
        return self.report

    def get_unique_content(self):
        # FIXME Shouldn't this skip all of the repo updating?
        if not self.manage_repos:
            return []

        # baseurl and ca_cert could be "CDNInfo" or
        # bundle with "ConnectionInfo" etc
        baseurl = conf['rhsm']['baseurl']
        ca_cert = conf['rhsm']['repo_ca_cert']

        content_list = self.get_all_content(baseurl, ca_cert)

        # assumes items in content_list are hashable
        return set(content_list)

    # Expose as public API for RepoActionInvoker.is_managed, since that
    # is used by Openshift tooling.
    # See https://bugzilla.redhat.com/show_bug.cgi?id=1223038
    def matching_content(self):
        content = []
        for content_type in ALLOWED_CONTENT_TYPES:
            content += model.find_content(self.ent_source,
                                          content_type=content_type)
        return content

    def get_all_content(self, baseurl, ca_cert):
        matching_content = self.matching_content()
        content_list = []

        # avoid checking for release/etc if there is no matching_content
        if not matching_content:
            return content_list

        # wait until we know we have content before fetching
        # release. We could make YumReleaseverSource understand
        # cache_only as well.
        release_source = YumReleaseverSource()

        for content in matching_content:
            repo = Repo.from_ent_cert_content(content, baseurl, ca_cert,
                                              release_source)

            # overrides are yum repo only at the moment, but
            # content sources will likely need to learn how to
            # apply overrides as well, perhaps generically
            if self.override_supported and self.apply_overrides:
                repo = self._set_override_info(repo)

            content_list.append(repo)

        return content_list

    def _set_override_info(self, repo):
        # In the disconnected case, self.overrides will be an empty dict

        for name, value in list(self.overrides.get(repo.id, {}).items()):
            repo[name] = value

        return repo

    def _is_overridden(self, repo, key):
        return key in self.overrides.get(repo.id, {})

    def _was_overridden(self, repo, key, value):
        written_value = self.written_overrides.overrides.get(repo.id, {}).get(key)
        # Compare values as strings to avoid casting problems from io
        return written_value is not None and value is not None and str(written_value) == str(value)

    def _build_props(self, old_repo, new_repo):
        result = {}
        all_keys = list(old_repo.keys()) + list(new_repo.keys())
        for key in all_keys:
            result[key] = Repo.PROPERTIES.get(key, (1, None))
        return result

    def update_repo(self, old_repo, new_repo, server_value_repo=None):
        """
        Checks an existing repo definition against a potentially updated
        version created from most recent entitlement certificates and
        configuration. Creates, updates, and removes properties as
        appropriate and returns the number of changes made. (if any)
        """
        changes_made = 0
        if server_value_repo is None:
            server_value_repo = {}

        for key, (mutable, _default) in list(self._build_props(old_repo, new_repo).items()):
            new_val = new_repo.get(key)

            # Mutable properties should be added if not currently defined,
            # otherwise left alone. However if we see that the property was overridden
            # but that override has since been removed, we need to revert to the default
            # value.
            if mutable and not self._is_overridden(old_repo, key) \
                    and not self._was_overridden(old_repo, key, old_repo.get(key)):
                if (new_val is not None) and (not old_repo.get(key) or
                        old_repo.get(key) == server_value_repo.get(key)):
                    if old_repo.get(key) == new_val:
                        continue
                    old_repo[key] = new_val
                    changes_made += 1

            # Immutable properties should always be added/updated,
            # and removed if undefined in the new repo definition.
            else:
                if new_val is None or (str(new_val).strip() == ""):
                    # Immutable property should be removed:
                    if key in list(old_repo.keys()):
                        del old_repo[key]
                        changes_made += 1
                    continue

                # Unchanged:
                if old_repo.get(key) == new_val:
                    continue

                old_repo[key] = new_val
                changes_made += 1

            if mutable and new_val is not None:
                server_value_repo[key] = new_val

        return changes_made

    def report_update(self, repo):
        self.report.repo_updates.append(repo)

    def report_add(self, repo):
        self.report.repo_added.append(repo)

    def report_delete(self, section):
        self.report.repo_deleted.append(section)
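The server_value_repo_file introduced in this variant exists so that update_repo can tell a value the user edited apart from one that simply mirrors what was last written on the server's behalf. A tiny sketch of that distinction for a mutable key (all dicts below are invented for illustration):

# A user edit survives; an untouched value still follows the server.
def merge_mutable(old_repo, server_value_repo, key, new_val):
    untouched = (not old_repo.get(key)
                 or old_repo.get(key) == server_value_repo.get(key))
    if new_val is not None and untouched and old_repo.get(key) != new_val:
        old_repo[key] = new_val
        return 1
    return 0

server_value = {'enabled': '1'}   # what was last written from server data
user_kept = {'enabled': '1'}      # user never touched the value
user_edited = {'enabled': '0'}    # user disabled the repo by hand

merge_mutable(user_kept, server_value, 'enabled', '0')    # -> 1, follows server
merge_mutable(user_edited, server_value, 'enabled', '1')  # -> 0, edit preserved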
Example #11
class RepoUpdateActionCommand(object):
    """UpdateAction for yum repos.

    Update yum repos when triggered. Generates yum repo config
    based on:
        - entitlement certs
        - repo overrides
        - rhsm config
        - yum config
        - manual changes made to "redhat.repo".

    If the system in question has a zypper repo directory, zypper repo config
    will also be generated.

    Returns a RepoActionReport.
    """
    def __init__(self, cache_only=False, apply_overrides=True):
        self.identity = inj.require(inj.IDENTITY)

        # These should probably move closer to their use
        self.ent_dir = inj.require(inj.ENT_DIR)
        self.prod_dir = inj.require(inj.PROD_DIR)

        self.ent_source = ent_cert.EntitlementDirEntitlementSource()

        self.cp_provider = inj.require(inj.CP_PROVIDER)
        self.uep = self.cp_provider.get_consumer_auth_cp()

        self.manage_repos = 1
        self.apply_overrides = apply_overrides
        self.manage_repos = manage_repos_enabled()

        self.release = None
        self.overrides = {}
        self.override_supported = False
        try:
            self.override_supported = bool(self.identity.is_valid() and self.uep and self.uep.supports_resource('content_overrides'))
        except (socket.error, connection.ConnectionException) as e:
            # swallow the error to fix bz 1298327
            log.exception(e)
            pass

        self.written_overrides = WrittenOverrideCache()

        # FIXME: empty report at the moment, should be changed to include
        # info about updated repos
        self.report = RepoActionReport()
        self.report.name = "Repo updates"
        # If we are not registered, skip trying to refresh the
        # data from the server
        if not self.identity.is_valid():
            return

        # NOTE: if anything in the RepoActionInvoker init blocks, and it
        #       could, yum could still block. The closest thing to an
        #       event loop we have is the while True: sleep() in lock.py:Lock.acquire()

        # Only attempt to update the overrides if they are supported
        # by the server.
        if self.override_supported:
            self.written_overrides.read_cache_only()

            try:
                override_cache = inj.require(inj.OVERRIDE_STATUS_CACHE)
            except KeyError:
                override_cache = OverrideStatusCache()

            if cache_only:
                status = override_cache.read_cache_only()
            else:
                status = override_cache.load_status(self.uep, self.identity.uuid)

            for item in status or []:
                # Don't iterate through the list
                if item['contentLabel'] not in self.overrides:
                    self.overrides[item['contentLabel']] = {}
                self.overrides[item['contentLabel']][item['name']] = item['value']

    def perform(self):
        # Load the RepoFile from disk, this contains all our managed yum repo sections:
        yum_repo_file = YumRepoFile()
        server_value_repo_file = YumRepoFile('var/lib/rhsm/repo_server_val/')
        zypper_repo_file = None
        if os.path.exists(ZYPPER_REPO_DIR):
            zypper_repo_file = ZypperRepoFile()

        # the [rhsm] manage_repos can be overridden to disable generation of the
        # redhat.repo file:
        if not self.manage_repos:
            log.debug("manage_repos is 0, skipping generation of: %s" %
                    yum_repo_file.path)
            if yum_repo_file.exists():
                log.info("Removing %s due to manage_repos configuration." %
                        yum_repo_file.path)
                RepoActionInvoker.delete_repo_file()
            return 0

        yum_repo_file.read()
        server_value_repo_file.read()
        if zypper_repo_file:
            zypper_repo_file.read()
        valid = set()

        # Iterate content from entitlement certs, and create/delete each section
        # in the RepoFile as appropriate:
        for cont in self.get_unique_content():
            valid.add(cont.id)
            existing = yum_repo_file.section(cont.id)
            server_value_repo = server_value_repo_file.section(cont.id)
            if server_value_repo is None:
                server_value_repo = cont
                server_value_repo_file.add(cont)
            if existing is None:
                yum_repo_file.add(cont)
                self.report_add(cont)
            else:
                # Updates the existing repo with new content
                self.update_repo(existing, cont, server_value_repo)
                yum_repo_file.update(existing)
                server_value_repo_file.update(server_value_repo)
                self.report_update(existing)

            if zypper_repo_file:  # no reporting for zypper, already reported for yum
                zypper_cont = self._zypper_content(cont)
                existing = zypper_repo_file.section(zypper_cont.id)
                if existing is None:
                    zypper_repo_file.add(zypper_cont)
                else:
                    zypper_repo_file.update(zypper_cont)

        for section in yum_repo_file.sections():
            if section not in valid:
                self.report_delete(section)
                yum_repo_file.delete(section)
                server_value_repo_file.delete(section)
                if zypper_repo_file:
                    zypper_repo_file.delete(section)

        # Write new RepoFile to disk:
        yum_repo_file.write()
        server_value_repo_file.write()
        if zypper_repo_file:
            zypper_repo_file.write()
        if self.override_supported:
            # Update with the values we just wrote
            self.written_overrides.overrides = self.overrides
            self.written_overrides.write_cache()
        log.info("repos updated: %s" % self.report)
        return self.report

    def _zypper_content(self, content):
        zypper_cont = content.copy()
        sslverify = zypper_cont['sslverify']
        sslcacert = zypper_cont['sslcacert']
        sslclientkey = zypper_cont['sslclientkey']
        sslclientcert = zypper_cont['sslclientcert']
        proxy = zypper_cont['proxy']
        proxy_username = zypper_cont['proxy_username']
        proxy_password = zypper_cont['proxy_password']

        del zypper_cont['sslverify']
        del zypper_cont['sslcacert']
        del zypper_cont['sslclientkey']
        del zypper_cont['sslclientcert']
        del zypper_cont['proxy']
        del zypper_cont['proxy_username']
        del zypper_cont['proxy_password']
        # NOTE looks like metadata_expire and ui_repoid_vars are ignored by zypper

        # clean up data for zypper
        if zypper_cont['gpgkey'] in ['https://', 'http://']:
            del zypper_cont['gpgkey']

        baseurl = zypper_cont['baseurl']
        parsed = urlparse(baseurl)
        zypper_query_args = parse_qs(parsed.query)
        if sslverify and sslverify in ['1']:
            zypper_query_args['ssl_verify'] = 'host'
        if sslcacert:
            zypper_query_args['ssl_capath'] = os.path.dirname(sslcacert)
        if sslclientkey:
            zypper_query_args['ssl_clientkey'] = sslclientkey
        if sslclientcert:
            zypper_query_args['ssl_clientcert'] = sslclientcert
        if proxy:
            zypper_query_args['proxy'] = proxy
        if proxy_username:
            zypper_query_args['proxyuser'] = proxy_username
        if proxy_password:
            zypper_query_args['proxypass'] = proxy_password
        zypper_query = urlencode(zypper_query_args)

        new_url = urlunparse((parsed.scheme, parsed.netloc, parsed.path, parsed.params, zypper_query, parsed.fragment))
        zypper_cont['baseurl'] = new_url

        return zypper_cont

    def get_unique_content(self):
        # FIXME Shouldn't this skip all of the repo updating?
        if not self.manage_repos:
            return []

        # baseurl and ca_cert could be "CDNInfo" or
        # bundle with "ConnectionInfo" etc
        baseurl = conf['rhsm']['baseurl']
        ca_cert = conf['rhsm']['repo_ca_cert']

        content_list = self.get_all_content(baseurl, ca_cert)

        # assumes items in content_list are hashable
        return set(content_list)

    # Expose as public API for RepoActionInvoker.is_managed, since that
    # is used by Openshift tooling.
    # See https://bugzilla.redhat.com/show_bug.cgi?id=1223038
    def matching_content(self):
        return model.find_content(self.ent_source,
                                  content_type="yum")

    def get_all_content(self, baseurl, ca_cert):
        matching_content = self.matching_content()
        content_list = []

        # avoid checking for release/etc if there is no matching_content
        if not matching_content:
            return content_list

        # wait until we know we have content before fetching
        # release. We could make YumReleaseverSource understand
        # cache_only as well.
        release_source = YumReleaseverSource()

        for content in matching_content:
            repo = Repo.from_ent_cert_content(content, baseurl, ca_cert,
                                              release_source)

            # overrides are yum repo only at the moment, but
            # content sources will likely need to learn how to
            # apply overrides as well, perhaps generically
            if self.override_supported and self.apply_overrides:
                repo = self._set_override_info(repo)

            content_list.append(repo)

        return content_list

    def _set_override_info(self, repo):
        # In the disconnected case, self.overrides will be an empty dict

        for name, value in list(self.overrides.get(repo.id, {}).items()):
            repo[name] = value

        return repo

    def _is_overridden(self, repo, key):
        return key in self.overrides.get(repo.id, {})

    def _was_overridden(self, repo, key, value):
        written_value = self.written_overrides.overrides.get(repo.id, {}).get(key)
        # Compare values as strings to avoid casting problems from io
        return written_value is not None and value is not None and str(written_value) == str(value)

    def _build_props(self, old_repo, new_repo):
        result = {}
        all_keys = list(old_repo.keys()) + list(new_repo.keys())
        for key in all_keys:
            result[key] = Repo.PROPERTIES.get(key, (1, None))
        return result

    def update_repo(self, old_repo, new_repo, server_value_repo=None):
        """
        Checks an existing repo definition against a potentially updated
        version created from most recent entitlement certificates and
        configuration. Creates, updates, and removes properties as
        appropriate and returns the number of changes made. (if any)
        """
        changes_made = 0
        if server_value_repo is None:
            server_value_repo = {}

        for key, (mutable, _default) in list(self._build_props(old_repo, new_repo).items()):
            new_val = new_repo.get(key)

            # Mutable properties should be added if not currently defined,
            # otherwise left alone. However if we see that the property was overridden
            # but that override has since been removed, we need to revert to the default
            # value.
            if mutable and not self._is_overridden(old_repo, key) \
                    and not self._was_overridden(old_repo, key, old_repo.get(key)):
                if (new_val is not None) and (not old_repo.get(key) or
                        old_repo.get(key) == server_value_repo.get(key)):
                    if old_repo.get(key) == new_val:
                        continue
                    old_repo[key] = new_val
                    changes_made += 1

            # Immutable properties should always be added/updated,
            # and removed if undefined in the new repo definition.
            else:
                if new_val is None or (str(new_val).strip() == ""):
                    # Immutable property should be removed:
                    if key in list(old_repo.keys()):
                        del old_repo[key]
                        changes_made += 1
                    continue

                # Unchanged:
                if old_repo.get(key) == new_val:
                    continue

                old_repo[key] = new_val
                changes_made += 1

            if (mutable and new_val is not None):
                server_value_repo[key] = new_val

        return changes_made

    def report_update(self, repo):
        self.report.repo_updates.append(repo)

    def report_add(self, repo):
        self.report.repo_added.append(repo)

    def report_delete(self, section):
        self.report.repo_deleted.append(section)
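The baseurl rewrite performed by _zypper_content above boils down to moving SSL options into the repo URL's query string. A standalone sketch of that transformation with urllib.parse (the URL and certificate paths are invented for illustration):

# Re-encode SSL options as query parameters on the repo URL, in the same
# form the snippet above writes for zypper. Paths and URL are invented.
import os
from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

baseurl = "https://cdn.example.com/content/os/$releasever/$basearch"
sslcacert = "/etc/rhsm/ca/redhat-uep.pem"
sslclientcert = "/etc/pki/entitlement/1234.pem"
sslclientkey = "/etc/pki/entitlement/1234-key.pem"

parsed = urlparse(baseurl)
query = parse_qs(parsed.query)
query['ssl_verify'] = 'host'
query['ssl_capath'] = os.path.dirname(sslcacert)
query['ssl_clientcert'] = sslclientcert
query['ssl_clientkey'] = sslclientkey

new_url = urlunparse((parsed.scheme, parsed.netloc, parsed.path,
                      parsed.params, urlencode(query), parsed.fragment))
# new_url now carries ssl_verify, ssl_capath, ssl_clientcert and
# ssl_clientkey as query parameters.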