Example #1
    def remove_apt_config(self):
        """Remove any repository apt configuration files."""
        series = util.get_platform_info('series')
        repo_filename = self.repo_list_file_tmpl.format(name=self.name,
                                                        series=series)
        keyring_file = os.path.join(apt.APT_KEYS_DIR, self.repo_key_file)
        entitlement = self.cfg.read_cache('machine-access-%s' % self.name).get(
            'entitlement', {})
        access_directives = entitlement.get('directives', {})
        repo_url = access_directives.get('aptURL', self.repo_url)
        if not repo_url:
            repo_url = self.repo_url
        if self.disable_apt_auth_only:
            # We only remove the repo from the apt auth file, because ESM
            # is a special-case: we want to be able to report on the
            # available ESM updates even when it's disabled
            apt.remove_repo_from_apt_auth_file(repo_url)
        else:
            apt.remove_auth_apt_repo(repo_filename, repo_url, keyring_file)
            apt.remove_apt_list_files(repo_url, series)
        if self.repo_pin_priority:
            repo_pref_file = self.repo_pref_file_tmpl.format(name=self.name,
                                                             series=series)
            if os.path.exists(repo_pref_file):
                os.unlink(repo_pref_file)
Example #2
    def test_get_platform_info_with_version(self, series, release, version,
                                            os_release_content, tmpdir):
        release_file = tmpdir.join("os-release")
        release_file.write(os_release_content)
        parse_dict = util.parse_os_release(release_file.strpath)

        expected = {
            "arch": "arm64",
            "distribution": "Ubuntu",
            "kernel": "kernel-ver",
            "release": release,
            "series": series,
            "type": "Linux",
            "version": version,
        }

        with mock.patch("uaclient.util.parse_os_release") as m_parse:
            with mock.patch("uaclient.util.os.uname") as m_uname:
                with mock.patch("uaclient.util.subp") as m_subp:
                    m_parse.return_value = parse_dict
                    # (sysname, nodename, release, version, machine)
                    m_uname.return_value = posix.uname_result(
                        ("", "", "kernel-ver", "", "aarch64"))
                    m_subp.return_value = ("arm64\n", "")
                    assert expected == util.get_platform_info()
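As a rough companion to the test above, here is a minimal standalone sketch of how a get_platform_info()-style helper could assemble that dict from the three sources the test mocks: an os-release parse, os.uname(), and a subprocess call for the architecture. The helpers below (parse_os_release_simple, get_platform_info_sketch) are local stand-ins, not the uaclient implementation, and the exact field sources are assumptions.

import os
import subprocess


def parse_os_release_simple(path="/etc/os-release"):
    """Parse KEY=value pairs from an os-release style file."""
    info = {}
    with open(path) as stream:
        for line in stream:
            line = line.strip()
            if "=" in line:
                key, value = line.split("=", 1)
                info[key] = value.strip('"')
    return info


def get_platform_info_sketch():
    os_release = parse_os_release_simple()
    uname = os.uname()
    # The test mocks a subprocess call returning "arm64" while uname reports
    # "aarch64", so the architecture is assumed to come from dpkg here.
    arch = subprocess.run(
        ["dpkg", "--print-architecture"], capture_output=True, text=True
    ).stdout.strip()
    return {
        "arch": arch,
        "distribution": os_release.get("NAME", ""),
        "kernel": uname.release,
        "release": os_release.get("VERSION_ID", ""),
        "series": os_release.get("VERSION_CODENAME", ""),
        "type": uname.sysname,
        "version": os_release.get("VERSION", ""),
    }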
Example #3
    def disable_apt_auth_only(self) -> bool:
        """Ubuntu EOL releases are in active ESM.

        Leave unauthenticated APT sources on disk with never pinning to ensure
        visibility to UA ESM: Infra packages for MOTD/APT messaging.
        """
        return util.is_active_esm(util.get_platform_info()["series"])
Example #4
    def disable_apt_auth_only(self) -> bool:
        """All LTS remove APT auth files upon disable"""
        series = util.get_platform_info()["series"]

        if self.valid_service:
            return util.is_lts(series)
        return False
Example #5
    def disable(self, silent=False, force=False):
        """Disable specific entitlement

        @return: True on success, False otherwise.
        """
        if not self.can_disable(silent, force):
            return False
        series = util.get_platform_info('series')
        repo_filename = self.repo_list_file_tmpl.format(
            name=self.name, series=series)
        keyring_file = os.path.join(apt.APT_KEYS_DIR, self.repo_key_file)
        entitlement_cfg = self.cfg.read_cache(
            'machine-access-%s' % self.name)['entitlement']
        access_directives = entitlement_cfg.get('directives', {})
        repo_url = access_directives.get('aptURL', self.repo_url)
        if not repo_url:
            repo_url = self.repo_url
        apt.remove_auth_apt_repo(repo_filename, repo_url, keyring_file)
        apt.remove_apt_list_files(repo_url, series)
        print('Removing packages: %s' % ', '.join(self.packages))
        try:
            util.subp(['apt-get', 'remove', '--assume-yes'] + self.packages)
        except util.ProcessExecutionError:
            pass
        self._set_local_enabled(False)
        return True
Example #6
    def repo_pin_priority(self) -> Optional[str]:
        """All LTS should pin esm-apps."""
        series = util.get_platform_info()["series"]

        if self.valid_service:
            if util.is_lts(series):
                return "never"
        return None
Example #7
def add_auth_apt_repo(
    repo_filename: str,
    repo_url: str,
    credentials: str,
    suites: "List[str]",
    keyring_file: str,
) -> None:
    """Add an authenticated apt repo and credentials to the system.

    @raises: InvalidAPTCredentialsError when the token provided can't access
        the repo PPA.
    """
    try:
        username, password = credentials.split(":")
    except ValueError:  # Then we have a bearer token
        username = "******"
        password = credentials
    series = util.get_platform_info()["series"]
    if repo_url.endswith("/"):
        repo_url = repo_url[:-1]
    assert_valid_apt_credentials(repo_url, username, password)

    # Does this system have updates suite enabled?
    updates_enabled = False
    policy = run_apt_command(["apt-cache", "policy"],
                             status.MESSAGE_APT_POLICY_FAILED)
    for line in policy.splitlines():
        # We only care about $suite-updates lines
        if "a={}-updates".format(series) not in line:
            continue
        # We only care about $suite-updates from the Ubuntu archive
        if "o=Ubuntu," not in line:
            continue
        updates_enabled = True
        break

    content = ""
    for suite in suites:
        if series not in suite:
            continue  # Only enable suites matching this current series
        maybe_comment = ""
        if "-updates" in suite and not updates_enabled:
            logging.debug(
                'Not enabling apt suite "%s" because "%s-updates" is not'
                " enabled",
                suite,
                series,
            )
            maybe_comment = "# "
        content += ("{maybe_comment}deb {url}/ubuntu {suite} main\n"
                    "# deb-src {url}/ubuntu {suite} main\n".format(
                        maybe_comment=maybe_comment, url=repo_url,
                        suite=suite))
    util.write_file(repo_filename, content)
    add_apt_auth_conf_entry(repo_url, username, password)
    source_keyring_file = os.path.join(KEYRINGS_DIR, keyring_file)
    destination_keyring_file = os.path.join(APT_KEYS_DIR, keyring_file)
    gpg.export_gpg_key(source_keyring_file, destination_keyring_file)
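To make the rendering step above concrete, the standalone snippet below replays the suite loop with fabricated inputs (URL, series, and suites are made up) and prints the sources list content it would write:

# Illustration only; repo_url and suites are hypothetical values.
repo_url = "https://esm.ubuntu.com/apps"
series = "focal"
suites = ["focal-apps-security", "focal-apps-updates"]
updates_enabled = False  # pretend focal-updates is not enabled on this system

content = ""
for suite in suites:
    if series not in suite:
        continue
    maybe_comment = "# " if ("-updates" in suite and not updates_enabled) else ""
    content += ("{maybe_comment}deb {url}/ubuntu {suite} main\n"
                "# deb-src {url}/ubuntu {suite} main\n".format(
                    maybe_comment=maybe_comment, url=repo_url, suite=suite))
print(content)
# deb https://esm.ubuntu.com/apps/ubuntu focal-apps-security main
# # deb-src https://esm.ubuntu.com/apps/ubuntu focal-apps-security main
# # deb https://esm.ubuntu.com/apps/ubuntu focal-apps-updates main
# # deb-src https://esm.ubuntu.com/apps/ubuntu focal-apps-updates main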
Example #8
    def application_status(self) -> 'Tuple[status.ApplicationStatus, str]':
        super_status, super_msg = super().application_status()
        if super_status != status.ApplicationStatus.ENABLED:
            return super_status, super_msg
        running_kernel = util.get_platform_info()['kernel']
        if running_kernel.endswith('-fips'):
            return super_status, super_msg
        return (status.ApplicationStatus.PENDING,
                'Reboot to FIPS kernel required')
Example #9
    def enable(self):
        """Enable specific entitlement.

        @return: True on success, False otherwise.
        """
        if not self.can_enable():
            return False
        series = util.get_platform_info('series')
        repo_filename = self.repo_list_file_tmpl.format(name=self.name,
                                                        series=series)
        resource_cfg = self.cfg.entitlements.get(self.name)
        directives = resource_cfg['entitlement'].get('directives', {})
        token = resource_cfg.get('resourceToken')
        if not token:
            logging.debug(
                'No specific resourceToken present. Using machine token'
                ' as %s credentials', self.title)
            token = self.cfg.machine_token['machineSecret']
        ppa_fingerprint = directives.get('aptKey')
        if ppa_fingerprint:
            keyring_file = None
        else:
            keyring_file = os.path.join(apt.KEYRINGS_DIR, self.repo_key_file)
        repo_url = directives.get('aptURL')
        if not repo_url:
            repo_url = self.repo_url
        try:
            apt.add_auth_apt_repo(repo_filename, repo_url, token, keyring_file,
                                  ppa_fingerprint)
        except apt.InvalidAPTCredentialsError as e:
            logging.error(str(e))
            return False
        if self.repo_pin_priority:
            repo_pref_file = self.repo_pref_file_tmpl.format(name=self.name,
                                                             series=series)
            apt.add_ppa_pinning(repo_pref_file, repo_url,
                                self.repo_pin_priority)
        if not os.path.exists(apt.APT_METHOD_HTTPS_FILE):
            util.subp(['apt-get', 'install', 'apt-transport-https'],
                      capture=True)
        if not os.path.exists(apt.CA_CERTIFICATES_FILE):
            util.subp(['apt-get', 'install', 'ca-certificates'], capture=True)
        try:
            util.subp(['apt-get', 'update'], capture=True)
            if self.packages:
                print('Installing {title} packages'
                      ' (this may take a while)'.format(title=self.title))
                util.subp(['apt-get', 'install'] + self.packages)
        except util.ProcessExecutionError:
            self.disable(silent=True, force=True)
            logging.error(
                status.MESSAGE_ENABLED_FAILED_TMPL.format(title=self.title))
            return False
        print(status.MESSAGE_ENABLED_TMPL.format(title=self.title))
        return True
Example #10
    def application_status(self) -> "Tuple[status.ApplicationStatus, str]":
        super_status, super_msg = super().application_status()
        if super_status != status.ApplicationStatus.ENABLED:
            return super_status, super_msg
        running_kernel = util.get_platform_info()["kernel"]
        if running_kernel.endswith("-fips"):
            return super_status, super_msg
        return (
            status.ApplicationStatus.ENABLED,
            "Reboot to FIPS kernel required",
        )
Example #11
    def request_resources(self) -> "Dict[str, Any]":
        """Requests list of entitlements available to this machine type."""
        platform = util.get_platform_info()
        query_params = {
            "architecture": platform["arch"],
            "series": platform["series"],
            "kernel": platform["kernel"],
        }
        resource_response, headers = self.request_url(
            API_V1_RESOURCES + "?" + urllib.parse.urlencode(query_params))
        return resource_response
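For reference, this is the query string the method above builds from the platform dict; the values and the /v1/resources path are illustrative assumptions, not output from a real client:

import urllib.parse

platform = {"arch": "amd64", "series": "focal", "kernel": "5.4.0-42-generic"}
query_params = {
    "architecture": platform["arch"],
    "series": platform["series"],
    "kernel": platform["kernel"],
}
print("/v1/resources" + "?" + urllib.parse.urlencode(query_params))
# /v1/resources?architecture=amd64&series=focal&kernel=5.4.0-42-generic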
Example #12
def add_ppa_pinning(apt_preference_file, repo_url, origin, priority):
    """Add an apt preferences file and pin for a PPA."""
    series = util.get_platform_info()["series"]
    _protocol, repo_path = repo_url.split("://")
    if repo_path.endswith("/"):  # strip trailing slash
        repo_path = repo_path[:-1]
    content = ("Package: *\n"
               "Pin: release o={origin}, n={series}\n"
               "Pin-Priority: {priority}\n".format(origin=origin,
                                                   priority=priority,
                                                   series=series))
    util.write_file(apt_preference_file, content)
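The apt preferences content written by this helper looks like the following; the origin, priority, and series below are made-up values:

series = "bionic"            # normally util.get_platform_info()["series"]
origin = "UbuntuESMApps"     # hypothetical origin
priority = "never"           # e.g. the pin priority returned in Example #6

content = ("Package: *\n"
           "Pin: release o={origin}, n={series}\n"
           "Pin-Priority: {priority}\n".format(origin=origin,
                                               priority=priority,
                                               series=series))
print(content)
# Package: *
# Pin: release o=UbuntuESMApps, n=bionic
# Pin-Priority: never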
Example #13
def add_ppa_pinning(apt_preference_file, repo_url, priority):
    """Add an apt preferences file and pin for a PPA."""
    series = util.get_platform_info('series')
    _protocol, repo_path = repo_url.split('://')
    origin = repo_path.replace('private-ppa.launchpad.net/', 'LP-PPA-')
    origin = origin.replace('/', '-')
    content = (
        'Package: *\n'
        'Pin: release o={origin}, n={series}\n'
        'Pin-Priority: {priority}\n'.format(
            origin=origin, priority=priority, series=series))
    util.write_file(apt_preference_file, content)
Example #14
def get_instance_id(
        _iid_file: str = CLOUDINIT_INSTANCE_ID_FILE) -> "Optional[str]":
    """Query cloud instance-id from cmdline or CLOUDINIT_INSTANCE_ID_FILE"""
    if "trusty" != util.get_platform_info()["series"]:
        # Present in cloud-init on >= Xenial
        out, _err = util.subp(["cloud-init", "query", "instance_id"])
        return out.strip()
    if os.path.exists(_iid_file):
        return util.load_file(_iid_file)
    logging.warning("Unable to determine current instance-id from %s",
                    _iid_file)
    return None
Example #15
def _get_data_files():
    data_files = [
        ("/etc/apt/apt.conf.d", ["apt.conf.d/51ubuntu-advantage-esm"]),
        ("/etc/ubuntu-advantage", ["uaclient.conf"]),
        ("/usr/share/keyrings", glob.glob("keyrings/*")),
        (defaults.CONFIG_DEFAULTS["data_dir"], []),
    ]
    rel_major, _rel_minor = util.get_platform_info()["release"].split(".", 1)
    if rel_major == "14":
        data_files.append(("/etc/init", glob.glob("upstart/*")))
    else:
        data_files.append(("/lib/systemd/system", glob.glob("systemd/*")))
    return data_files
Example #16
    def static_affordances(self) -> Tuple[StaticAffordance, ...]:
        cloud_titles = {"aws": "an AWS", "azure": "an Azure", "gce": "a GCP"}
        cloud_id, _ = get_cloud_type()
        if cloud_id is None:
            cloud_id = ""

        series = util.get_platform_info().get("series", "")
        blocked_message = messages.FIPS_BLOCK_ON_CLOUD.format(
            series=series.title(), cloud=cloud_titles.get(cloud_id)
        )
        return (
            (
                blocked_message,
                lambda: self._allow_fips_on_cloud_instance(series, cloud_id),
                True,
            ),
        )
Example #17
    def packages_status(self) -> Dict[str, CVEPackageStatus]:
        """Dict of package status dicts for the current Ubuntu series.

        Top-level keys are source package names and each value is a
        CVEPackageStatus object
        """
        if hasattr(self, "_packages_status"):
            return self._packages_status  # type: ignore
        self._packages_status = {}
        series = util.get_platform_info()["series"]
        for package in self.response["packages"]:
            for pkg_status in package["statuses"]:
                if pkg_status["release_codename"] == series:
                    self._packages_status[package["name"]] = CVEPackageStatus(
                        pkg_status
                    )
        return self._packages_status
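The loop above assumes a response shaped roughly like the invented example below, with per-package statuses keyed by release codename (the raw status dict stands in for CVEPackageStatus here):

series = "xenial"
response = {
    "packages": [
        {
            "name": "samba",
            "statuses": [
                {"release_codename": "xenial", "status": "released"},
                {"release_codename": "bionic", "status": "needed"},
            ],
        },
    ],
}

packages_status = {}
for package in response["packages"]:
    for pkg_status in package["statuses"]:
        if pkg_status["release_codename"] == series:
            packages_status[package["name"]] = pkg_status
print(packages_status)
# {'samba': {'release_codename': 'xenial', 'status': 'released'}}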
Example #18
    def conditional_packages(self):
        """
        Dictionary of conditional packages to be installed when
        enabling FIPS services. For example, if we are enabling
        FIPS services on a machine that has openssh-client installed,
        we will perform two actions:

        1. Upgrade the package to the FIPS version
        2. Install the corresponding hmac version of that package
           when available.
        """
        series = util.get_platform_info().get("series", "")

        if util.is_container():
            return FIPS_CONTAINER_CONDITIONAL_PACKAGES.get(series, [])

        return FIPS_CONDITIONAL_PACKAGES.get(series, [])
Example #19
    def check_affordances(self):
        """Check all contract affordances to vet current platform

        Affordances are a list of support constraints for the entitlement.
        Examples include a list of supported series, architectures, and
        kernel flavors.

        @return: Tuple (boolean, detailed_message). True if the platform
            passes all defined affordances, False if it fails any of the
            provided constraints.
        """
        entitlements = self.cfg.entitlements
        entitlement_cfg = entitlements.get(self.name)
        if not entitlement_cfg:
            return True, 'no entitlement affordances checked'
        affordances = entitlement_cfg['entitlement'].get('affordances', {})
        platform = util.get_platform_info()
        affordance_arches = affordances.get('architectures', [])
        if affordance_arches and platform['arch'] not in affordance_arches:
            return False, status.MESSAGE_INAPPLICABLE_ARCH_TMPL.format(
                title=self.title, arch=platform['arch'],
                supported_arches=', '.join(affordance_arches))
        affordance_series = affordances.get('series', [])
        if affordance_series and platform['series'] not in affordance_series:
            return False, status.MESSAGE_INAPPLICABLE_SERIES_TMPL.format(
                title=self.title, series=platform['series'])
        affordance_kernels = affordances.get('kernelFlavors', [])
        if affordance_kernels:
            kernel = platform['kernel']
            match = re.match(RE_KERNEL_UNAME, kernel)
            if not match:
                logging.warning('Could not parse kernel uname: %s', kernel)
                return (False,
                        status.MESSAGE_INAPPLICABLE_KERNEL_TMPL.format(
                            title=self.title, kernel=kernel,
                            supported_kernels=', '.join(affordance_kernels)))
            if match.group('flavor') not in affordance_kernels:
                return (False,
                        status.MESSAGE_INAPPLICABLE_KERNEL_TMPL.format(
                            title=self.title, kernel=kernel,
                            supported_kernels=', '.join(affordance_kernels)))
        for error_message, functor, expected_result in self.static_affordances:
            if functor() != expected_result:
                return False, error_message
        return True, ''
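To illustrate the membership checks above, here is an invented affordances dict next to a platform dict; the data is fabricated and only the series test fails:

affordances = {
    "architectures": ["amd64", "arm64"],
    "series": ["xenial", "bionic", "focal"],
    "kernelFlavors": ["generic", "lowlatency"],
}
platform = {"arch": "amd64", "series": "trusty", "kernel": "4.4.0-21-generic"}

arch_ok = not affordances["architectures"] or platform["arch"] in affordances["architectures"]
series_ok = not affordances["series"] or platform["series"] in affordances["series"]
print(arch_ok, series_ok)
# True False  -> check_affordances would return the inapplicable-series message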
Example #20
def migrate_apt_sources(clean=False, cfg=None, platform_info=None):
    """Migrate apt sources list files across upgrade/downgrade boundary.

    Only migrate apt sources if we are attached and an entitlement is
    active (meaning there is an existing apt policy reference).

    @param clean: Boolean set True to clean up any apt config files written by
        Ubuntu Advantage Client.
    @param cfg: UAClient configuration instance for testing
    @param platform_info: platform information dict for testing
    """

    from uaclient import config
    from uaclient import entitlements
    from uaclient import status

    if not platform_info:  # for testing
        platform_info = util.get_platform_info()
    if not cfg:  # for testing
        cfg = config.UAConfig()
    if not any([cfg.is_attached, clean]):
        return
    for ent_cls in entitlements.ENTITLEMENT_CLASSES:
        if not hasattr(ent_cls, 'repo_url'):
            continue
        repo_list_glob = ent_cls.repo_list_file_tmpl.format(
            name=ent_cls.name, series='*')

        # Remove invalid series list files
        for path in glob.glob(repo_list_glob):
            if platform_info['series'] not in path or clean:
                logging.info('Removing old apt source file: %s', path)
                os.unlink(path)
        if clean:
            continue  # Skip any re-enable operations
        entitlement = ent_cls(cfg)
        op_status, _details = entitlement.operational_status()
        if op_status != status.ACTIVE:
            continue
        pass_affordances, details = entitlement.check_affordances()
        if not pass_affordances:
            logging.info(
                'Disabled %s after package upgrade/downgrade. %s',
                entitlement.title, details)
        entitlement.enable()  # Re-enable on current series
Example #21
    def remove_apt_config(self,
                          run_apt_update: bool = True,
                          silent: bool = False):
        """Remove any repository apt configuration files.

        :param run_apt_update: Whether to run apt update after the apt
            configuration files have been removed.
        """
        series = util.get_platform_info()["series"]
        repo_filename = self.repo_list_file_tmpl.format(name=self.name)
        entitlement = self.cfg.entitlements[self.name].get("entitlement", {})
        access_directives = entitlement.get("directives", {})
        repo_url = access_directives.get("aptURL")
        if not repo_url:
            raise exceptions.MissingAptURLDirective(self.name)
        if self.disable_apt_auth_only:
            # We only remove the repo from the apt auth file, because
            # UA Infra: ESM is a special-case: we want to be able to report on
            # the available UA Infra: ESM updates even when it's disabled
            apt.remove_repo_from_apt_auth_file(repo_url)
            apt.restore_commented_apt_list_file(repo_filename)
        else:
            apt.remove_auth_apt_repo(repo_filename, repo_url,
                                     self.repo_key_file)
            apt.remove_apt_list_files(repo_url, series)
        if self.repo_pin_priority:
            repo_pref_file = self.repo_pref_file_tmpl.format(name=self.name)
            if self.repo_pin_priority == "never":
                # Disable the repo with a pinning file
                apt.add_ppa_pinning(
                    repo_pref_file,
                    repo_url,
                    self.origin,
                    self.repo_pin_priority,
                )
            elif os.path.exists(repo_pref_file):
                os.unlink(repo_pref_file)

        if run_apt_update:
            if not silent:
                event.info(messages.APT_UPDATING_LISTS)
            apt.run_apt_update_command()
Example #22
    def process_contract_deltas(
        self,
        orig_access: "Dict[str, Any]",
        deltas: "Dict[str, Any]",
        allow_enable: bool = False,
    ) -> bool:
        """Process any contract access deltas for this entitlement.

        :param orig_access: Dictionary containing the original
            resourceEntitlement access details.
        :param deltas: Dictionary which contains only the changed access keys
            and values.
        :param allow_enable: Boolean set True if allowed to perform the enable
            operation. When False, a message will be logged to inform the user
            about the recommended enabled service.

        :return: True when delta operations are processed; False when noop.
        """
        if super().process_contract_deltas(orig_access, deltas, allow_enable):
            return True  # Already processed parent class deltas

        application_status, _ = self.application_status()
        if application_status == status.ApplicationStatus.DISABLED:
            return True
        logging.info(
            "Updating '%s' apt sources list on changed directives.", self.name
        )
        delta_entitlement = deltas.get("entitlement", {})
        if delta_entitlement.get("directives", {}).get("aptURL"):
            orig_entitlement = orig_access.get("entitlement", {})
            old_url = orig_entitlement.get("directives", {}).get("aptURL")
            if old_url:
                # Remove original aptURL and auth and rewrite
                series = util.get_platform_info()["series"]
                repo_filename = self.repo_list_file_tmpl.format(
                    name=self.name, series=series
                )
                apt.remove_auth_apt_repo(repo_filename, old_url)
        self.remove_apt_config()
        self.setup_apt_config()
        return True
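A deltas dict shaped like the invented one below is what sends process_contract_deltas down the aptURL rewrite path; both URLs are fabricated:

orig_access = {
    "entitlement": {"directives": {"aptURL": "https://old.example.com"}}
}
deltas = {
    "entitlement": {"directives": {"aptURL": "https://new.example.com"}}
}
old_url = orig_access.get("entitlement", {}).get("directives", {}).get("aptURL")
new_url = deltas.get("entitlement", {}).get("directives", {}).get("aptURL")
print(old_url, "->", new_url)
# https://old.example.com -> https://new.example.com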
Example #23
def add_auth_apt_repo(repo_filename: str, repo_url: str, credentials: str,
                      suites: 'List[str]', keyring_file: str = None) -> None:
    """Add an authenticated apt repo and credentials to the system.

    @raises: InvalidAPTCredentialsError when the token provided can't access
        the repo PPA.
    """
    try:
        username, password = credentials.split(':')
    except ValueError:  # Then we have a bearer token
        username = '******'
        password = credentials
    series = util.get_platform_info('series')
    if repo_url.endswith('/'):
        repo_url = repo_url[:-1]
    if not valid_apt_credentials(repo_url, username, password):
        raise InvalidAPTCredentialsError(
            'Invalid APT credentials provided for %s' % repo_url)

    # Does this system have updates suite enabled?
    policy, _err = util.subp(['apt-cache', 'policy'])
    updates_enabled = bool(' %s-updates/' % series in policy)

    logging.info('Enabling authenticated repo: %s', repo_url)
    content = ''
    for suite in suites:
        if series not in suite:
            continue   # Only enable suites matching this current series
        if '-updates' in suite and not updates_enabled:
            logging.debug(
                'Not enabling apt suite "%s" because "%s-updates" is not'
                ' enabled', suite, series)
            continue
        content += ('deb {url}/ubuntu {suite} main\n'
                    '# deb-src {url}/ubuntu {suite} main\n'.format(
                        url=repo_url, suite=suite))
    util.write_file(repo_filename, content)
    add_apt_auth_conf_entry(repo_url, username, password)
    if keyring_file:
        logging.debug('Copying %s to %s', keyring_file, APT_KEYS_DIR)
        shutil.copy(keyring_file, APT_KEYS_DIR)
Example #24
def add_auth_apt_repo(repo_filename, repo_url, credentials, keyring_file=None,
                      fingerprint=None):
    """Add an authenticated apt repo and credentials to the system.

    @raises: InvalidAPTCredentialsError when the token provided can't access
        the repo PPA.
    """
    series = util.get_platform_info('series')
    if not valid_apt_credentials(repo_url, series, credentials):
        raise InvalidAPTCredentialsError(
            'Invalid APT credentials provided for %s' % repo_url)
    logging.info('Enabling authenticated apt PPA: %s', repo_url)
    content = (
        'deb {url}/ubuntu {series} main\n'
        '# deb-src {url}/ubuntu {series} main\n'.format(
            url=repo_url, series=series))
    util.write_file(repo_filename, content)
    try:
        login, password = credentials.split(':')
    except ValueError:  # Then we have a bearer token
        login = '******'
        password = credentials
    apt_auth_file = get_apt_auth_file_from_apt_config()
    if os.path.exists(apt_auth_file):
        auth_content = util.load_file(apt_auth_file)
    else:
        auth_content = APT_AUTH_HEADER
    _protocol, repo_path = repo_url.split('://')
    auth_content += (
        'machine {repo_path}/ubuntu/ login {login} password'
        ' {password}\n'.format(
            repo_path=repo_path, login=login, password=password))
    util.write_file(apt_auth_file, auth_content, mode=0o600)
    if keyring_file:
        logging.debug('Copying %s to %s', keyring_file, APT_KEYS_DIR)
        shutil.copy(keyring_file, APT_KEYS_DIR)
    elif fingerprint:
        logging.debug('Importing APT PPA key %s', fingerprint)
        util.subp(
            ['apt-key', 'adv', '--keyserver', 'keyserver.ubuntu.com',
             '--recv-keys', fingerprint], capture=True)
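The auth entry appended by this example is a netrc-style line; the snippet below renders one with a fabricated repo URL and credentials:

repo_url = "https://private-ppa.launchpad.net/example/stable"  # hypothetical
login, password = "user", "s3cret"                              # fabricated

_protocol, repo_path = repo_url.split("://")
entry = ("machine {repo_path}/ubuntu/ login {login} password"
         " {password}\n".format(repo_path=repo_path, login=login,
                                password=password))
print(entry)
# machine private-ppa.launchpad.net/example/stable/ubuntu/ login user password s3cret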
Example #25
    def test_request_resources_from_contract_server(self, client):
        """Call UAContractClient.request_resources to get updated resources."""
        cfg = FakeConfig()

        platform = util.get_platform_info()
        resource_params = {
            "architecture": platform["arch"],
            "series": platform["series"],
            "kernel": platform["kernel"],
        }
        url = API_V1_RESOURCES + "?" + urllib.parse.urlencode(resource_params)

        new_resources = [{"name": "new_resource", "available": False}]

        def fake_contract_client(cfg):
            fake_client = FakeContractClient(cfg)
            fake_client._responses = {url: {"resources": new_resources}}
            return fake_client

        client.side_effect = fake_contract_client
        assert new_resources == get_available_resources(cfg)
Example #26
    def remove_apt_config(self):
        """Remove any repository apt configuration files."""
        series = util.get_platform_info()["series"]
        repo_filename = self.repo_list_file_tmpl.format(
            name=self.name, series=series
        )
        entitlement = self.cfg.entitlements[self.name].get("entitlement", {})
        access_directives = entitlement.get("directives", {})
        repo_url = access_directives.get("aptURL")
        if not repo_url:
            raise exceptions.MissingAptURLDirective(self.name)
        if self.disable_apt_auth_only:
            # We only remove the repo from the apt auth file, because ESM Infra
            # is a special-case: we want to be able to report on the
            # available ESM Infra updates even when it's disabled
            apt.remove_repo_from_apt_auth_file(repo_url)
            apt.restore_commented_apt_list_file(repo_filename)
        else:
            apt.remove_auth_apt_repo(
                repo_filename, repo_url, self.repo_key_file
            )
            apt.remove_apt_list_files(repo_url, series)
        if self.repo_pin_priority:
            repo_pref_file = self.repo_pref_file_tmpl.format(
                name=self.name, series=series
            )
            if self.repo_pin_priority == "never":
                # Disable the repo with a pinning file
                apt.add_ppa_pinning(
                    repo_pref_file,
                    repo_url,
                    self.origin,
                    self.repo_pin_priority,
                )
            elif os.path.exists(repo_pref_file):
                os.unlink(repo_pref_file)
        print(status.MESSAGE_APT_UPDATING_LISTS)
        apt.run_apt_command(
            ["apt-get", "update"], status.MESSAGE_APT_UPDATE_FAILED
        )
Example #27
def add_auth_apt_repo(repo_filename,
                      repo_url,
                      credentials,
                      keyring_file=None,
                      fingerprint=None,
                      pockets=('main', )):
    """Add an authenticated apt repo and credentials to the system.

    @raises: InvalidAPTCredentialsError when the token provided can't access
        the repo PPA.
    """
    try:
        username, password = credentials.split(':')
    except ValueError:  # Then we have a bearer token
        username = '******'
        password = credentials
    series = util.get_platform_info('series')
    if repo_url.endswith('/'):
        repo_url = repo_url[:-1]
    if not valid_apt_credentials(repo_url, username, password):
        raise InvalidAPTCredentialsError(
            'Invalid APT credentials provided for %s' % repo_url)
    logging.info('Enabling authenticated repo: %s', repo_url)
    content = ''
    for pocket in pockets:
        content += ('deb {url}/ubuntu {series} {pocket}\n'
                    '# deb-src {url}/ubuntu {series} {pocket}\n'.format(
                        url=repo_url, series=series, pocket=pocket))
    util.write_file(repo_filename, content)
    add_apt_auth_conf_entry(repo_url, username, password)
    if keyring_file:
        logging.debug('Copying %s to %s', keyring_file, APT_KEYS_DIR)
        shutil.copy(keyring_file, APT_KEYS_DIR)
    elif fingerprint:
        logging.debug('Importing APT key %s', fingerprint)
        util.subp(
            ['apt-key', 'adv', '--keyserver', 'keyserver.ubuntu.com',
             '--recv-keys', fingerprint], capture=True)
Example #28
    def _replace_metapackage_on_cloud_instance(
        self, packages: List[str]
    ) -> List[str]:
        """
        Identify correct metapackage to be used if in a cloud instance.

        Currently, the contract backend is not delivering the right
        metapackage on a Bionic Azure or AWS cloud instance. For those
        clouds, we have cloud specific fips metapackages and we should
        use them. We are now performing that correction here, but this
        is a temporary fix.
        """
        cfg_disable_fips_metapackage_override = util.is_config_value_true(
            config=self.cfg.cfg,
            path_to_value="features.disable_fips_metapackage_override",
        )

        if cfg_disable_fips_metapackage_override:
            return packages

        series = util.get_platform_info().get("series")
        if series not in ("bionic", "focal"):
            return packages

        cloud_id, _ = get_cloud_type()
        if cloud_id is None:
            cloud_id = ""

        cloud_match = re.match(r"^(?P<cloud>(azure|aws|gce)).*", cloud_id)
        cloud_id = cloud_match.group("cloud") if cloud_match else ""

        if cloud_id not in ("azure", "aws", "gce"):
            return packages

        cloud_id = "gcp" if cloud_id == "gce" else cloud_id
        cloud_metapkg = "ubuntu-{}-fips".format(cloud_id)
        # Replace only the ubuntu-fips meta package if exists
        return [
            cloud_metapkg if pkg == "ubuntu-fips" else pkg for pkg in packages
        ]
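The closing list comprehension only swaps the ubuntu-fips metapackage; a quick illustration with example package names:

packages = ["ubuntu-fips", "openssh-server", "openssh-client"]
cloud_id = "azure"
cloud_metapkg = "ubuntu-{}-fips".format(cloud_id)
print([cloud_metapkg if pkg == "ubuntu-fips" else pkg for pkg in packages])
# ['ubuntu-azure-fips', 'openssh-server', 'openssh-client']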
Example #29
    def is_pro_license_present(self, *, wait_for_change: bool) -> bool:
        url = LICENSES_URL

        if wait_for_change:
            url += WAIT_FOR_CHANGE
            if self.etag:
                url += LAST_ETAG.format(etag=self.etag)

        try:
            licenses, headers = util.readurl(
                url, headers={"Metadata-Flavor": "Google"})
        except HTTPError as e:
            LOG.error(e)
            if e.code == 400:
                raise exceptions.CancelProLicensePolling()
            else:
                raise exceptions.DelayProLicensePolling()
        license_ids = [license["id"] for license in licenses]
        self.etag = headers.get("ETag", None)

        series = util.get_platform_info()["series"]
        return GCP_LICENSES.get(series) in license_ids
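The final lookup compares a series-keyed license id against the ids returned by the metadata endpoint; the mapping and ids below are placeholders, not real GCP license ids:

GCP_LICENSES = {"xenial": "license-id-xenial", "bionic": "license-id-bionic"}
license_ids = ["license-id-bionic"]   # pretend metadata response
series = "bionic"
print(GCP_LICENSES.get(series) in license_ids)
# True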
Example #30
    def request_contract_machine_attach(self, contract_token, machine_id=None):
        """Requests machine attach to the provided contact_id.

        @param contract_id: Unique contract id provided by contract service.
        @param contract_token: Token string providing authentication to
            ContractBearer service endpoint.
        @param machine_id: Optional unique system machine id. When absent,
            contents of /etc/machine-id will be used.

        @return: Dict of the JSON response containing the machine-token.
        """
        if not machine_id:
            machine_id = util.get_machine_id(self.cfg.data_dir)
        os = util.get_platform_info()
        arch = os.pop("arch")
        headers = self.headers()
        headers.update({"Authorization": "Bearer {}".format(contract_token)})
        data = {"machineId": machine_id, "architecture": arch, "os": os}
        machine_token, _headers = self.request_url(
            API_V1_CONTEXT_MACHINE_TOKEN, data=data, headers=headers)
        self.cfg.write_cache("machine-token", machine_token)
        return machine_token
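For reference, the attach payload built above pops the architecture out of the platform dict and sends the remainder as "os"; every value below is invented:

platform = {
    "arch": "amd64",
    "distribution": "Ubuntu",
    "kernel": "5.4.0-42-generic",
    "release": "20.04",
    "series": "focal",
    "type": "Linux",
    "version": "20.04 LTS (Focal Fossa)",
}
arch = platform.pop("arch")
data = {"machineId": "some-machine-id", "architecture": arch, "os": platform}
print(data["architecture"], sorted(data["os"]))
# amd64 ['distribution', 'kernel', 'release', 'series', 'type', 'version']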