def write_esm_announcement_message(cfg: config.UAConfig, series: str) -> None:
    """Write human-readable messages if ESM is offered on this LTS release.

    Do not write ESM announcements if esm-apps is already enabled or is
    still in beta.

    :param cfg: UAConfig instance for this environment.
    :param series: Ubuntu release series string, e.g. 'xenial'.
    """
    apps_cls = entitlements.entitlement_factory(cfg=cfg, name="esm-apps")
    apps_inst = apps_cls(cfg)
    enabled_status = ApplicationStatus.ENABLED
    apps_not_enabled = apps_inst.application_status()[0] != enabled_status
    config_allow_beta = util.is_config_value_true(
        config=cfg.cfg, path_to_value="features.allow_beta")
    apps_not_beta = bool(config_allow_beta or not apps_cls.is_beta)

    msg_dir = os.path.join(cfg.data_dir, "messages")
    esm_news_file = os.path.join(msg_dir, ExternalMessage.ESM_ANNOUNCE.value)
    platform_info = util.get_platform_info()
    is_active_esm = util.is_active_esm(platform_info["series"])
    if is_active_esm:
        ua_esm_url = defaults.EOL_UA_URL_TMPL.format(
            hyphenatedrelease=platform_info["release"].replace(".", "-"))
    else:
        ua_esm_url = defaults.BASE_ESM_URL
    if apps_not_beta and apps_not_enabled:
        util.write_file(esm_news_file,
                        "\n" + ANNOUNCE_ESM_TMPL.format(url=ua_esm_url))
    else:
        util.remove_file(esm_news_file)
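
A minimal call sketch for the function above. Constructing UAConfig with no
arguments is an assumption for illustration, not part of the snippet:

from uaclient import config, util

cfg = config.UAConfig()  # assumption: default constructor loads the local config
series = util.get_platform_info()["series"]
write_esm_announcement_message(cfg, series=series)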
Example #2
    def test_motd_and_apt_templates_written_separately(
        self,
        get_platform_info,
        subp,
        write_esm_announcement_message,
        write_apt_and_motd_templates,
        is_active_esm,
        util_is_lts,
        series,
        is_lts,
        esm_active,
        cfg_allow_beta,
        FakeConfig,
    ):
        """Update message templates on LTS releases when ESM is active.

        Assert that cached template and rendered message files are cleaned
        up on non-LTS releases.

        Honor the features.allow_beta config override.
        """
        get_platform_info.return_value = {"series": series}
        util_is_lts.return_value = is_lts
        is_active_esm.return_value = esm_active
        cfg = FakeConfig.for_attached_machine()
        if cfg_allow_beta:
            cfg.override_features({"allow_beta": cfg_allow_beta})
        msg_dir = os.path.join(cfg.data_dir, "messages")
        if not is_lts:
            # setup old msg files to assert they are removed
            os.makedirs(msg_dir)
            for msg_enum in ExternalMessage:
                msg_path = os.path.join(msg_dir, msg_enum.value)
                util.write_file(msg_path, "old")
                util.write_file(msg_path.replace(".tmpl", ""), "old")

        update_apt_and_motd_messages(cfg)
        assert os.path.exists(os.path.join(cfg.data_dir, "messages"))

        if is_lts:
            write_apt_calls = [mock.call(cfg, series)]
            esm_announce_calls = [mock.call(cfg, series)]
            subp_calls = [
                mock.call([
                    "/usr/lib/ubuntu-advantage/apt-esm-hook",
                    "process-templates",
                ])
            ]
        else:
            write_apt_calls = esm_announce_calls = []
            subp_calls = []
            # Cached msg templates removed on non-LTS
            for msg_enum in ExternalMessage:
                msg_path = os.path.join(msg_dir, msg_enum.value)
                assert False is os.path.exists(msg_path)
                assert False is os.path.exists(msg_path.replace(".tmpl", ""))
        assert (esm_announce_calls ==
                write_esm_announcement_message.call_args_list)
        assert write_apt_calls == write_apt_and_motd_templates.call_args_list
        assert subp_calls == subp.call_args_list
Example #3
    def test_insert_repo_subroutes_before_existing_repo_basepath(
            self, m_get_apt_auth_file, tmpdir):
        """Insert new repo_url before first matching url base path."""
        auth_file = tmpdir.join("auth.conf").strpath
        util.write_file(
            auth_file,
            dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """),
        )

        m_get_apt_auth_file.return_value = auth_file

        add_apt_auth_conf_entry(
            login="new",
            password="newpass",
            repo_url="http://fakerepo/subroute",
        )

        content_template = dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/subroute/ login new password newpass{}
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """)
        expected_content = content_template.format(APT_AUTH_COMMENT)
        assert expected_content == util.load_file(auth_file)
Example #4
    def test_replaces_old_credentials_with_new(self, m_get_apt_auth_file,
                                               tmpdir):
        """Replace old credentials for this repo_url on the same line."""
        auth_file = tmpdir.join("auth.conf").strpath
        util.write_file(
            auth_file,
            dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """),
        )

        m_get_apt_auth_file.return_value = auth_file

        add_apt_auth_conf_entry(login="newlogin",
                                password="newpass",
                                repo_url="http://fakerepo/")

        content_template = dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login newlogin password newpass{}
            machine fakerepo2/ login other password otherpass
        """)
        expected_content = content_template.format(APT_AUTH_COMMENT)
        assert expected_content == util.load_file(auth_file)
Example #5
    def test_get_platform_info_trusty(self):
        """get_platform_info handles trusty /etc/os-release parsing."""
        tdir = self.tmp_dir()
        release_file = os.path.join(tdir, 'os-release')
        util.write_file(release_file, OS_RELEASE_TRUSTY)
        parse_dict = util.parse_os_release(release_file)

        def fake_subp(cmd):
            if cmd == ['uname', '-r']:
                return 'kernel-ver', ''
            if cmd == ['uname', '-i']:
                return 'arm64', ''
            assert False, 'Unexpected command: %s' % cmd

        expected = {
            'arch': 'arm64',
            'distribution': 'Ubuntu',
            'kernel': 'kernel-ver',
            'release': '14.04',
            'series': 'trusty',
            'type': 'Linux'
        }
        with mock.patch('uaclient.util.parse_os_release') as m_parse:
            with mock.patch('uaclient.util.subp') as m_subp:
                m_parse.return_value = parse_dict
                m_subp.side_effect = fake_subp
                self.assertEqual(expected, util.get_platform_info())
Example #6
def remove_auth_apt_repo(repo_filename,
                         repo_url,
                         keyring_file=None,
                         fingerprint=None):
    """Remove an authenticated apt repo and credentials from the system."""
    logging.info('Removing authenticated apt repo: %s', repo_url)
    util.del_file(repo_filename)
    if keyring_file:
        util.del_file(keyring_file)
    elif fingerprint:
        util.subp(['apt-key', 'del', fingerprint], capture=True)
    _protocol, repo_path = repo_url.split('://')
    if repo_path.endswith('/'):  # strip trailing slash
        repo_path = repo_path[:-1]
    apt_auth_file = get_apt_auth_file_from_apt_config()
    if os.path.exists(apt_auth_file):
        apt_auth = util.load_file(apt_auth_file)
        auth_prefix = 'machine {repo_path}/ login'.format(repo_path=repo_path)
        content = '\n'.join([
            line for line in apt_auth.splitlines() if auth_prefix not in line
        ])
        if not content:
            os.unlink(apt_auth_file)
        else:
            util.write_file(apt_auth_file, content, mode=0o600)
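
A usage sketch for remove_auth_apt_repo; the paths and URL below are
placeholders chosen for illustration:

remove_auth_apt_repo(
    repo_filename='/etc/apt/sources.list.d/ubuntu-esm-infra.list',  # placeholder
    repo_url='https://esm.ubuntu.com/infra/ubuntu',                 # placeholder
    keyring_file='/etc/apt/trusted.gpg.d/ubuntu-esm-keyring.gpg',   # placeholder
)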
Example #7
def add_apt_auth_conf_entry(repo_url, login, password):
    """Add or replace an apt auth line in apt's auth.conf file or conf.d."""
    apt_auth_file = get_apt_auth_file_from_apt_config()
    _protocol, repo_path = repo_url.split('://')
    if repo_path.endswith('/'):  # strip trailing slash
        repo_path = repo_path[:-1]
    if os.path.exists(apt_auth_file):
        orig_content = util.load_file(apt_auth_file)
    else:
        orig_content = ''
    repo_auth_line = (
        'machine {repo_path}/ login {login} password {password}{cmt}'.format(
            repo_path=repo_path,
            login=login,
            password=password,
            cmt=APT_AUTH_COMMENT))
    added_new_auth = False
    new_lines = []
    for line in orig_content.splitlines():
        machine_match = re.match(r'machine\s+(?P<repo_url>[.\-\w]+)/?.*', line)
        if machine_match:
            matched_repo = machine_match.group('repo_url')
            if matched_repo == repo_path:
                # Replace old auth with new auth at same line
                new_lines.append(repo_auth_line)
                added_new_auth = True
                continue
            if matched_repo in repo_path:
                # Insert our repo before. We are a more specific apt repo match
                new_lines.append(repo_auth_line)
                added_new_auth = True
        new_lines.append(line)
    if not added_new_auth:
        new_lines.append(repo_auth_line)
    util.write_file(apt_auth_file, '\n'.join(new_lines), mode=0o600)
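
A usage sketch, grounded in the tests shown in the other examples; the repo
URL and credentials are placeholders:

add_apt_auth_conf_entry(
    repo_url="https://esm.ubuntu.com/apps/ubuntu",  # placeholder URL
    login="bearer",
    password="<contract-token>",  # placeholder credential
)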
Example #8
    def test_insert_repo_subroutes_before_existing_repo_basepath(
            self, m_get_apt_auth_file, tmpdir):
        """Insert new repo_url before first matching url base path."""
        auth_file = tmpdir.join('auth.conf').strpath
        util.write_file(
            auth_file,
            dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """))

        m_get_apt_auth_file.return_value = auth_file

        add_apt_auth_conf_entry(login='new',
                                password='newpass',
                                repo_url='http://fakerepo/subroute')

        expected_content = dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/subroute/ login new password newpass%s
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass\
""" % APT_AUTH_COMMENT)
        assert expected_content == util.load_file(auth_file)
Example #9
    def write_cache(self, key, content):
        if not os.path.exists(self.data_dir):
            os.makedirs(self.data_dir)
        filepath = self.data_path(key)
        if not isinstance(content, str):
            content = json.dumps(content)
        util.write_file(filepath, content)
Example #10
    def test_get_machine_id_from_etc_machine_id(self):
        """The contents of /etc/machine-id are returned when it exists."""
        etc_machine_id = self.tmp_path('etc-machine-id', dir=self.tdir)
        self.assertEqual('/etc/machine-id', util.ETC_MACHINE_ID)
        util.write_file(etc_machine_id, 'etc-machine-id')
        with mock.patch('uaclient.util.ETC_MACHINE_ID', etc_machine_id):
            value = util.get_machine_id(data_dir=None)
        self.assertEqual('etc-machine-id', value)
Example #11
def add_auth_apt_repo(
    repo_filename: str,
    repo_url: str,
    credentials: str,
    suites: "List[str]",
    keyring_file: str,
) -> None:
    """Add an authenticated apt repo and credentials to the system.

    @raises: InvalidAPTCredentialsError when the token provided can't access
        the repo PPA.
    """
    try:
        username, password = credentials.split(":")
    except ValueError:  # Then we have a bearer token
        username = "bearer"
        password = credentials
    series = util.get_platform_info()["series"]
    if repo_url.endswith("/"):
        repo_url = repo_url[:-1]
    assert_valid_apt_credentials(repo_url, username, password)

    # Does this system have updates suite enabled?
    updates_enabled = False
    policy = run_apt_command(["apt-cache", "policy"],
                             status.MESSAGE_APT_POLICY_FAILED)
    for line in policy.splitlines():
        # We only care about $suite-updates lines
        if "a={}-updates".format(series) not in line:
            continue
        # We only care about $suite-updates from the Ubuntu archive
        if "o=Ubuntu," not in line:
            continue
        updates_enabled = True
        break

    content = ""
    for suite in suites:
        if series not in suite:
            continue  # Only enable suites matching this current series
        maybe_comment = ""
        if "-updates" in suite and not updates_enabled:
            logging.debug(
                'Not enabling apt suite "%s" because "%s-updates" is not'
                " enabled",
                suite,
                series,
            )
            maybe_comment = "# "
        content += ("{maybe_comment}deb {url}/ubuntu {suite} main\n"
                    "# deb-src {url}/ubuntu {suite} main\n".format(
                        maybe_comment=maybe_comment, url=repo_url,
                        suite=suite))
    util.write_file(repo_filename, content)
    add_apt_auth_conf_entry(repo_url, username, password)
    source_keyring_file = os.path.join(KEYRINGS_DIR, keyring_file)
    destination_keyring_file = os.path.join(APT_KEYS_DIR, keyring_file)
    gpg.export_gpg_key(source_keyring_file, destination_keyring_file)
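
A usage sketch for this add_auth_apt_repo variant. Every literal below
(paths, URL, suites, keyring name) is a placeholder; a bearer-style token is
passed as the whole credentials string, and InvalidAPTCredentialsError is
raised if it cannot access the repo:

add_auth_apt_repo(
    repo_filename="/etc/apt/sources.list.d/ubuntu-esm-apps.list",  # placeholder
    repo_url="https://esm.ubuntu.com/apps/ubuntu",                 # placeholder
    credentials="<contract-token>",  # no "user:password" separator -> bearer token
    suites=["xenial-apps-security", "xenial-apps-updates"],        # placeholders
    keyring_file="ubuntu-advantage-esm-apps.gpg",                  # placeholder
)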
Example #12
    def test_get_machine_id_from_var_lib_dbus_machine_id(self):
        """On trusty, the machine id lives in /var/lib/dbus/machine-id."""
        etc_machine_id = self.tmp_path('etc-machine-id', dir=self.tdir)
        dbus_machine_id = self.tmp_path('dbus-machine-id', dir=self.tdir)
        self.assertEqual('/var/lib/dbus/machine-id', util.DBUS_MACHINE_ID)
        util.write_file(dbus_machine_id, 'dbus-machine-id')
        with mock.patch('uaclient.util.DBUS_MACHINE_ID', dbus_machine_id):
            with mock.patch('uaclient.util.ETC_MACHINE_ID', etc_machine_id):
                value = util.get_machine_id(data_dir=None)
        self.assertEqual('dbus-machine-id', value)
Example #13
def _write_template_or_remove(msg: str, tmpl_file: str):
    """Write a template to tmpl_file.

    When msg is empty, remove both tmpl_file and the generated msg.
    """
    if msg:
        util.write_file(tmpl_file, msg)
    else:
        util.remove_file(tmpl_file)
        if tmpl_file.endswith(".tmpl"):
            util.remove_file(tmpl_file.replace(".tmpl", ""))
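
A short sketch of both branches, using placeholder message text and a
placeholder template path:

tmpl = "/var/lib/ubuntu-advantage/messages/motd-esm-announce.tmpl"  # placeholder
_write_template_or_remove("ESM is available for this release.", tmpl)  # writes tmpl
_write_template_or_remove("", tmpl)  # removes tmpl and its rendered counterpart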
Example #14
def add_ppa_pinning(apt_preference_file, repo_url, priority):
    """Add an apt preferences file and pin for a PPA."""
    series = util.get_platform_info('series')
    _protocol, repo_path = repo_url.split('://')
    origin = repo_path.replace('private-ppa.launchpad.net/', 'LP-PPA-')
    origin = origin.replace('/', '-')
    content = (
        'Package: *\n'
        'Pin: release o={origin}, n={series}\n'
        'Pin-Priority: {priority}\n'.format(
            origin=origin, priority=priority, series=series))
    util.write_file(apt_preference_file, content)
Example #15
def add_ppa_pinning(apt_preference_file, repo_url, origin, priority):
    """Add an apt preferences file and pin for a PPA."""
    series = util.get_platform_info()["series"]
    _protocol, repo_path = repo_url.split("://")
    if repo_path.endswith("/"):  # strip trailing slash
        repo_path = repo_path[:-1]
    content = ("Package: *\n"
               "Pin: release o={origin}, n={series}\n"
               "Pin-Priority: {priority}\n".format(origin=origin,
                                                   priority=priority,
                                                   series=series))
    util.write_file(apt_preference_file, content)
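
A usage sketch for the origin-aware add_ppa_pinning; the preferences path,
repo URL, origin and priority are placeholders:

add_ppa_pinning(
    "/etc/apt/preferences.d/ubuntu-fips",  # placeholder preferences path
    repo_url="https://private-ppa.launchpad.net/ubuntu-advantage/fips",  # placeholder
    origin="LP-PPA-ubuntu-advantage-fips",  # placeholder origin
    priority=1001,  # placeholder pin priority
)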
Example #16
    def test_get_machine_id_uses_machine_id_from_data_dir(self):
        """When no machine-id is found, use machine-id from data_dir."""

        data_machine_id = self.tmp_path('machine-id', dir=self.tdir)
        util.write_file(data_machine_id, 'data-machine-id')

        def fake_exists(path):
            return bool(path == data_machine_id)

        with mock.patch('uaclient.util.os.path.exists') as m_exists:
            m_exists.side_effect = fake_exists
            value = util.get_machine_id(data_dir=self.tdir)
        self.assertEqual('data-machine-id', value)
Example #17
def setup_apt_proxy(
    http_proxy: Optional[str] = None,
    https_proxy: Optional[str] = None,
    proxy_scope: Optional[AptProxyScope] = AptProxyScope.GLOBAL,
) -> None:
    """
    Writes an apt conf file that configures apt to use the proxies provided as
    args.
    If both args are None, then no apt conf file is written. If this function
    previously wrote a conf file, and was run again with both args as None,
    the existing file is removed.

    :param http_proxy: the URL of the HTTP proxy apt should use, or None
    :param https_proxy: the URL of the HTTPS proxy apt should use, or None
    :param proxy_scope: whether the proxy is configured globally for apt or
        scoped to UA client services; defaults to AptProxyScope.GLOBAL
    :return: None
    """
    if http_proxy or https_proxy:
        if proxy_scope:
            message = ""
            if proxy_scope == AptProxyScope.UACLIENT:
                message = "UA-scoped"
            elif proxy_scope == AptProxyScope.GLOBAL:
                message = "global"
            event.info(
                messages.SETTING_SERVICE_PROXY_SCOPE.format(scope=message))

    apt_proxy_config = ""
    if http_proxy:
        if proxy_scope == AptProxyScope.UACLIENT:
            apt_proxy_config += APT_CONFIG_UA_PROXY_HTTP.format(
                proxy_url=http_proxy)
        elif proxy_scope == AptProxyScope.GLOBAL:
            apt_proxy_config += APT_CONFIG_GLOBAL_PROXY_HTTP.format(
                proxy_url=http_proxy)
    if https_proxy:
        if proxy_scope == AptProxyScope.UACLIENT:
            apt_proxy_config += APT_CONFIG_UA_PROXY_HTTPS.format(
                proxy_url=https_proxy)
        elif proxy_scope == AptProxyScope.GLOBAL:
            apt_proxy_config += APT_CONFIG_GLOBAL_PROXY_HTTPS.format(
                proxy_url=https_proxy)

    if apt_proxy_config != "":
        apt_proxy_config = messages.APT_PROXY_CONFIG_HEADER + apt_proxy_config

    if apt_proxy_config == "":
        util.remove_file(APT_PROXY_CONF_FILE)
    else:
        util.write_file(APT_PROXY_CONF_FILE, apt_proxy_config)
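
A usage sketch: write a global apt proxy configuration, then clear it again.
The proxy URL is a placeholder:

setup_apt_proxy(
    http_proxy="http://squid.internal:3128",   # placeholder
    https_proxy="http://squid.internal:3128",  # placeholder
    proxy_scope=AptProxyScope.GLOBAL,
)
# With no proxies given, a previously written APT_PROXY_CONF_FILE is removed.
setup_apt_proxy(http_proxy=None, https_proxy=None)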
Example #18
    def write_cache(self, key: str, content: 'Any') -> None:
        filepath = self.data_path(key)
        data_dir = os.path.dirname(filepath)
        if not os.path.exists(data_dir):
            os.makedirs(data_dir)
        if key.startswith('machine-access') or key == 'machine-token':
            self._machine_token = None
            self._entitlements = None
        if not isinstance(content, str):
            content = json.dumps(content)
        mode = 0o600
        if key in self.data_paths:
            if not self.data_paths[key].private:
                mode = 0o644
        util.write_file(filepath, content, mode=mode)
Example #19
    def write_cache(self, key: str, content: "Any") -> None:
        filepath = self.data_path(key)
        data_dir = os.path.dirname(filepath)
        if not os.path.exists(data_dir):
            os.makedirs(data_dir)
        if key.startswith("machine-access") or key == "machine-token":
            self._machine_token = None
            self._entitlements = None
        if not isinstance(content, str):
            content = json.dumps(content, cls=util.DatetimeAwareJSONEncoder)
        mode = 0o600
        if key in self.data_paths:
            if not self.data_paths[key].private:
                mode = 0o644
        util.write_file(filepath, content, mode=mode)
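
A call sketch for this write_cache variant, where cfg stands in for a UAConfig
instance and the payload is a placeholder; non-str content is JSON-serialized
with the datetime-aware encoder, and the file mode is 0o600 unless the key's
data path is marked non-private:

cfg.write_cache("machine-token", {"machineToken": "<token>"})  # placeholder payload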
Example #20
    def test_find_all_apt_list_files_from_apt_config_key(self, m_subp, tmpdir):
        """Find all matching apt list files from apt-config dir."""
        m_subp.return_value = ("key='%s'" % tmpdir.strpath, '')
        repo_url = 'http://c.com/fips-updates/'
        _protocol, repo_path = repo_url.split('://')
        prefix = repo_path.rstrip('/').replace('/', '_')
        paths = sorted([
            tmpdir.join(prefix + '_dists_nomatch').strpath,
            tmpdir.join(prefix + '_dists_xenial_InRelease').strpath,
            tmpdir.join(prefix +
                        '_dists_xenial_main_binary-amd64_Packages').strpath
        ])
        for path in paths:
            util.write_file(path, '')

        assert paths[1:] == find_apt_list_files(repo_url, 'xenial')
Example #21
    def write_cache(self, key: str, content: 'Any', private: bool = True):
        filepath = self.data_path(key, private)
        data_dir = os.path.dirname(filepath)
        if not os.path.exists(data_dir):
            os.makedirs(data_dir)
        if key.startswith('machine-access') or key == 'machine-token':
            self._machine_token = None
            self._entitlements = None
        elif key == 'account-contracts':
            self._contracts = None
        if not isinstance(content, str):
            content = json.dumps(content)
        if private:
            util.write_file(filepath, content, mode=0o600)
        else:
            util.write_file(filepath, content)
Example #22
    def test_find_all_apt_list_files_from_apt_config_key(self, m_subp, tmpdir):
        """Find all matching apt list files from apt-config dir."""
        m_subp.return_value = ("key='{}'".format(tmpdir.strpath), "")
        repo_url = "http://c.com/fips-updates/"
        _protocol, repo_path = repo_url.split("://")
        prefix = repo_path.rstrip("/").replace("/", "_")
        paths = sorted([
            tmpdir.join(prefix + "_dists_nomatch").strpath,
            tmpdir.join(prefix + "_dists_xenial_InRelease").strpath,
            tmpdir.join(prefix +
                        "_dists_xenial_main_binary-amd64_Packages").strpath,
        ])
        for path in paths:
            util.write_file(path, "")

        assert paths[1:] == find_apt_list_files(repo_url, "xenial")
Example #23
def remove_repo_from_apt_auth_file(repo_url):
    """Remove a repo from the shared apt auth file"""
    _protocol, repo_path = repo_url.split("://")
    if repo_path.endswith("/"):  # strip trailing slash
        repo_path = repo_path[:-1]
    apt_auth_file = get_apt_auth_file_from_apt_config()
    if os.path.exists(apt_auth_file):
        apt_auth = util.load_file(apt_auth_file)
        auth_prefix = "machine {repo_path}/ login".format(repo_path=repo_path)
        content = "\n".join([
            line for line in apt_auth.splitlines() if auth_prefix not in line
        ])
        if not content:
            os.unlink(apt_auth_file)
        else:
            util.write_file(apt_auth_file, content, mode=0o600)
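
A one-line usage sketch; the repo URL is a placeholder:

remove_repo_from_apt_auth_file("https://esm.ubuntu.com/apps/ubuntu")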
Example #24
    def test_parse_os_release(self):
        """parse_os_release returns a dict of values from /etc/os-release."""
        tdir = self.tmp_dir()
        release_file = os.path.join(tdir, 'os-release')
        util.write_file(release_file, OS_RELEASE_TRUSTY)
        expected = {
            'BUG_REPORT_URL': 'http://bugs.launchpad.net/ubuntu/',
            'HOME_URL': 'http://www.ubuntu.com/',
            'ID': 'ubuntu',
            'ID_LIKE': 'debian',
            'NAME': 'Ubuntu',
            'PRETTY_NAME': 'Ubuntu 14.04.5 LTS',
            'SUPPORT_URL': 'http://help.ubuntu.com/',
            'VERSION': '14.04.5 LTS, Trusty Tahr',
            'VERSION_ID': '14.04'
        }
        self.assertEqual(expected, util.parse_os_release(release_file))
Example #25
    def test_remove_all_apt_list_files_from_apt_config_key(
            self, m_subp, tmpdir):
        """Remove all matching apt list files from apt-config dir."""
        m_subp.return_value = ("key='%s'" % tmpdir.strpath, '')
        repo_url = 'http://c.com/fips-updates/'
        _protocol, repo_path = repo_url.split('://')
        prefix = repo_path.rstrip('/').replace('/', '_')
        nomatch_file = tmpdir.join(prefix + '_dists_nomatch').strpath
        paths = [
            nomatch_file,
            tmpdir.join(prefix + '_dists_xenial_InRelease').strpath,
            tmpdir.join(
                prefix + '_dists_xenial_main_binary-amd64_Packages').strpath]
        for path in paths:
            util.write_file(path, '')

        assert None is remove_apt_list_files(repo_url, 'xenial')
        assert [nomatch_file] == glob.glob('%s/*' % tmpdir.strpath)
Example #26
    def test_remove_all_apt_list_files_from_apt_config_key(
            self, m_subp, tmpdir):
        """Remove all matching apt list files from apt-config dir."""
        m_subp.return_value = ("key='{}'".format(tmpdir.strpath), "")
        repo_url = "http://c.com/fips-updates/"
        _protocol, repo_path = repo_url.split("://")
        prefix = repo_path.rstrip("/").replace("/", "_")
        nomatch_file = tmpdir.join(prefix + "_dists_nomatch").strpath
        paths = [
            nomatch_file,
            tmpdir.join(prefix + "_dists_xenial_InRelease").strpath,
            tmpdir.join(prefix +
                        "_dists_xenial_main_binary-amd64_Packages").strpath,
        ]
        for path in paths:
            util.write_file(path, "")

        assert None is remove_apt_list_files(repo_url, "xenial")
        assert [nomatch_file] == glob.glob("{}/*".format(tmpdir.strpath))
Example #27
def add_auth_apt_repo(repo_filename: str, repo_url: str, credentials: str,
                      suites: 'List[str]', keyring_file: str = None) -> None:
    """Add an authenticated apt repo and credentials to the system.

    @raises: InvalidAPTCredentialsError when the token provided can't access
        the repo PPA.
    """
    try:
        username, password = credentials.split(':')
    except ValueError:  # Then we have a bearer token
        username = 'bearer'
        password = credentials
    series = util.get_platform_info('series')
    if repo_url.endswith('/'):
        repo_url = repo_url[:-1]
    if not valid_apt_credentials(repo_url, username, password):
        raise InvalidAPTCredentialsError(
            'Invalid APT credentials provided for %s' % repo_url)

    # Does this system have updates suite enabled?
    policy, _err = util.subp(['apt-cache', 'policy'])
    updates_enabled = bool(' %s-updates/' % series in policy)

    logging.info('Enabling authenticated repo: %s', repo_url)
    content = ''
    for suite in suites:
        if series not in suite:
            continue   # Only enable suites matching this current series
        if '-updates' in suite and not updates_enabled:
            logging.debug(
                'Not enabling apt suite "%s" because "%s-updates" is not'
                ' enabled', suite, series)
            continue
        content += ('deb {url}/ubuntu {suite} main\n'
                    '# deb-src {url}/ubuntu {suite} main\n'.format(
                        url=repo_url, suite=suite))
    util.write_file(repo_filename, content)
    add_apt_auth_conf_entry(repo_url, username, password)
    if keyring_file:
        logging.debug('Copying %s to %s', keyring_file, APT_KEYS_DIR)
        shutil.copy(keyring_file, APT_KEYS_DIR)
Example #28
def add_auth_apt_repo(repo_filename, repo_url, credentials, keyring_file=None,
                      fingerprint=None):
    """Add an authenticated apt repo and credentials to the system.

    @raises: InvalidAPTCredentialsError when the token provided can't access
        the repo PPA.
    """
    series = util.get_platform_info('series')
    if not valid_apt_credentials(repo_url, series, credentials):
        raise InvalidAPTCredentialsError(
            'Invalid APT credentials provided for %s' % repo_url)
    logging.info('Enabling authenticated apt PPA: %s', repo_url)
    content = (
        'deb {url}/ubuntu {series} main\n'
        '# deb-src {url}/ubuntu {series} main\n'.format(
            url=repo_url, series=series))
    util.write_file(repo_filename, content)
    try:
        login, password = credentials.split(':')
    except ValueError:  # Then we have a bearer token
        login = 'bearer'
        password = credentials
    apt_auth_file = get_apt_auth_file_from_apt_config()
    if os.path.exists(apt_auth_file):
        auth_content = util.load_file(apt_auth_file)
    else:
        auth_content = APT_AUTH_HEADER
    _protocol, repo_path = repo_url.split('://')
    auth_content += (
        'machine {repo_path}/ubuntu/ login {login} password'
        ' {password}\n'.format(
            repo_path=repo_path, login=login, password=password))
    util.write_file(apt_auth_file, auth_content, mode=0o600)
    if keyring_file:
        logging.debug('Copying %s to %s', keyring_file, APT_KEYS_DIR)
        shutil.copy(keyring_file, APT_KEYS_DIR)
    elif fingerprint:
        logging.debug('Importing APT PPA key %s', fingerprint)
        util.subp(
            ['apt-key', 'adv', '--keyserver', 'keyserver.ubuntu.com',
             '--recv-keys', fingerprint], capture=True)
Example #29
    def test_replaces_old_credentials_with_new(
            self, m_get_apt_auth_file, tmpdir):
        """Replace old credentials for this repo_url on the same line."""
        auth_file = tmpdir.join('auth.conf').strpath
        util.write_file(auth_file, dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """))

        m_get_apt_auth_file.return_value = auth_file

        add_apt_auth_conf_entry(
            login='newlogin', password='newpass', repo_url='http://fakerepo/')

        expected_content = dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login newlogin password newpass%s
            machine fakerepo2/ login other password otherpass
        """ % APT_AUTH_COMMENT)
        assert expected_content == util.load_file(auth_file)
Example #30
def add_auth_apt_repo(repo_filename,
                      repo_url,
                      credentials,
                      keyring_file=None,
                      fingerprint=None,
                      pockets=('main', )):
    """Add an authenticated apt repo and credentials to the system.

    @raises: InvalidAPTCredentialsError when the token provided can't access
        the repo PPA.
    """
    try:
        username, password = credentials.split(':')
    except ValueError:  # Then we have a bearer token
        username = 'bearer'
        password = credentials
    series = util.get_platform_info('series')
    if repo_url.endswith('/'):
        repo_url = repo_url[:-1]
    if not valid_apt_credentials(repo_url, username, password):
        raise InvalidAPTCredentialsError(
            'Invalid APT credentials provided for %s' % repo_url)
    logging.info('Enabling authenticated repo: %s', repo_url)
    content = ''
    for pocket in pockets:
        content += ('deb {url}/ubuntu {series} {pocket}\n'
                    '# deb-src {url}/ubuntu {series} {pocket}\n'.format(
                        url=repo_url, series=series, pocket=pocket))
    util.write_file(repo_filename, content)
    add_apt_auth_conf_entry(repo_url, username, password)
    if keyring_file:
        logging.debug('Copying %s to %s', keyring_file, APT_KEYS_DIR)
        shutil.copy(keyring_file, APT_KEYS_DIR)
    elif fingerprint:
        logging.debug('Importing APT key %s', fingerprint)
        util.subp([
            'apt-key', 'adv', '--keyserver', 'keyserver.ubuntu.com',
            '--recv-keys', fingerprint
        ],
                  capture=True)