def is_viable(self):
    """This machine is a viable AWSInstance.

    Viable when the hypervisor UUID, or both DMI product_uuid and
    product_serial, carry the 'ec2' prefix.
    """
    try:
        hypervisor_uuid = util.load_file(SYS_HYPERVISOR_PRODUCT_UUID)
        if "ec2" == hypervisor_uuid[0:3]:
            return True
    except FileNotFoundError:
        # SYS_HYPERVISOR_PRODUCT_UUID isn't present on all EC2 instance
        # types; fall through to the DMI checks below instead of crashing.
        pass
    # Both DMI product_uuid and product_serial start with 'ec2'
    dmi_uuid = util.load_file(DMI_PRODUCT_UUID).lower()
    dmi_serial = util.load_file(DMI_PRODUCT_SERIAL).lower()
    if "ec2" == dmi_uuid[0:3] == dmi_serial[0:3]:
        return True
    return False
 def read_cache(self, key: str, silent: bool = False) -> 'Optional[Any]':
     """Return cached content for *key*, parsed as JSON when possible.

     Tries the private cache path first, then the public location;
     returns None when neither can be read.
     """
     private_path = self.data_path(key)
     public_path = private_path.replace('%s/' % PRIVATE_SUBDIR, '')
     content = None
     for candidate in (private_path, public_path):
         try:
             content = util.load_file(candidate)
             break
         except Exception:
             continue
     if content is None:
         if not os.path.exists(private_path) and not silent:
             logging.debug('File does not exist: %s', private_path)
         return None
     parsed = util.maybe_parse_json(content)
     return parsed if parsed else content
    def test_insert_repo_subroutes_before_existing_repo_basepath(
            self, m_get_apt_auth_file, tmpdir):
        """Insert new repo_url before first matching url base path."""
        auth_file = tmpdir.join('auth.conf').strpath
        util.write_file(
            auth_file,
            dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """))

        m_get_apt_auth_file.return_value = auth_file

        add_apt_auth_conf_entry(login='******',
                                password='******',
                                repo_url='http://fakerepo/subroute')

        expected_content = dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/subroute/ login new password newpass%s
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass\
""" % APT_AUTH_COMMENT)
        assert expected_content == util.load_file(auth_file)
    def test_add_auth_apt_repo_comments_updates_suites_on_non_update_machine(
        self,
        m_platform,
        m_valid_creds,
        m_get_apt_auth_file,
        m_subp,
        m_gpg_export,
        tmpdir,
    ):
        """Skip any apt suites that don't match the current series."""
        repo_file = tmpdir.join("repo.conf").strpath
        m_get_apt_auth_file.return_value = tmpdir.join("auth.conf").strpath
        # Simulate apt policy output without xenial-updates enabled.
        m_subp.return_value = (
            POST_INSTALL_APT_CACHE_NO_UPDATES.format("xenial", "test-origin"),
            "",
        )

        add_auth_apt_repo(
            repo_filename=repo_file,
            repo_url="http://fakerepo",
            credentials="mycreds",
            suites=("xenial-one", "xenial-updates", "trusty-gone"),
            keyring_file="keyring",
        )

        # Disabled suites are commented out; the trusty suite is dropped.
        assert util.load_file(repo_file) == dedent("""\
            deb http://fakerepo/ubuntu xenial-one main
            # deb-src http://fakerepo/ubuntu xenial-one main
            # deb http://fakerepo/ubuntu xenial-updates main
            # deb-src http://fakerepo/ubuntu xenial-updates main
        """)
Example #5
0
    def test_add_auth_apt_repo_writes_sources_file(
        self,
        m_platform,
        m_valid_creds,
        m_get_apt_auth_file,
        m_subp,
        m_gpg_export,
        tmpdir,
    ):
        """Write a properly configured sources file to repo_filename."""
        repo_file = tmpdir.join("repo.conf").strpath
        m_get_apt_auth_file.return_value = tmpdir.join("auth.conf").strpath
        m_subp.return_value = "500 esm.canonical.com...", ""  # apt policy

        add_auth_apt_repo(
            repo_filename=repo_file,
            repo_url="http://fakerepo",
            credentials="mycreds",
            suites=("xenial", ),
            key_id="1",
            keyring_file="keyring",
        )

        assert util.load_file(repo_file) == (
            "deb http://fakerepo/ubuntu xenial main\n"
            "# deb-src http://fakerepo/ubuntu xenial main\n"
        )
        # The repo key is exported from the UA keyring into keyring_file.
        assert m_gpg_export.call_args_list == [
            mock.call("1", apt.UA_KEYRING_FILE, "keyring")
        ]
Example #6
0
    def test_add_auth_apt_repo_writes_bearer_resource_token_to_auth_file(
        self,
        m_platform,
        m_valid_creds,
        m_get_apt_auth_file,
        m_subp,
        m_gpg_export,
        tmpdir,
    ):
        """Write apt authentication file when credentials are bearer token."""
        repo_file = tmpdir.join("repo.conf").strpath
        auth_file = tmpdir.join("auth.conf").strpath
        m_get_apt_auth_file.return_value = auth_file
        m_subp.return_value = "500 esm.canonical.com...", ""  # apt policy

        add_auth_apt_repo(
            repo_filename=repo_file,
            repo_url="http://fakerepo/",
            credentials="SOMELONGTOKEN",
            suites=("xenia", ),
            key_id="1",
            keyring_file="keyring",
        )

        # A bare token is written under the literal login name 'bearer'.
        assert util.load_file(auth_file) == (
            "machine fakerepo/ login bearer password"
            " SOMELONGTOKEN{}\n".format(APT_AUTH_COMMENT)
        )
Example #7
0
    def test_replaces_old_credentials_with_new(self, m_get_apt_auth_file,
                                               tmpdir):
        """Replace old credentials for this repo_url on the same line."""
        auth_file = tmpdir.join("auth.conf").strpath
        util.write_file(
            auth_file,
            dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """),
        )

        m_get_apt_auth_file.return_value = auth_file

        # Credentials must match content_template below; the scrubbed
        # placeholder values ('******') could never satisfy the assertion.
        add_apt_auth_conf_entry(login="newlogin",
                                password="newpass",
                                repo_url="http://fakerepo/")

        content_template = dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login newlogin password newpass{}
            machine fakerepo2/ login other password otherpass
        """)
        expected_content = content_template.format(APT_AUTH_COMMENT)
        assert expected_content == util.load_file(auth_file)
def parse_config(config_path=None):
    """Parse known UA config file

    Attempt to find configuration in cwd and fallback to DEFAULT_CONFIG_FILE.
    Any missing configuration keys will be set to CONFIG_DEFAULTS.

    Values are overridden by any environment variable with prefix 'UA_'.

    @param config_path: Fullpath to ua configfile. If unspecified, use
        DEFAULT_CONFIG_FILE.

    @raises: ConfigAbsentError when no config file is discovered.
    @return: Dict of configuration values.
    """
    if not config_path:
        config_path = DEFAULT_CONFIG_FILE
    cfg = copy.copy(CONFIG_DEFAULTS)
    local_cfg = os.path.join(os.getcwd(), os.path.basename(config_path))
    if os.path.exists(local_cfg):
        config_path = local_cfg
    # UA_CONFIG_FILE overrides both the argument and the cwd fallback.
    env_config_file = os.environ.get('UA_CONFIG_FILE')
    if env_config_file:
        config_path = env_config_file
    LOG.debug('Using UA client configuration file at %s', config_path)
    if os.path.exists(config_path):
        # safe_load returns None for an empty/whitespace-only file;
        # guard so cfg.update doesn't raise TypeError on it.
        loaded_cfg = yaml.safe_load(util.load_file(config_path))
        if loaded_cfg:
            cfg.update(loaded_cfg)
    env_keys = {}
    for key, value in os.environ.items():
        if key.startswith('UA_'):
            env_keys[key.lower()[3:]] = value  # Strip leading UA_
    cfg.update(env_keys)
    cfg['log_level'] = cfg['log_level'].upper()
    cfg['data_dir'] = os.path.expanduser(cfg['data_dir'])
    return cfg
Example #9
0
    def test_insert_repo_subroutes_before_existing_repo_basepath(
            self, m_get_apt_auth_file, tmpdir):
        """Insert new repo_url before first matching url base path."""
        auth_file = tmpdir.join("auth.conf").strpath
        util.write_file(
            auth_file,
            dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """),
        )

        m_get_apt_auth_file.return_value = auth_file

        # Credentials must match content_template below; the scrubbed
        # placeholder values ('******') could never satisfy the assertion.
        add_apt_auth_conf_entry(
            login="new",
            password="newpass",
            repo_url="http://fakerepo/subroute",
        )

        content_template = dedent("""\
            machine fakerepo1/ login me password password1
            machine fakerepo/subroute/ login new password newpass{}
            machine fakerepo/ login old password oldpassword
            machine fakerepo2/ login other password otherpass
        """)
        expected_content = content_template.format(APT_AUTH_COMMENT)
        assert expected_content == util.load_file(auth_file)
def remove_auth_apt_repo(repo_filename,
                         repo_url,
                         keyring_file=None,
                         fingerprint=None):
    """Remove an authenticated apt repo and credentials to the system"""
    logging.info('Removing authenticated apt repo: %s', repo_url)
    util.del_file(repo_filename)
    if keyring_file:
        util.del_file(keyring_file)
    elif fingerprint:
        util.subp(['apt-key', 'del', fingerprint], capture=True)
    _protocol, repo_path = repo_url.split('://')
    # Drop a single trailing slash so the auth-line prefix matches exactly.
    repo_path = repo_path[:-1] if repo_path.endswith('/') else repo_path
    apt_auth_file = get_apt_auth_file_from_apt_config()
    if not os.path.exists(apt_auth_file):
        return
    auth_prefix = 'machine {repo_path}/ login'.format(repo_path=repo_path)
    kept_lines = [
        line
        for line in util.load_file(apt_auth_file).splitlines()
        if auth_prefix not in line
    ]
    remaining = '\n'.join(kept_lines)
    if remaining:
        util.write_file(apt_auth_file, remaining, mode=0o600)
    else:
        # No credentials left; remove the auth file entirely.
        os.unlink(apt_auth_file)
def add_apt_auth_conf_entry(repo_url, login, password):
    """Add or replace an apt auth line in apt's auth.conf file or conf.d.

    :param repo_url: repo URL; a single trailing slash is stripped before
        matching existing auth lines.
    :param login: machine login written on the auth line.
    :param password: password (or bearer token) written on the auth line.
    """
    apt_auth_file = get_apt_auth_file_from_apt_config()
    _protocol, repo_path = repo_url.split('://')
    if repo_path.endswith('/'):  # strip trailing slash
        repo_path = repo_path[:-1]
    if os.path.exists(apt_auth_file):
        orig_content = util.load_file(apt_auth_file)
    else:
        orig_content = ''
    # APT_AUTH_COMMENT tags the line so later runs can tell which entries
    # this tool owns.
    repo_auth_line = (
        'machine {repo_path}/ login {login} password {password}{cmt}'.format(
            repo_path=repo_path,
            login=login,
            password=password,
            cmt=APT_AUTH_COMMENT))
    added_new_auth = False
    new_lines = []
    for line in orig_content.splitlines():
        # Capture the machine's base path (host plus leading path chars)
        # from existing 'machine <url>/ ...' lines.
        machine_match = re.match(r'machine\s+(?P<repo_url>[.\-\w]+)/?.*', line)
        if machine_match:
            matched_repo = machine_match.group('repo_url')
            if matched_repo == repo_path:
                # Replace old auth with new auth at same line
                new_lines.append(repo_auth_line)
                added_new_auth = True
                continue
            if matched_repo in repo_path:
                # Insert our repo before. We are a more specific apt repo match
                new_lines.append(repo_auth_line)
                added_new_auth = True
        new_lines.append(line)
    if not added_new_auth:
        # No existing entry matched; append at the end of the file.
        new_lines.append(repo_auth_line)
    util.write_file(apt_auth_file, '\n'.join(new_lines), mode=0o600)
 def is_viable(self):
     """This machine is a viable AWSInstance"""
     try:
         if util.load_file(SYS_HYPERVISOR_PRODUCT_UUID)[:3] == "ec2":
             return True
     except FileNotFoundError:
         # SYS_HYPERVISOR_PRODUCT_UUID isn't present on all EC2 instance
         # types, fall through
         pass
     # On EC2 both DMI identifiers carry the same 'ec2' prefix.
     dmi_prefixes = [
         util.load_file(path).lower()[:3]
         for path in (DMI_PRODUCT_UUID, DMI_PRODUCT_SERIAL)
     ]
     return dmi_prefixes == ["ec2", "ec2"]
Example #13
0
    def _get_response_overlay(self, url: str):
        """Return the list of fake response dicts configured for *url*.

        The overlay comes from the 'serviceclient_url_responses' feature:
        a path to a JSON file mapping full URL -> list of response dicts
        (each with a 'code' and a 'response' entry).

        :return: List of faked response dicts for the url, or an empty
            list when no overlay entry matches.
        """
        if self._response_overlay is None:
            # First call: load the overlay file once and cache it so we
            # don't re-read config on every readurl call.
            overlay_path = self.cfg.features.get(
                "serviceclient_url_responses")
            if overlay_path and os.path.exists(overlay_path):
                self._response_overlay = json.loads(
                    util.load_file(overlay_path))
            else:
                # Missing feature flag or missing file: empty overlay.
                self._response_overlay = {}
        return self._response_overlay.get(url, [])
Example #14
0
 def is_viable(self) -> bool:
     """This machine is a viable AzureInstance"""
     if os.path.exists(DMI_CHASSIS_ASSET_TAG):
         tag = util.load_file(DMI_CHASSIS_ASSET_TAG).strip()
         if tag == AZURE_CHASSIS_ASSET_TAG:
             return True
     # No (or non-Azure) chassis tag: the OVF environment file is the
     # remaining marker checked for Azure.
     return os.path.exists(AZURE_OVF_ENV_FILE)
    def is_viable(self) -> bool:
        """This machine is a viable GCPInstance"""
        if not os.path.exists(DMI_PRODUCT_NAME):
            return False
        # Match the DMI product name against the fixed GCE value.
        return util.load_file(DMI_PRODUCT_NAME).strip() == GCP_PRODUCT_NAME
Example #16
0
 def read_cache(self, key, quiet=False):
     """Return cached content for *key*, parsed as JSON when possible.

     :param key: cache key mapped to a file path via self.data_path().
     :param quiet: when True, suppress the debug log for a missing file.
     :return: parsed JSON value, raw file content, or None when absent.
     """
     cache_path = self.data_path(key)
     # EAFP: read directly rather than exists()+read, which races if the
     # file is removed between the check and the read.
     try:
         content = util.load_file(cache_path)
     except FileNotFoundError:
         if not quiet:
             logging.debug('File does not exist: %s', cache_path)
         return None
     json_content = util.maybe_parse_json(content)
     return json_content if json_content else content
Example #17
0
    def test_get_machine_id_create_machine_id_in_data_dir(self):
        """When no machine-id is found, create one in data_dir using uuid4."""
        data_machine_id = self.tmp_path('machine-id', dir=self.tdir)

        with mock.patch('uaclient.util.os.path.exists') as m_exists, \
                mock.patch('uaclient.util.uuid.uuid4') as m_uuid4:
            m_exists.return_value = False
            m_uuid4.return_value = '1234...1234'
            value = util.get_machine_id(data_dir=self.tdir)
        # The generated id is both returned and persisted under data_dir.
        self.assertEqual('1234...1234', value)
        self.assertEqual('1234...1234', util.load_file(data_machine_id))
Example #18
0
 def read_cache(self, key: str, silent: bool = False) -> "Optional[Any]":
     """Return cached content for *key*, JSON-decoded when valid."""
     cache_path = self.data_path(key)
     try:
         raw = util.load_file(cache_path)
     except Exception:
         if not silent and not os.path.exists(cache_path):
             logging.debug("File does not exist: %s", cache_path)
         return None
     try:
         return json.loads(raw, cls=util.DatetimeAwareJSONDecoder)
     except ValueError:
         # Not JSON: hand back the raw file content unchanged.
         return raw
Example #19
0
 def test_write_apt_pin_file_to_apt_preferences(self, m_platform, tmpdir):
     """Write proper apt pin file to specified apt_preference_file."""
     m_platform.return_value = 'xenial'
     pref_file = tmpdir.join('preffile').strpath
     result = add_ppa_pinning(
         pref_file, repo_url='http://fakerepo', origin='MYORIG',
         priority=1003)
     assert result is None
     # The pin file targets all packages from MYORIG on the series.
     assert util.load_file(pref_file) == dedent('''\
         Package: *
         Pin: release o=MYORIG, n=xenial
         Pin-Priority: 1003\n''')
Example #20
0
def get_instance_id(
        _iid_file: str = CLOUDINIT_INSTANCE_ID_FILE) -> "Optional[str]":
    """Query cloud instance-id from cmdline or CLOUDINIT_INSTANCE_ID_FILE"""
    series = util.get_platform_info()["series"]
    if series != "trusty":
        # cloud-init on >= Xenial provides the query subcommand.
        stdout, _stderr = util.subp(["cloud-init", "query", "instance_id"])
        return stdout.strip()
    if os.path.exists(_iid_file):
        return util.load_file(_iid_file)
    logging.warning("Unable to determine current instance-id from %s",
                    _iid_file)
    return None
Example #21
0
    def _check_apt_url_is_applied(self, apt_url):
        """Check if apt url delta should be applied.

        :param apt_url: string containing the apt url to be used.

        :return: False if apt url is already found on the source file.
                 True otherwise.
        """
        apt_file = self.repo_list_file_tmpl.format(name=self.name)
        # Read the source file once instead of once per check below.
        apt_file_content = util.load_file(apt_file)
        # If the apt file is commented out, we will assume that we need
        # to regenerate the apt file, regardless of the apt url delta
        if all(
                line.startswith("#")
                for line in apt_file_content.strip().split("\n")):
            return False

        # If the file is not commented out and we don't have delta,
        # we will not do anything
        if not apt_url:
            return True

        # If the delta is already in the file, we won't reconfigure it
        # again
        return bool(apt_url in apt_file_content)
    def parse_machine_token_overlay(self, machine_token_overlay_path):
        """Load and JSON-decode the machine-token overlay file.

        :raises UserFacingError: when the path does not exist or its
            content is not valid JSON.
        """
        if not os.path.exists(machine_token_overlay_path):
            raise exceptions.UserFacingError(
                status.INVALID_PATH_FOR_MACHINE_TOKEN_OVERLAY.format(
                    file_path=machine_token_overlay_path))

        try:
            return json.loads(
                util.load_file(machine_token_overlay_path))
        except ValueError as e:
            # Surface decode failures as a user-facing message.
            raise exceptions.UserFacingError(
                status.ERROR_JSON_DECODING_IN_FILE.format(
                    error=str(e), file_path=machine_token_overlay_path))
Example #23
0
 def test_write_apt_pin_file_to_apt_preferences(self, m_platform, tmpdir):
     """Write proper apt pin file to specified apt_preference_file."""
     m_platform.return_value = {"series": "xenial"}
     pref_file = tmpdir.join("preffile").strpath
     result = add_ppa_pinning(
         pref_file,
         repo_url="http://fakerepo",
         origin="MYORIG",
         priority=1003,
     )
     assert result is None
     # The pin file targets all packages from MYORIG on the series.
     assert util.load_file(pref_file) == dedent("""\
         Package: *
         Pin: release o=MYORIG, n=xenial
         Pin-Priority: 1003\n""")
def remove_repo_from_apt_auth_file(repo_url):
    """Remove a repo from the shared apt auth file"""
    _protocol, repo_path = repo_url.split("://")
    # Normalize away a single trailing slash before matching.
    repo_path = repo_path[:-1] if repo_path.endswith("/") else repo_path
    apt_auth_file = get_apt_auth_file_from_apt_config()
    if not os.path.exists(apt_auth_file):
        return
    auth_prefix = "machine {repo_path}/ login".format(repo_path=repo_path)
    remaining = "\n".join(
        line
        for line in util.load_file(apt_auth_file).splitlines()
        if auth_prefix not in line
    )
    if remaining:
        util.write_file(apt_auth_file, remaining, mode=0o600)
    else:
        # Nothing left in the file; drop it entirely.
        os.unlink(apt_auth_file)
Example #25
0
    def test_add_auth_apt_repo_writes_sources_file(
            self, m_platform, m_valid_creds, m_get_apt_auth_file, m_subp,
            tmpdir):
        """Write a properly configured sources file to repo_filename."""
        repo_file = tmpdir.join('repo.conf').strpath
        m_get_apt_auth_file.return_value = tmpdir.join('auth.conf').strpath
        m_subp.return_value = '500 esm.canonical.com...', ''  # apt policy

        add_auth_apt_repo(
            repo_filename=repo_file, repo_url='http://fakerepo',
            credentials='mycreds', suites=('xenial',))

        # Both deb and commented deb-src lines are written for the suite.
        assert util.load_file(repo_file) == (
            'deb http://fakerepo/ubuntu xenial main\n'
            '# deb-src http://fakerepo/ubuntu xenial main\n')
Example #26
0
    def test_add_auth_apt_repo_writes_bearer_resource_token_to_auth_file(
            self, m_platform, m_valid_creds, m_get_apt_auth_file, m_subp,
            tmpdir):
        """Write apt authentication file when credentials are bearer token."""
        repo_file = tmpdir.join('repo.conf').strpath
        auth_file = tmpdir.join('auth.conf').strpath
        m_get_apt_auth_file.return_value = auth_file
        m_subp.return_value = '500 esm.canonical.com...', ''  # apt policy

        add_auth_apt_repo(
            repo_filename=repo_file, repo_url='http://fakerepo/',
            credentials='SOMELONGTOKEN', suites=('xenia',))

        # A bare token is stored under the literal login name 'bearer'.
        assert util.load_file(auth_file) == (
            'machine fakerepo/ login bearer password SOMELONGTOKEN%s\n'
            % APT_AUTH_COMMENT)
    def test_add_auth_apt_repo_writes_sources_file(self, m_platform,
                                                   m_valid_creds,
                                                   m_get_apt_auth_file, m_subp,
                                                   tmpdir):
        """Write a properly configured sources file to repo_filename."""
        repo_file = tmpdir.join('repo.conf').strpath
        m_get_apt_auth_file.return_value = tmpdir.join('auth.conf').strpath

        add_auth_apt_repo(repo_filename=repo_file,
                          repo_url='http://fakerepo',
                          credentials='mycreds',
                          fingerprint='APTKEY')

        # Both deb and commented deb-src lines are written for the suite.
        assert util.load_file(repo_file) == (
            'deb http://fakerepo/ubuntu xenial main\n'
            '# deb-src http://fakerepo/ubuntu xenial main\n')
    def test_add_auth_apt_repo_writes_username_password_to_auth_file(
            self, m_platform, m_valid_creds, m_get_apt_auth_file, m_subp,
            tmpdir):
        """Write apt authentication file when credentials are user:pwd."""
        repo_file = tmpdir.join('repo.conf').strpath
        auth_file = tmpdir.join('auth.conf').strpath
        m_get_apt_auth_file.return_value = auth_file

        add_auth_apt_repo(repo_filename=repo_file,
                          repo_url='http://fakerepo',
                          credentials='user:password',
                          fingerprint='APTKEY')

        # 'user:password' splits into separate login and password fields.
        assert util.load_file(auth_file) == (
            'machine fakerepo/ login user password password%s' %
            APT_AUTH_COMMENT)
    def test_add_auth_apt_repo_writes_bearer_resource_token_to_auth_file(
            self, m_platform, m_valid_creds, m_get_apt_auth_file, m_subp,
            tmpdir):
        """Write apt authentication file when credentials are bearer token."""
        repo_file = tmpdir.join('repo.conf').strpath
        auth_file = tmpdir.join('auth.conf').strpath
        m_get_apt_auth_file.return_value = auth_file

        add_auth_apt_repo(repo_filename=repo_file,
                          repo_url='http://fakerepo/',
                          credentials='SOMELONGTOKEN',
                          fingerprint='APTKEY')

        # A bare token is stored under the literal login name 'bearer'.
        assert util.load_file(auth_file) == (
            'machine fakerepo/ login bearer password SOMELONGTOKEN%s' %
            APT_AUTH_COMMENT)
    def test_write_apps_or_infra_services_emits_no_warranty(
        self,
        get_contract_expiry_status,
        util_is_active_esm,
        write_esm_service_templates,
        m_entitlement_factory,
        is_active_esm,
        contract_expiry_status,
        infra_enabled,
        no_warranty,
        FakeConfig,
    ):
        """Write the UBUNTU_NO_WARRANTY file only when the scenario expects it.

        Parametrized over esm-activity, contract expiry status and whether
        esm-infra is enabled; *no_warranty* carries the expected outcome.
        """
        util_is_active_esm.return_value = is_active_esm
        if infra_enabled:
            infra_status = ApplicationStatus.ENABLED
        else:
            infra_status = ApplicationStatus.DISABLED
        # Fake esm-infra entitlement class whose instance reports the
        # parametrized application status.
        infra_cls = mock.MagicMock()
        infra_obj = infra_cls.return_value
        infra_obj.application_status.return_value = (infra_status, "")
        infra_obj.name = "esm-infra"

        def factory_side_effect(cfg, name):
            # Route esm-infra to the configured fake; esm-apps gets a
            # plain MagicMock (other names implicitly return None).
            if name == "esm-infra":
                return infra_cls
            if name == "esm-apps":
                return mock.MagicMock()

        m_entitlement_factory.side_effect = factory_side_effect

        get_contract_expiry_status.return_value = (
            contract_expiry_status,
            -12355,  # unused in this test
        )
        cfg = FakeConfig.for_attached_machine()
        msg_dir = os.path.join(cfg.data_dir, "messages")
        os.makedirs(msg_dir)

        write_apt_and_motd_templates(cfg, "xenial")
        assert [mock.call("xenial")] == util_is_active_esm.call_args_list
        no_warranty_file = os.path.join(msg_dir, "ubuntu-no-warranty")
        if no_warranty:
            assert UBUNTU_NO_WARRANTY == util.load_file(no_warranty_file)
        else:
            assert False is os.path.exists(no_warranty_file)