def test_accounts_extracts_accounts_key_from_account_read_cache(
        self, tmpdir):
    """Config.accounts property extracts the accounts key from cache."""
    expected_accounts = ['acct1', 'acct2']
    cfg = UAConfig({'data_dir': tmpdir.strpath})
    cfg.write_cache('accounts', {'accounts': expected_accounts})
    assert cfg.accounts == expected_accounts
def status(cfg: UAConfig, show_beta: bool = False) -> Dict[str, Any]:
    """Return status as a dict, using a cache for non-root users

    When unattached, get available resources from the contract service
    to report detailed availability of different resources for this
    machine.

    Write the status-cache when called by root.
    """
    is_root = os.getuid() == 0
    if not is_root:
        # Non-root callers read the cache written by the last root run.
        response = cast("Dict[str, Any]", cfg.read_cache("status-cache"))
        if not response:
            response = _unattached_status(cfg)
    else:
        response = (
            _attached_status(cfg)
            if cfg.is_attached
            else _unattached_status(cfg)
        )
    response.update(_get_config_status(cfg))
    if is_root:
        cfg.write_cache("status-cache", response)
        # Try to remove fix reboot notices if not applicable
        if not util.should_reboot():
            cfg.remove_notice(
                "",
                messages.ENABLE_REBOOT_REQUIRED_TMPL.format(
                    operation="fix operation"
                ),
            )
    return _handle_beta_resources(cfg, show_beta, response)
def attach_with_token(
    cfg: config.UAConfig, token: str, allow_enable: bool
) -> None:
    """
    Common functionality to take a token and attach via contract backend

    :param cfg: a ``config.UAConfig`` instance
    :param token: contract token to attach with
    :param allow_enable: whether to allow enabling services during attach

    :raise UrlError: On unexpected connectivity issues to contract
        server or inability to access identity doc from metadata service.
    :raise ContractAPIError: On unexpected errors when talking to the contract
        server.
    """
    from uaclient.jobs.update_messaging import update_apt_and_motd_messages

    try:
        contract.request_updated_contract(
            cfg, token, allow_enable=allow_enable
        )
    except (exceptions.UrlError, exceptions.UserFacingError):
        # Both failure modes get the same cleanup: persist updated status
        # in the event of partial attach, then refresh messaging.
        ua_status.status(cfg=cfg)
        update_apt_and_motd_messages(cfg)
        # Bare raise preserves the original exception and traceback.
        raise

    # Record the instance id so auto-attach can detect re-launches later.
    current_iid = identity.get_instance_id()
    if current_iid:
        cfg.write_cache("instance-id", current_iid)

    update_apt_and_motd_messages(cfg)
def test_root_followed_by_nonroot(
    self,
    m_getuid,
    m_get_available_resources,
    _m_should_reboot,
    _m_remove_notice,
    tmpdir,
    FakeConfig,
):
    """Ensure that non-root run after root returns data"""
    cfg = UAConfig({"data_dir": tmpdir.strpath})

    # Run as root
    m_getuid.return_value = 0
    root_status = copy.deepcopy(status.status(cfg=cfg))

    # Replicate an attach by modifying the underlying config and confirm
    # that we see different status
    attached_cfg = FakeConfig.for_attached_machine()
    cfg.write_cache("accounts", {"accounts": attached_cfg.accounts})
    cfg.write_cache("machine-token", attached_cfg.machine_token)
    cfg.delete_cache_key("status-cache")
    assert root_status != status._attached_status(cfg=cfg)

    # Run as regular user and confirm that we see the result from
    # last time we called .status()
    m_getuid.return_value = 1000
    assert status.status(cfg=cfg) == root_status
def test_write_datetime(self, tmpdir):
    """Datetime values round-trip through write_cache as ISO strings."""
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    now = datetime.datetime.now()
    cfg.write_cache("test_key", now)
    with open(cfg.data_path("test_key")) as stream:
        written = stream.read().strip('"')
    assert written == now.isoformat()
def test_delete_cache_unsets_entitlements(self):
    """The delete_cache unsets any cached entitlements content."""
    cfg = UAConfig({'data_dir': self.tmp_dir()})
    entitlement = {'type': 'entitlement1', 'entitled': True}
    cfg.write_cache(
        'machine-token',
        {'machineTokenInfo': {
            'contractInfo': {'resourceEntitlements': [entitlement]}}})
    assert cfg.entitlements == {
        'entitlement1': {'entitlement': entitlement}}
    cfg.delete_cache()
    assert cfg.entitlements == {}
def test_delete_cache_unsets_entitlements(self, tmpdir):
    """The delete_cache unsets any cached entitlements content."""
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    entitlement = {"type": "entitlement1", "entitled": True}
    cfg.write_cache(
        "machine-token",
        {"machineTokenInfo": {
            "contractInfo": {"resourceEntitlements": [entitlement]}}})
    assert cfg.entitlements == {
        "entitlement1": {"entitlement": entitlement}}
    cfg.delete_cache()
    assert cfg.entitlements == {}
def test_entitlements_property_keyed_by_entitlement_name(self, tmpdir):
    """Return machine_token resourceEntitlements, keyed by name."""
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    ent1 = {"type": "entitlement1", "entitled": True}
    ent2 = {"type": "entitlement2", "entitled": True}
    cfg.write_cache(
        "machine-token",
        {"machineTokenInfo": {
            "contractInfo": {"resourceEntitlements": [ent1, ent2]}}})
    assert cfg.entitlements == {
        "entitlement1": {"entitlement": ent1},
        "entitlement2": {"entitlement": ent2},
    }
def test_entitlements_uses_resource_token_from_machine_token(self, tmpdir):
    """Include entitlement-specific resourceTokens from machine_token"""
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    ent1 = {"type": "entitlement1", "entitled": True}
    ent2 = {"type": "entitlement2", "entitled": True}
    cfg.write_cache(
        "machine-token",
        {
            "availableResources": ALL_RESOURCES_AVAILABLE,
            "machineTokenInfo": {
                "contractInfo": {"resourceEntitlements": [ent1, ent2]}
            },
            "resourceTokens": [
                {"type": "entitlement1", "token": "ent1-token"},
                {"type": "entitlement2", "token": "ent2-token"},
            ],
        },
    )
    assert cfg.entitlements == {
        "entitlement1": {
            "entitlement": ent1,
            "resourceToken": "ent1-token",
        },
        "entitlement2": {
            "entitlement": ent2,
            "resourceToken": "ent2-token",
        },
    }
def test_nonroot_user_uses_cache_if_available(self, m_getuid, tmpdir):
    """Non-root status() returns whatever is in the status-cache."""
    m_getuid.return_value = 1000
    cached_status = {"pass": True}
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    cfg.write_cache("status-cache", cached_status)
    assert cfg.status() == cached_status
def test_accounts_logs_warning_when_missing_accounts_key_in_cache(self):
    """Config.accounts warns when missing 'accounts' key in cache"""
    tmp_dir = self.tmp_dir()
    cfg = UAConfig({'data_dir': tmp_dir})
    cfg.write_cache('accounts', {'non-accounts': 'somethingelse'})
    assert cfg.accounts == []
    cache_path = self.tmp_path('accounts.json', tmp_dir)
    expected_warning = (
        "WARNING: Missing 'accounts' key in cache %s" % cache_path)
    assert expected_warning in self.logs
def test_accounts_extracts_accounts_key_from_machine_token_cache(
        self, tmpdir):
    """Use machine_token cached accountInfo when no accounts cache."""
    account_info = {'id': '1', 'name': 'accountname'}
    cfg = UAConfig({'data_dir': tmpdir.strpath})
    cfg.write_cache(
        'machine-token',
        {'machineTokenInfo': {'accountInfo': account_info}})
    assert cfg.accounts == [account_info]
def test_accounts_logs_warning_when_non_dictionary_cache_content(
        self, caplog_text, tmpdir):
    """Config.accounts warns and returns empty list on non-dict cache."""
    cfg = UAConfig({'data_dir': tmpdir.strpath})
    cfg.write_cache('accounts', 'non-dict-value')
    assert cfg.accounts == []
    cache_path = tmpdir.join(PRIVATE_SUBDIR, 'accounts.json')
    expected_warning = (
        "WARNING Unexpected type <class 'str'> in cache %s" % cache_path)
    assert expected_warning in caplog_text()
def test_accounts_logs_warning_when_missing_accounts_key_in_cache(
        self, caplog_text, tmpdir):
    """Config.accounts warns when missing 'accounts' key in cache"""
    cfg = UAConfig({'data_dir': tmpdir.strpath})
    cfg.write_cache('accounts', {'non-accounts': 'somethingelse'})
    assert cfg.accounts == []
    cache_path = tmpdir.join(PRIVATE_SUBDIR, 'accounts.json')
    expected_warning = (
        "WARNING Missing 'accounts' key in cache %s" % cache_path)
    assert expected_warning in caplog_text()
def test_accounts_logs_warning_when_non_list_accounts_cache_content(self):
    """Config.accounts warns on non-list accounts key."""
    tmp_dir = self.tmp_dir()
    cfg = UAConfig({'data_dir': tmp_dir})
    cfg.write_cache('accounts', {'accounts': 'non-list-value'})
    assert cfg.accounts == []
    cache_path = self.tmp_path('accounts.json', tmp_dir)
    expected_warning = (
        "WARNING: Unexpected 'accounts' type <class 'str'> in cache %s"
        % cache_path)
    assert expected_warning in self.logs
def test_accounts_logs_warning_when_non_dictionary_cache_content(self):
    """Config.accounts warns and returns empty list on non-dict cache."""
    tmp_dir = self.tmp_dir()
    cfg = UAConfig({'data_dir': tmp_dir})
    cfg.write_cache('accounts', 'non-dict-value')
    assert cfg.accounts == []
    cache_path = self.tmp_path('accounts.json', tmp_dir)
    expected_warning = (
        "WARNING: Unexpected type <class 'str'> in cache %s" % cache_path)
    assert expected_warning in self.logs
def test_accounts_logs_warning_when_non_list_accounts_cache_content(
        self, caplog_text, tmpdir):
    """Config.accounts warns on non-list accounts key."""
    cfg = UAConfig({'data_dir': tmpdir.strpath})
    cfg.write_cache('accounts', {'accounts': 'non-list-value'})
    assert cfg.accounts == []
    cache_path = tmpdir.join(PRIVATE_SUBDIR, 'accounts.json')
    expected_warning = (
        "WARNING Unexpected 'accounts' type <class 'str'> in cache %s"
        % cache_path)
    assert expected_warning in caplog_text()
def test_delete_cache_removes_any_cached_data_path_files(self, tmpdir):
    """Any cached files defined in cfg.data_paths will be removed."""
    cfg = UAConfig({'data_dir': tmpdir.strpath})
    # Create half of the cached files, but not all
    cached_keys = list(cfg.data_paths.keys())[::2]
    for key in cached_keys:
        cfg.write_cache(key, key)
    assert len(os.listdir(tmpdir.strpath)) == len(cached_keys)

    cfg.delete_cache()
    leftover_files = os.listdir(tmpdir.strpath)
    assert len(leftover_files) == 0, (
        '%d files not deleted' % len(leftover_files))
def test_accounts_extracts_accounts_key_from_machine_token_cache(
        self, tmpdir):
    """Use machine_token cached accountInfo when no accounts cache."""
    account_info = {"id": "1", "name": "accountname"}
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    cfg.write_cache(
        "machine-token",
        {"machineTokenInfo": {"accountInfo": account_info}})
    assert cfg.accounts == [account_info]
def _get_contract_token_from_cloud_identity(cfg: config.UAConfig) -> str:
    """Detect cloud_type and request a contract token from identity info.

    :param cfg: a ``config.UAConfig`` instance

    :raise NonAutoAttachImageError: When not on an auto-attach image type
        or when this cloud type does not have auto-attach support.
    :raise UrlError: On unexpected connectivity issues to contract
        server or inability to access identity doc from metadata service.
    :raise ContractAPIError: On unexpected errors when talking to the contract
        server.
    :raise AlreadyAttachedError: When already attached and not being
        re-launched as a new instance of the same cloud image.

    :return: contract token obtained from identity doc
    """
    try:
        instance = identity.cloud_instance_factory()
    except exceptions.UserFacingError as e:
        if cfg.is_attached:
            # We are attached on non-Pro Image, just report already attached
            raise exceptions.AlreadyAttachedError(cfg)
        # Unattached on non-Pro return UserFacing error msg details
        raise e
    current_iid = identity.get_instance_id()
    if cfg.is_attached:
        prev_iid = cfg.read_cache("instance-id")
        if current_iid == prev_iid:
            # Same instance we previously attached on; nothing more to do.
            raise exceptions.AlreadyAttachedError(cfg)
        # Different instance id: the image was re-launched, so detach the
        # stale attachment before requesting a fresh token.
        print("Re-attaching Ubuntu Advantage subscription on new instance")
        if _detach(cfg, assume_yes=True) != 0:
            raise exceptions.UserFacingError(
                ua_status.MESSAGE_DETACH_AUTOMATION_FAILURE
            )
    contract_client = contract.UAContractClient(cfg)
    try:
        tokenResponse = contract_client.request_auto_attach_contract_token(
            instance=instance
        )
    except contract.ContractAPIError as e:
        # 4xx from the contract server means this image is not entitled
        # to auto-attach; other errors propagate unchanged.
        if e.code and 400 <= e.code < 500:
            raise exceptions.NonAutoAttachImageError(
                ua_status.MESSAGE_UNSUPPORTED_AUTO_ATTACH
            )
        raise e
    if current_iid:
        # Record instance id so future runs can detect re-launches.
        cfg.write_cache("instance-id", current_iid)
    return tokenResponse["contractToken"]
def test_accounts_extracts_accounts_key_from_machine_token_cache(
    self, tmpdir
):
    """Use machine_token cached accountInfo when no accounts cache."""
    account_info = {"id": "1", "name": "accountname"}
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    machine_token = {
        "availableResources": ALL_RESOURCES_AVAILABLE,
        "machineTokenInfo": {"accountInfo": account_info},
    }
    cfg.write_cache("machine-token", machine_token)
    assert cfg.accounts == [account_info]
def test_nonroot_user_uses_cache_and_updates_if_available(
        self, _m_should_reboot, m_getuid, tmpdir):
    """Non-root status() reads the cache but refreshes configStatus."""
    m_getuid.return_value = 1000
    cached_status = {"pass": True}
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    cfg.write_cache("status-cache", cached_status)
    # Even non-root users can update configStatus details
    details = MESSAGE_ENABLE_REBOOT_REQUIRED_TMPL.format(
        operation="configuration changes")
    expected = dict(cached_status)
    expected["configStatus"] = UserFacingConfigStatus.REBOOTREQUIRED.value
    expected["configStatusDetails"] = details
    assert cfg.status() == expected
def test_entitlements_use_machine_access_when_present(self, tmpdir):
    """Return specific machine-access info if present."""
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    cfg.write_cache(
        "machine-token",
        {
            "availableResources": ALL_RESOURCES_AVAILABLE,
            "machineTokenInfo": {
                "contractInfo": {
                    "resourceEntitlements": [
                        {"type": "entitlement1", "entitled": True},
                        {"type": "entitlement2", "entitled": True},
                    ]
                }
            },
        },
    )
    # machine-access-* cache content wins over the machine-token info
    machine_access = {
        "entitlement": {
            "type": "entitlement1",
            "entitled": True,
            "more": "data",
        }
    }
    cfg.write_cache("machine-access-entitlement1", machine_access)
    assert cfg.entitlements == {
        "entitlement1": machine_access,
        "entitlement2": {
            "entitlement": {"entitled": True, "type": "entitlement2"}
        },
    }
def test_delete_cache_removes_any_cached_data_path_files(self, tmpdir):
    """Any cached files defined in cfg.data_paths will be removed."""
    cfg = UAConfig({"data_dir": tmpdir.strpath})

    def _files_in_data_dir():
        # Collect filenames at any depth under the data dir.
        return list(
            itertools.chain.from_iterable(
                entry[2] for entry in os.walk(tmpdir.strpath)))

    # Create half of the cached files, but not all
    cached_keys = list(cfg.data_paths.keys())[::2]
    for key in cached_keys:
        cfg.write_cache(key, key)
    assert len(_files_in_data_dir()) == len(cached_keys)

    cfg.delete_cache()
    leftover_files = _files_in_data_dir()
    assert len(leftover_files) == 0, "{} files not deleted".format(
        len(leftover_files))
def test_write_cache_writes_non_private_dir_when_private_is_false(
        self, tmpdir):
    """With private=False, content lands in data_dir, not the private dir."""
    cfg = UAConfig({'data_dir': tmpdir.strpath})
    assert cfg.write_cache('key', 'value', private=False) is None
    with open(tmpdir.join('key').strpath, 'r') as stream:
        assert stream.read() == 'value'
    assert cfg.read_cache('key') == 'value'
def test_write_cache_writes_json_string_when_content_not_a_string(
        self, tmpdir, key, value):
    """When content is not a string, write a json string."""
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    assert cfg.write_cache(key, value) is None
    cache_path = tmpdir.join(PRIVATE_SUBDIR, key).strpath
    with open(cache_path, "r") as stream:
        assert stream.read() == json.dumps(value)
    assert cfg.read_cache(key) == value
def test_write_cache_creates_secure_private_dir(self, tmpdir):
    """private_dir is created with permission 0o700."""
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    # unknown keys are written to the private dir
    private_dir = tmpdir.join(PRIVATE_SUBDIR)
    assert cfg.write_cache("somekey", "somevalue") is None
    assert os.path.isdir(
        private_dir.strpath), "Missing expected directory {}".format(
        private_dir)
    assert stat.S_IMODE(os.lstat(private_dir.strpath).st_mode) == 0o700
def test_write_cache_creates_dir_when_data_dir_does_not_exist(self):
    """When data_dir doesn't exist, create it."""
    tmp_subdir = self.tmp_path('does/not/exist')
    cfg = UAConfig({'data_dir': tmp_subdir})
    assert not os.path.isdir(tmp_subdir), (
        'Found unexpected directory %s' % tmp_subdir)
    assert cfg.write_cache('somekey', 'someval') is None
    assert os.path.isdir(tmp_subdir), (
        'Missing expected directory %s' % tmp_subdir)
    assert cfg.read_cache('somekey') == 'someval'
def test_write_cache_writes_json_string_when_content_not_a_string(
        self, key, value):
    """When content is not a string, write a json string."""
    tmp_dir = self.tmp_dir()
    cfg = UAConfig({'data_dir': tmp_dir})
    assert cfg.write_cache(key, value) is None
    with open(self.tmp_path(key, tmp_dir), 'r') as stream:
        assert stream.read() == json.dumps(value)
    assert cfg.read_cache(key) == value
def test_write_cache_write_key_name_in_data_dir_when_data_path_absent(
        self, tmpdir, key, content):
    """When key is not in data_paths, write content to data_dir/key."""
    cfg = UAConfig({"data_dir": tmpdir.strpath})
    cache_path = tmpdir.join(PRIVATE_SUBDIR, key)
    assert not cache_path.check(), "Found unexpected file {}".format(
        cache_path)
    assert cfg.write_cache(key, content) is None
    assert cache_path.check(), "Missing expected file {}".format(cache_path)
    assert cfg.read_cache(key) == content