def wait_for_metadata_service(self):
    """Wait until an OpenStack metadata service answers.

    Candidate base urls come from ds_cfg['metadata_urls'] (default
    [DEF_MD_URL]); unresolvable hosts are filtered out before polling.
    On success, stores the winning base url in self.metadata_address.

    :return: True if a '<base>/openstack' endpoint responded.
    """
    urls = self.ds_cfg.get("metadata_urls", [DEF_MD_URL])
    # Drop addresses whose hostname does not resolve; no point waiting
    # on them.
    filtered = [x for x in urls if util.is_resolvable_url(x)]
    if set(filtered) != set(urls):
        LOG.debug("Removed the following from metadata urls: %s",
                  list((set(urls) - set(filtered))))
    if len(filtered):
        urls = filtered
    else:
        # Fix: logging's .warn() is a deprecated alias of .warning().
        LOG.warning("Empty metadata url list! using default list")
        urls = [DEF_MD_URL]

    md_urls = []
    url2base = {}
    for url in urls:
        md_url = url_helper.combine_url(url, 'openstack')
        md_urls.append(md_url)
        url2base[md_url] = url

    (max_wait, timeout) = self._get_url_settings()
    start_time = time.time()
    avail_url = url_helper.wait_for_url(urls=md_urls, max_wait=max_wait,
                                        timeout=timeout)
    if avail_url:
        LOG.debug("Using metadata source: '%s'", url2base[avail_url])
    else:
        LOG.debug("Giving up on OpenStack md from %s after %s seconds",
                  md_urls, int(time.time() - start_time))

    # .get() keeps metadata_address as None when no url responded.
    self.metadata_address = url2base.get(avail_url)
    return bool(avail_url)
def wait_for_metadata_service(self):
    """Block until the EC2 metadata service is reachable.

    Reads max_wait/timeout/metadata_urls from ds_cfg, probes each
    url's '<api_ver>/meta-data/instance-id' endpoint, and records the
    winning base url in self.metadata_address.

    :return: True if a metadata service answered.
    """
    mcfg = self.ds_cfg
    # ds_cfg may be a non-dict (e.g. None); treat it as empty config.
    if not hasattr(mcfg, "get"):
        mcfg = {}

    max_wait = 120
    try:
        max_wait = int(mcfg.get("max_wait", max_wait))
    except Exception:
        util.logexc(log)
        # Fix: .warn() is a deprecated alias of .warning(); also pass
        # args lazily instead of eager %-formatting.
        log.warning("Failed to get max wait. using %s", max_wait)

    # max_wait of 0 means "do not wait at all".
    if max_wait == 0:
        return False

    timeout = 50
    try:
        timeout = int(mcfg.get("timeout", timeout))
    except Exception:
        util.logexc(log)
        log.warning("Failed to get timeout, using %s", timeout)

    def_mdurls = ["http://169.254.169.254", "http://instance-data.:8773"]
    mdurls = mcfg.get("metadata_urls", def_mdurls)

    # Remove addresses from the list that wont resolve.
    filtered = [x for x in mdurls if util.is_resolvable_url(x)]
    if set(filtered) != set(mdurls):
        log.debug("removed the following from metadata urls: %s",
                  list((set(mdurls) - set(filtered))))
    if len(filtered):
        mdurls = filtered
    else:
        log.warning("Empty metadata url list! using default list")
        mdurls = def_mdurls

    urls = []
    # wait_for_url returns False when nothing answered; the False key
    # maps that straight back to False for metadata_address.
    url2base = {False: False}
    for url in mdurls:
        cur = "%s/%s/meta-data/instance-id" % (url, self.api_ver)
        urls.append(cur)
        url2base[cur] = url

    starttime = time.time()
    url = util.wait_for_url(urls=urls, max_wait=max_wait,
                            timeout=timeout, status_cb=log.warning)
    if url:
        log.debug("Using metadata source: '%s'", url2base[url])
    else:
        log.critical("giving up on md after %i seconds\n",
                     int(time.time() - starttime))

    self.metadata_address = url2base[url]
    return bool(url)
def wait_for_metadata_service(self):
    """Poll candidate OpenStack metadata urls; remember the first responder.

    Unresolvable hosts are dropped up front; the surviving bases are
    probed at '<base>/openstack'.  self.metadata_address ends up as the
    responding base url, or None.
    """
    candidates = self.ds_cfg.get("metadata_urls", [DEF_MD_URL])
    resolvable = [u for u in candidates if util.is_resolvable_url(u)]
    if set(resolvable) != set(candidates):
        LOG.debug("Removed the following from metadata urls: %s",
                  list((set(candidates) - set(resolvable))))
    if not resolvable:
        LOG.warning("Empty metadata url list! using default list")
        candidates = [DEF_MD_URL]
    else:
        candidates = resolvable

    probe_urls = []
    base_by_probe = {}
    for base in candidates:
        probe = url_helper.combine_url(base, 'openstack')
        probe_urls.append(probe)
        base_by_probe[probe] = base

    params = self.get_url_params()
    began = time.time()
    found, _response = url_helper.wait_for_url(
        urls=probe_urls, max_wait=params.max_wait_seconds,
        timeout=params.timeout_seconds)

    if found:
        LOG.debug("Using metadata source: '%s'", base_by_probe[found])
    else:
        LOG.debug("Giving up on OpenStack md from %s after %s seconds",
                  probe_urls, int(time.time() - began))

    self.metadata_address = base_by_probe.get(found)
    return bool(found)
def wait_for_metadata_service(self):
    """Locate a reachable EC2-style metadata endpoint, token path first.

    Tries the IMDSv2 API-token route via _maybe_fetch_api_token(); only
    falls back to the unauthenticated IMDSv1 instance-id probe when not
    on AWS.  Sets self.metadata_address on success and returns True.
    """
    mcfg = self.ds_cfg
    url_params = self.get_url_params()
    # A non-positive max_wait means waiting is disabled entirely.
    if url_params.max_wait_seconds <= 0:
        return False

    # Remove addresses from the list that wont resolve.
    mdurls = mcfg.get("metadata_urls", self.metadata_urls)
    filtered = [x for x in mdurls if util.is_resolvable_url(x)]
    if set(filtered) != set(mdurls):
        LOG.debug("Removed the following from metadata urls: %s",
                  list((set(mdurls) - set(filtered))))
    if len(filtered):
        mdurls = filtered
    else:
        LOG.warning("Empty metadata url list! using default list")
        mdurls = self.metadata_urls

    # try the api token path first
    metadata_address = self._maybe_fetch_api_token(mdurls)
    # When running on EC2, we always access IMDS with an API token.
    # If we could not get an API token, then we assume the IMDS
    # endpoint was disabled and we move on without a data source.
    # Fallback to IMDSv1 if not running on EC2
    if not metadata_address and self.cloud_name != CloudNames.AWS:
        # if we can't get a token, use instance-id path
        urls = []
        url2base = {}
        url_path = '{ver}/meta-data/instance-id'.format(
            ver=self.min_metadata_version)
        request_method = 'GET'
        for url in mdurls:
            cur = '{0}/{1}'.format(url, url_path)
            urls.append(cur)
            url2base[cur] = url

        start_time = time.time()
        # headers_redact keeps any token material out of the logs.
        url, _ = uhelp.wait_for_url(
            urls=urls, max_wait=url_params.max_wait_seconds,
            timeout=url_params.timeout_seconds, status_cb=LOG.warning,
            headers_redact=AWS_TOKEN_REDACT, headers_cb=self._get_headers,
            request_method=request_method)
        if url:
            metadata_address = url2base[url]

    if metadata_address:
        self.metadata_address = metadata_address
        LOG.debug("Using metadata source: '%s'", self.metadata_address)
    elif self.cloud_name == CloudNames.AWS:
        LOG.warning("IMDS's HTTP endpoint is probably disabled")
    else:
        # NOTE: 'urls'/'start_time' are defined here because the
        # non-AWS fallback branch above must have executed.
        LOG.critical("Giving up on md from %s after %s seconds",
                     urls, int(time.time() - start_time))

    return bool(metadata_address)
def get_data(self):
    """Populate self.metadata from the GCE metadata service.

    Bails out early when the platform does not report GCE or the
    metadata address does not resolve.  Returns True only if at least
    one metadata value was actually fetched (i.e. we are on GCE).
    """
    if not platform_reports_gce():
        return False

    # url_map: (our-key, path, required, is_text)
    url_map = [
        ('instance-id', ('instance/id', ), True, True),
        ('availability-zone', ('instance/zone', ), True, True),
        ('local-hostname', ('instance/hostname', ), True, True),
        ('public-keys', ('project/attributes/sshKeys',
                         'instance/attributes/ssh-keys'), False, True),
        ('user-data', ('instance/attributes/user-data', ), False, False),
        ('user-data-encoding',
         ('instance/attributes/user-data-encoding', ), False, True),
    ]

    # if we cannot resolve the metadata server, then no point in trying
    if not util.is_resolvable_url(self.metadata_address):
        LOG.debug("%s is not resolvable", self.metadata_address)
        return False

    metadata_fetcher = GoogleMetadataFetcher(self.metadata_address)
    # iterate over url_map keys to get metadata items
    running_on_gce = False
    for (mkey, paths, required, is_text) in url_map:
        value = None
        # Later paths override earlier ones when both return a value.
        for path in paths:
            new_value = metadata_fetcher.get_value(path, is_text)
            if new_value is not None:
                value = new_value
        if value:
            running_on_gce = True
        if required and value is None:
            msg = "required key %s returned nothing. not GCE"
            # Before anything was fetched a miss just means "not GCE";
            # afterwards it is an unexpected partial failure.
            if not running_on_gce:
                LOG.debug(msg, mkey)
            else:
                LOG.warning(msg, mkey)
            return False
        self.metadata[mkey] = value

    if self.metadata['public-keys']:
        lines = self.metadata['public-keys'].splitlines()
        self.metadata['public-keys'] = [self._trim_key(k) for k in lines]

    if self.metadata['availability-zone']:
        # Zone arrives as 'projects/<n>/zones/<zone>'; keep the last part.
        self.metadata['availability-zone'] = self.metadata[
            'availability-zone'].split('/')[-1]

    encoding = self.metadata.get('user-data-encoding')
    if encoding:
        if encoding == 'base64':
            self.metadata['user-data'] = b64decode(
                self.metadata['user-data'])
        else:
            LOG.warning('unknown user-data-encoding: %s, ignoring',
                        encoding)
    return running_on_gce
def wait_for_metadata_service(self):
    """Block until the EC2 metadata service is reachable.

    Same contract as the sibling implementation: reads wait settings
    and urls from ds_cfg, probes the instance-id endpoint, and stores
    the winning base url in self.metadata_address.

    :return: True if a metadata service answered.
    """
    mcfg = self.ds_cfg
    # ds_cfg may be a non-dict (e.g. None); treat it as empty config.
    if not hasattr(mcfg, "get"):
        mcfg = {}

    max_wait = 120
    try:
        max_wait = int(mcfg.get("max_wait", max_wait))
    except Exception:
        util.logexc(log)
        # Fix: .warn() is a deprecated alias of .warning(); also pass
        # args lazily instead of eager %-formatting.
        log.warning("Failed to get max wait. using %s", max_wait)

    # max_wait of 0 means "do not wait at all".
    if max_wait == 0:
        return False

    timeout = 50
    try:
        timeout = int(mcfg.get("timeout", timeout))
    except Exception:
        util.logexc(log)
        log.warning("Failed to get timeout, using %s", timeout)

    def_mdurls = ["http://169.254.169.254", "http://instance-data.:8773"]
    mdurls = mcfg.get("metadata_urls", def_mdurls)

    # Remove addresses from the list that wont resolve.
    filtered = [x for x in mdurls if util.is_resolvable_url(x)]
    if set(filtered) != set(mdurls):
        log.debug("removed the following from metadata urls: %s",
                  list((set(mdurls) - set(filtered))))
    if len(filtered):
        mdurls = filtered
    else:
        log.warning("Empty metadata url list! using default list")
        mdurls = def_mdurls

    urls = []
    # wait_for_url returns False when nothing answered; the False key
    # maps that straight back to False for metadata_address.
    url2base = {False: False}
    for url in mdurls:
        cur = "%s/%s/meta-data/instance-id" % (url, self.api_ver)
        urls.append(cur)
        url2base[cur] = url

    starttime = time.time()
    url = util.wait_for_url(urls=urls, max_wait=max_wait,
                            timeout=timeout, status_cb=log.warning)
    if url:
        log.debug("Using metadata source: '%s'", url2base[url])
    else:
        log.critical("giving up on md after %i seconds\n",
                     int(time.time() - starttime))

    self.metadata_address = url2base[url]
    return bool(url)
def wait_for_metadata_service(self):
    """Find a reachable EC2 metadata endpoint, preferring the token path.

    Attempts the IMDSv2 API-token route first; if that yields nothing
    (including when the token endpoint is explicitly disabled), retries
    with the plain IMDSv1 instance-id probe.  Sets
    self.metadata_address on success and returns True.
    """
    mcfg = self.ds_cfg

    url_params = self.get_url_params()
    # A non-positive max_wait means waiting is disabled entirely.
    if url_params.max_wait_seconds <= 0:
        return False

    # Remove addresses from the list that wont resolve.
    mdurls = mcfg.get("metadata_urls", self.metadata_urls)
    filtered = [x for x in mdurls if util.is_resolvable_url(x)]
    if set(filtered) != set(mdurls):
        LOG.debug("Removed the following from metadata urls: %s",
                  list((set(mdurls) - set(filtered))))
    if len(filtered):
        mdurls = filtered
    else:
        LOG.warning("Empty metadata url list! using default list")
        mdurls = self.metadata_urls

    # try the api token path first
    metadata_address = self._maybe_fetch_api_token(mdurls)
    if not metadata_address:
        if self._api_token == API_TOKEN_DISABLED:
            LOG.warning('Retrying with IMDSv1')
        # if we can't get a token, use instance-id path
        urls = []
        url2base = {}
        url_path = '{ver}/meta-data/instance-id'.format(
            ver=self.min_metadata_version)
        request_method = 'GET'
        for url in mdurls:
            cur = '{0}/{1}'.format(url, url_path)
            urls.append(cur)
            url2base[cur] = url

        start_time = time.time()
        # headers_redact keeps any token material out of the logs.
        url, _ = uhelp.wait_for_url(urls=urls,
                                    max_wait=url_params.max_wait_seconds,
                                    timeout=url_params.timeout_seconds,
                                    status_cb=LOG.warning,
                                    headers_redact=AWS_TOKEN_REDACT,
                                    headers_cb=self._get_headers,
                                    request_method=request_method)

        if url:
            metadata_address = url2base[url]

    if metadata_address:
        self.metadata_address = metadata_address
        LOG.debug("Using metadata source: '%s'", self.metadata_address)
    else:
        # NOTE: 'urls'/'start_time' exist here because the fallback
        # branch above must have executed when metadata_address is falsy.
        LOG.critical("Giving up on md from %s after %s seconds",
                     urls, int(time.time() - start_time))

    return bool(metadata_address)
def get_data(self):
    """Populate self.metadata from the IBGCE metadata service.

    Fetches each entry of url_map; a failed *required* fetch means we
    are not running on IBGCE and False is returned.

    :return: True if at least one metadata item was retrieved.
    """
    # IBGCE metadata server requires a custom header since v1
    headers = {'X-Google-Metadata-Request': str(True)}

    # url_map: (our-key, path, required)
    url_map = [
        ('instance-id', 'instance/id', True),
        ('availability-zone', 'instance/zone', True),
        ('local-hostname', 'instance/hostname', True),
        ('public-keys', 'project/attributes/sshKeys', False),
        ('user-data', 'instance/attributes/user-data', False),
    ]

    # if we cannot resolve the metadata server, then no point in trying
    if not util.is_resolvable_url(self.metadata_address):
        LOG.debug("%s is not resolvable", self.metadata_address)
        return False

    # iterate over url_map keys to get metadata items
    found = False
    for (mkey, path, required) in url_map:
        try:
            resp = url_helper.readurl(url=self.metadata_address + path,
                                      headers=headers)
            if resp.code == 200:
                found = True
                self.metadata[mkey] = resp.contents
            else:
                if required:
                    msg = "required url %s returned code %s. not IBGCE"
                    # Before anything was found a miss means "not IBGCE";
                    # afterwards it is an unexpected partial failure.
                    if not found:
                        LOG.debug(msg, path, resp.code)
                    else:
                        # Fix: .warn() is a deprecated alias of .warning().
                        LOG.warning(msg, path, resp.code)
                    return False
                else:
                    self.metadata[mkey] = None
        except url_helper.UrlError as e:
            if required:
                msg = "required url %s raised exception %s. not IBGCE"
                if not found:
                    LOG.debug(msg, path, e)
                else:
                    LOG.warning(msg, path, e)
                return False
            msg = "Failed to get %s metadata item: %s."
            LOG.debug(msg, path, e)
            self.metadata[mkey] = None

    if self.metadata['public-keys']:
        lines = self.metadata['public-keys'].splitlines()
        self.metadata['public-keys'] = [self._trim_key(k) for k in lines]

    return found
def get_data(self):
    """Populate self.metadata from the GCE metadata service.

    Uses GoogleMetadataFetcher for each url_map entry; a missing
    *required* key means we are not on GCE.

    :return: True only if at least one value was fetched.
    """
    # url_map: (our-key, path, required, is_text)
    url_map = [
        ('instance-id', ('instance/id',), True, True),
        ('availability-zone', ('instance/zone',), True, True),
        ('local-hostname', ('instance/hostname',), True, True),
        ('public-keys', ('project/attributes/sshKeys',
                         'instance/attributes/sshKeys'), False, True),
        ('user-data', ('instance/attributes/user-data',), False, False),
        ('user-data-encoding',
         ('instance/attributes/user-data-encoding',), False, True),
    ]

    # if we cannot resolve the metadata server, then no point in trying
    if not util.is_resolvable_url(self.metadata_address):
        LOG.debug("%s is not resolvable", self.metadata_address)
        return False

    metadata_fetcher = GoogleMetadataFetcher(self.metadata_address)
    # iterate over url_map keys to get metadata items
    running_on_gce = False
    for (mkey, paths, required, is_text) in url_map:
        value = None
        # Later paths override earlier ones when both return a value.
        for path in paths:
            new_value = metadata_fetcher.get_value(path, is_text)
            if new_value is not None:
                value = new_value
        if value:
            running_on_gce = True
        if required and value is None:
            msg = "required key %s returned nothing. not GCE"
            if not running_on_gce:
                LOG.debug(msg, mkey)
            else:
                # Fix: .warn() is a deprecated alias of .warning().
                LOG.warning(msg, mkey)
            return False
        self.metadata[mkey] = value

    if self.metadata['public-keys']:
        lines = self.metadata['public-keys'].splitlines()
        self.metadata['public-keys'] = [self._trim_key(k) for k in lines]

    if self.metadata['availability-zone']:
        # Zone arrives as 'projects/<n>/zones/<zone>'; keep the last part.
        self.metadata['availability-zone'] = self.metadata[
            'availability-zone'].split('/')[-1]

    encoding = self.metadata.get('user-data-encoding')
    if encoding:
        if encoding == 'base64':
            self.metadata['user-data'] = b64decode(
                self.metadata['user-data'])
        else:
            LOG.warning('unknown user-data-encoding: %s, ignoring',
                        encoding)
    return running_on_gce
def get_data(self):
    """Populate self.metadata from the GCE metadata service (legacy v1).

    Fetches each entry of url_map with the required GCE header; a
    failed *required* fetch means we are not on GCE.

    :return: True if at least one metadata item was retrieved.
    """
    # GCE metadata server requires a custom header since v1.
    # Fix: header values must be strings, not bool (matches the
    # str(True) usage elsewhere in this file).
    headers = {"X-Google-Metadata-Request": str(True)}

    # url_map: (our-key, path, required)
    url_map = [
        ("instance-id", "instance/id", True),
        ("availability-zone", "instance/zone", True),
        ("local-hostname", "instance/hostname", True),
        ("public-keys", "project/attributes/sshKeys", False),
        ("user-data", "instance/attributes/user-data", False),
    ]

    # if we cannot resolve the metadata server, then no point in trying
    if not util.is_resolvable_url(self.metadata_address):
        LOG.debug("%s is not resolvable", self.metadata_address)
        return False

    # iterate over url_map keys to get metadata items
    found = False
    for (mkey, path, required) in url_map:
        try:
            resp = url_helper.readurl(url=self.metadata_address + path,
                                      headers=headers)
            if resp.code == 200:
                found = True
                self.metadata[mkey] = resp.contents
            else:
                if required:
                    msg = "required url %s returned code %s. not GCE"
                    if not found:
                        LOG.debug(msg, path, resp.code)
                    else:
                        # Fix: .warn() is a deprecated alias of .warning().
                        LOG.warning(msg, path, resp.code)
                    return False
                else:
                    self.metadata[mkey] = None
        except url_helper.UrlError as e:
            if required:
                msg = "required url %s raised exception %s. not GCE"
                if not found:
                    LOG.debug(msg, path, e)
                else:
                    LOG.warning(msg, path, e)
                return False
            msg = "Failed to get %s metadata item: %s."
            LOG.debug(msg, path, e)
            self.metadata[mkey] = None

    if self.metadata["public-keys"]:
        lines = self.metadata["public-keys"].splitlines()
        self.metadata["public-keys"] = [self._trim_key(k) for k in lines]

    return found
def search_for_mirror(candidates):
    """Return the first resolvable mirror url from candidates, or None.

    This needs to return quickly.
    """
    if candidates is None:
        return None

    LOG.debug("search for mirror in candidates: '%s'", candidates)
    for candidate in candidates:
        try:
            resolvable = util.is_resolvable_url(candidate)
        except Exception:
            # Resolution errors just mean "try the next candidate".
            continue
        if resolvable:
            LOG.debug("found working mirror: '%s'", candidate)
            return candidate
    return None
def wait_for_metadata_service(self):
    """Block until a metadata service from DEF_MD_URLS answers.

    Filters unresolvable hosts, probes each url's
    '<api_ver>/meta-data/instance-id' endpoint, and stores the winning
    base url in self.metadata_address.

    :return: True if a metadata service answered.
    """
    mcfg = self.ds_cfg
    if not mcfg:
        mcfg = {}

    (max_wait, timeout) = self._get_url_settings()
    # A non-positive max_wait means waiting is disabled entirely.
    if max_wait <= 0:
        return False

    # Remove addresses from the list that wont resolve.
    mdurls = mcfg.get("metadata_urls", DEF_MD_URLS)
    filtered = [x for x in mdurls if util.is_resolvable_url(x)]
    if set(filtered) != set(mdurls):
        LOG.debug("Removed the following from metadata urls: %s",
                  list((set(mdurls) - set(filtered))))
    if len(filtered):
        mdurls = filtered
    else:
        # Fix: .warn() is a deprecated alias of .warning().
        LOG.warning("Empty metadata url list! using default list")
        mdurls = DEF_MD_URLS

    urls = []
    url2base = {}
    for url in mdurls:
        cur = "%s/%s/meta-data/instance-id" % (url, self.api_ver)
        urls.append(cur)
        url2base[cur] = url

    start_time = time.time()
    url = uhelp.wait_for_url(urls=urls, max_wait=max_wait,
                             timeout=timeout, status_cb=LOG.warning)

    if url:
        LOG.debug("Using metadata source: '%s'", url2base[url])
    else:
        LOG.critical("Giving up on md from %s after %s seconds",
                     urls, int(time.time() - start_time))

    # .get() keeps metadata_address as None when no url responded.
    self.metadata_address = url2base.get(url)
    return bool(url)
def wait_for_metadata_service(self):
    """Block until a configured metadata url answers.

    Filters unresolvable hosts, probes each url's
    '<min_metadata_version>/meta-data/instance-id' endpoint, and stores
    the winning base url in self.metadata_address.

    :return: True if a metadata service answered.
    """
    mcfg = self.ds_cfg

    url_params = self.get_url_params()
    # A non-positive max_wait means waiting is disabled entirely.
    if url_params.max_wait_seconds <= 0:
        return False

    # Remove addresses from the list that wont resolve.
    mdurls = mcfg.get("metadata_urls", self.metadata_urls)
    filtered = [x for x in mdurls if util.is_resolvable_url(x)]
    if set(filtered) != set(mdurls):
        LOG.debug("Removed the following from metadata urls: %s",
                  list((set(mdurls) - set(filtered))))
    if len(filtered):
        mdurls = filtered
    else:
        LOG.warning("Empty metadata url list! using default list")
        mdurls = self.metadata_urls

    urls = []
    url2base = {}
    for url in mdurls:
        cur = '{0}/{1}/meta-data/instance-id'.format(
            url, self.min_metadata_version)
        urls.append(cur)
        url2base[cur] = url

    start_time = time.time()
    # Fix: status_cb used the deprecated LOG.warn alias while the rest
    # of this function already uses LOG.warning.
    url = uhelp.wait_for_url(urls=urls,
                             max_wait=url_params.max_wait_seconds,
                             timeout=url_params.timeout_seconds,
                             status_cb=LOG.warning)

    if url:
        self.metadata_address = url2base[url]
        LOG.debug("Using metadata source: '%s'", self.metadata_address)
    else:
        LOG.critical("Giving up on md from %s after %s seconds",
                     urls, int(time.time() - start_time))

    return bool(url)
def wait_for_metadata_service(self):
    """Probe configured metadata urls; record the first that answers."""
    cfg = self.ds_cfg

    params = self.get_url_params()
    if params.max_wait_seconds <= 0:
        return False

    # Remove addresses from the list that wont resolve.
    candidates = cfg.get("metadata_urls", self.metadata_urls)
    usable = [u for u in candidates if util.is_resolvable_url(u)]
    if set(usable) != set(candidates):
        LOG.debug("Removed the following from metadata urls: %s",
                  list((set(candidates) - set(usable))))
    if not usable:
        LOG.warning("Empty metadata url list! using default list")
        candidates = self.metadata_urls
    else:
        candidates = usable

    probe_urls = []
    base_for = {}
    for base in candidates:
        probe = '{0}/{1}/meta-data/instance-id'.format(
            base, self.min_metadata_version)
        probe_urls.append(probe)
        base_for[probe] = base

    began = time.time()
    found = uhelp.wait_for_url(
        urls=probe_urls, max_wait=params.max_wait_seconds,
        timeout=params.timeout_seconds, status_cb=LOG.warning)

    if found:
        self.metadata_address = base_for[found]
        LOG.debug("Using metadata source: '%s'", self.metadata_address)
    else:
        LOG.critical("Giving up on md from %s after %s seconds",
                     probe_urls, int(time.time() - began))

    return bool(found)
def test_apt_v3_url_resolvable(self):
    """test_apt_v3_url_resolvable - Test resolving urls"""

    # is_resolvable_url should extract just the hostname and pass it
    # to util.is_resolvable.
    with mock.patch.object(util, 'is_resolvable') as mockresolve:
        util.is_resolvable_url("http://1.2.3.4/ubuntu")
    mockresolve.assert_called_with("1.2.3.4")

    with mock.patch.object(util, 'is_resolvable') as mockresolve:
        util.is_resolvable_url("http://us.archive.ubuntu.com/ubuntu")
    mockresolve.assert_called_with("us.archive.ubuntu.com")

    # former tests can leave this set (or not if the test is ran directly)
    # do a hard reset to ensure a stable result
    util._DNS_REDIRECT_IP = None
    # Fake getaddrinfo results: first the DNS-redirect detection probes
    # (bad), then the actual lookups (good).
    bad = [(None, None, None, "badname", ["10.3.2.1"])]
    good = [(None, None, None, "goodname", ["10.2.3.4"])]
    with mock.patch.object(socket, 'getaddrinfo',
                           side_effect=[bad, bad, bad,
                                        good, good]) as mocksock:
        ret = util.is_resolvable_url(
            "http://us.archive.ubuntu.com/ubuntu")
        ret2 = util.is_resolvable_url("http://1.2.3.4/ubuntu")
    # The redirect-detection sentinel names must have been probed
    # before the real hostnames.
    mocksock.assert_any_call('does-not-exist.example.com.', None,
                             0, 0, 1, 2)
    mocksock.assert_any_call('example.invalid.', None, 0, 0, 1, 2)
    mocksock.assert_any_call('us.archive.ubuntu.com', None)
    mocksock.assert_any_call('1.2.3.4', None)

    self.assertTrue(ret)
    self.assertTrue(ret2)

    # side effect need only bad ret after initial call
    with mock.patch.object(socket, 'getaddrinfo',
                           side_effect=[bad]) as mocksock:
        ret3 = util.is_resolvable_url("http://failme.com/ubuntu")
        calls = [call('failme.com', None)]
        mocksock.assert_has_calls(calls)
    self.assertFalse(ret3)
def read_md(address=None, platform_check=True):
    """Read GCE instance metadata into a result dict.

    :param address: metadata server base url; defaults to MD_V1_URL.
    :param platform_check: when True, bail out unless the platform
        reports GCE.
    :return: dict with 'meta-data', 'user-data', 'success', 'reason'
        (reason is set on any early failure) plus
        'platform_reports_gce'.
    """
    if address is None:
        address = MD_V1_URL

    ret = {'meta-data': None, 'user-data': None,
           'success': False, 'reason': None}
    ret['platform_reports_gce'] = platform_reports_gce()

    if platform_check and not ret['platform_reports_gce']:
        ret['reason'] = "Not running on GCE."
        return ret

    # if we cannot resolve the metadata server, then no point in trying
    if not util.is_resolvable_url(address):
        LOG.debug("%s is not resolvable", address)
        ret['reason'] = 'address "%s" is not resolvable' % address
        return ret

    # url_map: (our-key, path, required, is_text)
    url_map = [
        ('instance-id', ('instance/id', ), True, True),
        ('availability-zone', ('instance/zone', ), True, True),
        ('local-hostname', ('instance/hostname', ), True, True),
        ('public-keys', ('project/attributes/sshKeys',
                         'instance/attributes/ssh-keys'), False, True),
        ('user-data', ('instance/attributes/user-data', ), False, False),
        ('user-data-encoding',
         ('instance/attributes/user-data-encoding', ), False, True),
    ]

    metadata_fetcher = GoogleMetadataFetcher(address)
    md = {}
    # iterate over url_map keys to get metadata items
    for (mkey, paths, required, is_text) in url_map:
        value = None
        # Later paths override earlier ones when both return a value.
        for path in paths:
            new_value = metadata_fetcher.get_value(path, is_text)
            if new_value is not None:
                value = new_value
        if required and value is None:
            msg = "required key %s returned nothing. not GCE"
            ret['reason'] = msg % mkey
            return ret
        md[mkey] = value

    if md['public-keys']:
        lines = md['public-keys'].splitlines()
        md['public-keys'] = [_trim_key(k) for k in lines]

    if md['availability-zone']:
        # Zone arrives as 'projects/<n>/zones/<zone>'; keep the last part.
        md['availability-zone'] = md['availability-zone'].split('/')[-1]

    encoding = md.get('user-data-encoding')
    if encoding:
        if encoding == 'base64':
            md['user-data'] = b64decode(md['user-data'])
        else:
            LOG.warning('unknown user-data-encoding: %s, ignoring',
                        encoding)

    # user-data is returned at top level, not inside meta-data.
    if 'user-data' in md:
        ret['user-data'] = md['user-data']
        del md['user-data']

    ret['meta-data'] = md
    ret['success'] = True
    return ret
def get_data(self):
    """Populate self.metadata from the GCE metadata service.

    Fetches each url_map entry (decoding text items); a failed
    *required* fetch means we are not on GCE.

    :return: True if at least one metadata item was retrieved.
    """
    # GCE metadata server requires a custom header since v1.
    # Fix: header values must be strings, not bool (matches the
    # str(True) usage elsewhere in this file).
    headers = {'X-Google-Metadata-Request': str(True)}

    # url_map: (our-key, path, required, is_text)
    url_map = [
        ('instance-id', 'instance/id', True, True),
        ('availability-zone', 'instance/zone', True, True),
        ('local-hostname', 'instance/hostname', True, True),
        ('public-keys', 'project/attributes/sshKeys', False, True),
        ('user-data', 'instance/attributes/user-data', False, False),
        ('user-data-encoding', 'instance/attributes/user-data-encoding',
         False, True),
    ]

    # if we cannot resolve the metadata server, then no point in trying
    if not util.is_resolvable_url(self.metadata_address):
        LOG.debug("%s is not resolvable", self.metadata_address)
        return False

    # iterate over url_map keys to get metadata items
    found = False
    for (mkey, path, required, is_text) in url_map:
        try:
            resp = url_helper.readurl(url=self.metadata_address + path,
                                      headers=headers)
            if resp.code == 200:
                found = True
                if is_text:
                    self.metadata[mkey] = util.decode_binary(resp.contents)
                else:
                    self.metadata[mkey] = resp.contents
            else:
                if required:
                    msg = "required url %s returned code %s. not GCE"
                    if not found:
                        LOG.debug(msg, path, resp.code)
                    else:
                        # Fix: .warn() is a deprecated alias of .warning().
                        LOG.warning(msg, path, resp.code)
                    return False
                else:
                    self.metadata[mkey] = None
        except url_helper.UrlError as e:
            if required:
                msg = "required url %s raised exception %s. not GCE"
                if not found:
                    LOG.debug(msg, path, e)
                else:
                    LOG.warning(msg, path, e)
                return False
            msg = "Failed to get %s metadata item: %s."
            LOG.debug(msg, path, e)
            self.metadata[mkey] = None

    if self.metadata['public-keys']:
        lines = self.metadata['public-keys'].splitlines()
        self.metadata['public-keys'] = [self._trim_key(k) for k in lines]

    encoding = self.metadata.get('user-data-encoding')
    if encoding:
        if encoding == 'base64':
            self.metadata['user-data'] = b64decode(
                self.metadata['user-data'])
        else:
            LOG.warning('unknown user-data-encoding: %s, ignoring',
                        encoding)

    return found
def read_md(address=None, platform_check=True):
    """Read GCE instance/project metadata into a result dict.

    :param address: metadata server base url; defaults to MD_V1_URL.
    :param platform_check: when True, bail out unless the platform
        reports GCE.
    :return: dict with 'meta-data', 'user-data', 'success', 'reason'
        plus 'platform_reports_gce'.
    """
    if address is None:
        address = MD_V1_URL

    ret = {'meta-data': None, 'user-data': None,
           'success': False, 'reason': None}
    ret['platform_reports_gce'] = platform_reports_gce()

    if platform_check and not ret['platform_reports_gce']:
        ret['reason'] = "Not running on GCE."
        return ret

    # If we cannot resolve the metadata server, then no point in trying.
    if not util.is_resolvable_url(address):
        LOG.debug("%s is not resolvable", address)
        ret['reason'] = 'address "%s" is not resolvable' % address
        return ret

    # url_map: (our-key, path, required, is_text, is_recursive)
    url_map = [
        ('instance-id', ('instance/id', ), True, True, False),
        ('availability-zone', ('instance/zone', ), True, True, False),
        ('local-hostname', ('instance/hostname', ), True, True, False),
        ('instance-data', ('instance/attributes', ), False, False, True),
        ('project-data', ('project/attributes', ), False, False, True),
    ]

    metadata_fetcher = GoogleMetadataFetcher(address)
    md = {}
    # Iterate over url_map keys to get metadata items.
    for (mkey, paths, required, is_text, is_recursive) in url_map:
        value = None
        for path in paths:
            new_value = metadata_fetcher.get_value(path, is_text,
                                                   is_recursive)
            if new_value is not None:
                value = new_value
        if required and value is None:
            # Fix: this message was split by a stray literal newline in
            # the original source (a syntax error in a plain string).
            msg = "required key %s returned nothing. not GCE"
            ret['reason'] = msg % mkey
            return ret
        md[mkey] = value

    instance_data = json.loads(md['instance-data'] or '{}')
    project_data = json.loads(md['project-data'] or '{}')
    # Instance-level keys win; project keys only apply when not blocked
    # and no instance-level sshKeys exist.
    valid_keys = [instance_data.get('sshKeys'),
                  instance_data.get('ssh-keys')]
    block_project = instance_data.get('block-project-ssh-keys',
                                      '').lower()
    if block_project != 'true' and not instance_data.get('sshKeys'):
        valid_keys.append(project_data.get('ssh-keys'))
        valid_keys.append(project_data.get('sshKeys'))
    public_keys_data = '\n'.join([key for key in valid_keys if key])
    md['public-keys-data'] = public_keys_data.splitlines()

    if md['availability-zone']:
        md['availability-zone'] = md['availability-zone'].split('/')[-1]

    if 'user-data' in instance_data:
        # instance_data was json, so values are all utf-8 strings.
        ud = instance_data['user-data'].encode("utf-8")
        encoding = instance_data.get('user-data-encoding')
        if encoding == 'base64':
            ud = b64decode(ud)
        elif encoding:
            LOG.warning('unknown user-data-encoding: %s, ignoring',
                        encoding)
        ret['user-data'] = ud

    ret['meta-data'] = md
    ret['success'] = True
    return ret
def read_md(address=None, platform_check=True):
    """Read GCE instance/project metadata into a result dict.

    :param address: metadata server base url; defaults to MD_V1_URL.
    :param platform_check: when True, bail out unless the platform
        reports GCE.
    :return: dict with 'meta-data', 'user-data', 'success', 'reason'
        plus 'platform_reports_gce'.
    """
    if address is None:
        address = MD_V1_URL

    ret = {'meta-data': None, 'user-data': None,
           'success': False, 'reason': None}
    ret['platform_reports_gce'] = platform_reports_gce()

    if platform_check and not ret['platform_reports_gce']:
        ret['reason'] = "Not running on GCE."
        return ret

    # If we cannot resolve the metadata server, then no point in trying.
    if not util.is_resolvable_url(address):
        LOG.debug("%s is not resolvable", address)
        ret['reason'] = 'address "%s" is not resolvable' % address
        return ret

    # url_map: (our-key, path, required, is_text, is_recursive)
    url_map = [
        ('instance-id', ('instance/id',), True, True, False),
        ('availability-zone', ('instance/zone',), True, True, False),
        ('local-hostname', ('instance/hostname',), True, True, False),
        ('instance-data', ('instance/attributes',), False, False, True),
        ('project-data', ('project/attributes',), False, False, True),
    ]

    metadata_fetcher = GoogleMetadataFetcher(address)
    md = {}
    # Iterate over url_map keys to get metadata items.
    for (mkey, paths, required, is_text, is_recursive) in url_map:
        value = None
        for path in paths:
            new_value = metadata_fetcher.get_value(path, is_text,
                                                   is_recursive)
            if new_value is not None:
                value = new_value
        if required and value is None:
            # Fix: this message was split by a stray literal newline in
            # the original source (a syntax error in a plain string).
            msg = "required key %s returned nothing. not GCE"
            ret['reason'] = msg % mkey
            return ret
        md[mkey] = value

    instance_data = json.loads(md['instance-data'] or '{}')
    project_data = json.loads(md['project-data'] or '{}')
    # Instance-level keys win; project keys only apply when not blocked
    # and no instance-level sshKeys exist.
    valid_keys = [instance_data.get('sshKeys'),
                  instance_data.get('ssh-keys')]
    block_project = instance_data.get('block-project-ssh-keys',
                                      '').lower()
    if block_project != 'true' and not instance_data.get('sshKeys'):
        valid_keys.append(project_data.get('ssh-keys'))
        valid_keys.append(project_data.get('sshKeys'))
    public_keys_data = '\n'.join([key for key in valid_keys if key])
    md['public-keys-data'] = public_keys_data.splitlines()

    if md['availability-zone']:
        md['availability-zone'] = md['availability-zone'].split('/')[-1]

    if 'user-data' in instance_data:
        # instance_data was json, so values are all utf-8 strings.
        ud = instance_data['user-data'].encode("utf-8")
        encoding = instance_data.get('user-data-encoding')
        if encoding == 'base64':
            ud = b64decode(ud)
        elif encoding:
            LOG.warning('unknown user-data-encoding: %s, ignoring',
                        encoding)
        ret['user-data'] = ud

    ret['meta-data'] = md
    ret['success'] = True
    return ret
def read_md(address=None, url_params=None, platform_check=True):
    """Read GCE instance/project metadata into a result dict.

    :param address: metadata server base url; defaults to MD_V1_URL.
    :param url_params: object providing num_retries and
        sec_between_retries for the fetcher.  NOTE(review): a None
        default here would raise AttributeError below — callers appear
        expected to always pass it; confirm before relying on default.
    :param platform_check: when True, bail out unless the platform
        reports GCE.
    :return: dict with 'meta-data', 'user-data', 'success', 'reason'
        plus 'platform_reports_gce'.
    """
    if address is None:
        address = MD_V1_URL

    ret = {
        "meta-data": None,
        "user-data": None,
        "success": False,
        "reason": None,
    }
    ret["platform_reports_gce"] = platform_reports_gce()

    if platform_check and not ret["platform_reports_gce"]:
        ret["reason"] = "Not running on GCE."
        return ret

    # If we cannot resolve the metadata server, then no point in trying.
    if not util.is_resolvable_url(address):
        LOG.debug("%s is not resolvable", address)
        ret["reason"] = 'address "%s" is not resolvable' % address
        return ret

    # url_map: (our-key, path, required, is_text, is_recursive)
    url_map = [
        ("instance-id", ("instance/id",), True, True, False),
        ("availability-zone", ("instance/zone",), True, True, False),
        ("local-hostname", ("instance/hostname",), True, True, False),
        ("instance-data", ("instance/attributes",), False, False, True),
        ("project-data", ("project/attributes",), False, False, True),
    ]
    metadata_fetcher = GoogleMetadataFetcher(
        address, url_params.num_retries, url_params.sec_between_retries
    )
    md = {}
    # Iterate over url_map keys to get metadata items.
    for (mkey, paths, required, is_text, is_recursive) in url_map:
        value = None
        for path in paths:
            new_value = metadata_fetcher.get_value(path, is_text,
                                                   is_recursive)
            if new_value is not None:
                value = new_value
        if required and value is None:
            # Fix: this message was split by a stray literal newline in
            # the original source (a syntax error in a plain string).
            msg = "required key %s returned nothing. not GCE"
            ret["reason"] = msg % mkey
            return ret
        md[mkey] = value

    instance_data = json.loads(md["instance-data"] or "{}")
    project_data = json.loads(md["project-data"] or "{}")
    # Instance-level keys win; project keys only apply when not blocked
    # and no instance-level sshKeys exist.
    valid_keys = [instance_data.get("sshKeys"),
                  instance_data.get("ssh-keys")]
    block_project = instance_data.get("block-project-ssh-keys",
                                      "").lower()
    if block_project != "true" and not instance_data.get("sshKeys"):
        valid_keys.append(project_data.get("ssh-keys"))
        valid_keys.append(project_data.get("sshKeys"))
    public_keys_data = "\n".join([key for key in valid_keys if key])
    md["public-keys-data"] = public_keys_data.splitlines()

    if md["availability-zone"]:
        md["availability-zone"] = md["availability-zone"].split("/")[-1]

    if "user-data" in instance_data:
        # instance_data was json, so values are all utf-8 strings.
        ud = instance_data["user-data"].encode("utf-8")
        encoding = instance_data.get("user-data-encoding")
        if encoding == "base64":
            ud = b64decode(ud)
        elif encoding:
            LOG.warning("unknown user-data-encoding: %s, ignoring",
                        encoding)
        ret["user-data"] = ud

    ret["meta-data"] = md
    ret["success"] = True
    return ret