コード例 #1
0
    def _crawl_metadata(self):
        """Crawl metadata service when available.

        @returns: Dictionary with all metadata discovered for this datasource.
        @raise: InvalidMetaDataException on unreadable or broken
            metadata.
        """
        try:
            if not self.wait_for_metadata_service():
                raise sources.InvalidMetaDataException(
                    'No active metadata service found')
        except IOError as e:
            # Chain the original IOError so the full traceback is preserved
            # (consistent with the BrokenMetadata handler below).
            raise sources.InvalidMetaDataException(
                'IOError contacting metadata service: {error}'.format(
                    error=str(e))) from e

        url_params = self.get_url_params()

        try:
            # log_time wraps the crawl so its duration is logged at debug
            # level alongside the result.
            result = util.log_time(LOG.debug,
                                   'Crawl of openstack metadata service',
                                   read_metadata_service,
                                   args=[self.metadata_address],
                                   kwargs={
                                       'ssl_details': self.ssl_details,
                                       'retries': url_params.num_retries,
                                       'timeout': url_params.timeout_seconds
                                   })
        except openstack.NonReadable as e:
            # Chain the cause for consistency with the handlers above/below.
            raise sources.InvalidMetaDataException(str(e)) from e
        except (openstack.BrokenMetadata, IOError) as e:
            msg = 'Broken metadata address {addr}'.format(
                addr=self.metadata_address)
            raise sources.InvalidMetaDataException(msg) from e
        return result
コード例 #2
0
ファイル: DataSourceLXD.py プロジェクト: canonical/cloud-init
def _raw_instance_data_to_dict(metadata_type: str, metadata_value) -> dict:
    """Convert raw instance data from str, bytes, YAML to dict

    :param metadata_type: string, one of as: meta-data, vendor-data, user-data
        network-config

    :param metadata_value: str, bytes or dict representing or instance-data.

    :raises: InvalidMetaDataError on invalid instance-data content.
    """
    if isinstance(metadata_value, dict):
        return metadata_value
    if metadata_value is None:
        return {}
    try:
        parsed_metadata = util.load_yaml(metadata_value)
    except AttributeError as exc:  # not str or bytes
        raise sources.InvalidMetaDataException(
            "Invalid {md_type}. Expected str, bytes or dict but found:"
            " {value}".format(md_type=metadata_type,
                              value=metadata_value)) from exc
    if parsed_metadata is None:
        raise sources.InvalidMetaDataException(
            "Invalid {md_type} format. Expected YAML but found:"
            " {value}".format(md_type=metadata_type, value=metadata_value))
    return parsed_metadata
コード例 #3
0
    def crawl_metadata(self):
        """Walk all instance metadata sources returning a dict on success.

        Candidate sources are tried in order: "IMDS" (only when a
        reprovision marker file exists), the seed dir, any possible Azure
        provisioning devices, and finally the cached data dir.

        @return: A dictionary of any metadata content for this instance.
        @raise: InvalidMetaDataException when the expected metadata service is
            unavailable, broken or disabled.
        """
        crawled_data = {}
        # azure removes/ejects the cdrom containing the ovf-env.xml
        # file on reboot.  So, in order to successfully reboot we
        # need to look in the datadir and consider that valid
        ddir = self.ds_cfg['data_dir']

        candidates = [self.seed_dir]
        # A leftover reprovision marker means a prior boot started the
        # "preprovisioned pool server" flow; try IMDS first in that case.
        if os.path.isfile(REPROVISION_MARKER_FILE):
            candidates.insert(0, "IMDS")
        candidates.extend(list_possible_azure_ds_devs())
        if ddir:
            candidates.append(ddir)

        found = None
        reprovision = False
        for cdev in candidates:
            try:
                if cdev == "IMDS":
                    # No local source to load; _reprovision() below supplies
                    # ret once perform_reprovision is True.
                    ret = None
                    reprovision = True
                elif cdev.startswith("/dev/"):
                    if util.is_FreeBSD():
                        # FreeBSD needs an explicit udf mount type and no sync.
                        ret = util.mount_cb(cdev,
                                            load_azure_ds_dir,
                                            mtype="udf",
                                            sync=False)
                    else:
                        ret = util.mount_cb(cdev, load_azure_ds_dir)
                else:
                    ret = load_azure_ds_dir(cdev)

            except NonAzureDataSource:
                # Not an Azure source; keep trying remaining candidates.
                continue
            except BrokenAzureDataSource as exc:
                # Azure source found but unusable: abort the whole crawl.
                msg = 'BrokenAzureDataSource: %s' % exc
                raise sources.InvalidMetaDataException(msg)
            except util.MountFailedError:
                LOG.warning("%s was not mountable", cdev)
                continue

            perform_reprovision = reprovision or self._should_reprovision(ret)
            if perform_reprovision:
                if util.is_FreeBSD():
                    msg = "Free BSD is not supported for PPS VMs"
                    LOG.error(msg)
                    raise sources.InvalidMetaDataException(msg)
                # Poll/fetch the reprovision data, replacing any local ret.
                ret = self._reprovision()
            imds_md = get_metadata_from_imds(self.fallback_interface,
                                             retries=10)
            # ret is the (metadata, userdata, cfg, files) tuple produced by
            # load_azure_ds_dir or _reprovision above.
            (md, userdata_raw, cfg, files) = ret
            self.seed = cdev
            crawled_data.update({
                'cfg':
                cfg,
                'files':
                files,
                'metadata':
                util.mergemanydict([md, {
                    'imds': imds_md
                }]),
                'userdata_raw':
                userdata_raw
            })
            found = cdev

            LOG.debug("found datasource in %s", cdev)
            break

        if not found:
            raise sources.InvalidMetaDataException('No Azure metadata found')

        if found == ddir:
            LOG.debug("using files cached in %s", ddir)

        seed = _get_random_seed()
        if seed:
            crawled_data['metadata']['random_seed'] = seed
        crawled_data['metadata']['instance-id'] = util.read_dmi_data(
            'system-uuid')

        # perform_reprovision is always bound here: reaching this point means
        # the loop body ran to its break (otherwise the "not found" raise
        # above would have fired first).
        if perform_reprovision:
            LOG.info("Reporting ready to Azure after getting ReprovisionData")
            # Reuse the existing ephemeral DHCP lease when the fallback NIC is
            # still up; otherwise bring up a fresh ephemeral DHCPv4 context.
            use_cached_ephemeral = (net.is_up(self.fallback_interface) and
                                    getattr(self, '_ephemeral_dhcp_ctx', None))
            if use_cached_ephemeral:
                self._report_ready(lease=self._ephemeral_dhcp_ctx.lease)
                self._ephemeral_dhcp_ctx.clean_network()  # Teardown ephemeral
            else:
                with EphemeralDHCPv4() as lease:
                    self._report_ready(lease=lease)

        return crawled_data
コード例 #4
0
ファイル: DataSourceLXD.py プロジェクト: canonical/cloud-init
def read_metadata(api_version: str = LXD_SOCKET_API_VERSION,
                  metadata_only: bool = False) -> dict:
    """Fetch metadata from the /dev/lxd/socket routes.

    Perform a number of HTTP GETs on known routes on the devlxd socket API.
    Minimally all containers must respond to http://lxd/1.0/meta-data when
    the LXD configuration setting `security.devlxd` is true.

    When `security.devlxd` is false, no /dev/lxd/socket file exists. This
    datasource will return False from `is_platform_viable` in that case.

    Perform a GET of `<LXD_SOCKET_API_VERSION>/config` and walk all `user.*`
    configuration keys, storing all keys and values under a dict key
        LXD_SOCKET_API_VERSION: config {...}.

    In the presence of the following optional user config keys,
    create top level aliases:
      - user.user-data -> user-data
      - user.vendor-data -> vendor-data
      - user.network-config -> network-config

    :param api_version: devlxd API version path component to query.
    :param metadata_only: when True, return after fetching only meta-data.

    :raises: InvalidMetaDataException on HTTP errors for mandatory routes or
        unparseable config JSON.

    :return:
        A dict with the following mandatory key: meta-data.
        Optional keys: user-data, vendor-data, network-config, network_mode

        Below <LXD_SOCKET_API_VERSION> is a dict representation of all raw
        configuration keys and values provided to the container surfaced by
        the socket under the /1.0/config/ route.
    """
    md: dict = {}
    lxd_url = "http://lxd"
    version_url = lxd_url + "/" + api_version + "/"
    with requests.Session() as session:
        session.mount(version_url, LXDSocketAdapter())

        def _get(url):
            """GET *url* on the devlxd socket, logging the HTTP status."""
            resp = session.get(url)
            LOG.debug("[GET] [HTTP:%d] %s", resp.status_code, url)
            return resp

        def _get_or_raise(url):
            """GET *url*; raise InvalidMetaDataException on non-2xx status."""
            resp = _get(url)
            if not resp.ok:
                raise sources.InvalidMetaDataException(
                    "Invalid HTTP response [{code}] from {route}: "
                    "{resp}".format(
                        code=resp.status_code,
                        route=url,
                        resp=resp.text,
                    ))
            return resp

        # Raw meta-data as text
        md_route = "{route}meta-data".format(route=version_url)
        md["meta-data"] = _get_or_raise(md_route).text
        if metadata_only:
            return md  # Skip network-data, vendor-data, user-data

        md = {
            "_metadata_api_version": api_version,  # Document API version read
            "config": {},
            "meta-data": md["meta-data"],
        }

        config_url = version_url + "config"
        # Represent all advertised/available config routes under
        # the dict path {LXD_SOCKET_API_VERSION: {config: {...}}.
        response = _get_or_raise(config_url)
        try:
            config_routes = response.json()
        except JSONDecodeError as exc:
            raise sources.InvalidMetaDataException(
                "Unable to determine cloud-init config from {route}."
                " Expected JSON but found: {resp}".format(
                    route=config_url, resp=response.text)) from exc

        # Sorting keys to ensure we always process in alphabetical order.
        # cloud-init.* keys will sort before user.* keys which is preferred
        # precedence.
        for config_route in sorted(config_routes):
            url = "http://lxd{route}".format(route=config_route)
            response = _get(url)
            if response.ok:
                cfg_key = config_route.rpartition("/")[-1]
                # Leave raw data values/format unchanged to represent it in
                # instance-data.json for cloud-init query or jinja template
                # use.
                md["config"][cfg_key] = response.text
                # Promote common CONFIG_KEY_ALIASES to top-level keys.
                if cfg_key in CONFIG_KEY_ALIASES:
                    # Due to sort of config_routes, promote cloud-init.*
                    # aliases before user.*. This allows user.* keys to act as
                    # fallback config on old LXD, with new cloud-init images.
                    if CONFIG_KEY_ALIASES[cfg_key] not in md:
                        md[CONFIG_KEY_ALIASES[cfg_key]] = response.text
                    else:
                        LOG.warning(
                            "Ignoring LXD config %s in favor of %s value.",
                            cfg_key,
                            cfg_key.replace("user", "cloud-init", 1),
                        )
            else:
                LOG.debug(
                    "Skipping %s on [HTTP:%d]:%s",
                    url,
                    response.status_code,
                    response.text,
                )
    return md