Beispiel #1
0
def has_url_connectivity(url_data: Dict[str, Any]) -> bool:
    """Return True when the instance has access to the provided URL.

    Logs a warning and returns False when url_data is not the expected
    format.

    :param url_data: dictionary of kwargs to send to readurl. E.g.:

        has_url_connectivity({
            "url": "http://example.invalid",
            "headers": {"some": "header"},
            "timeout": 10
        })
    :return: True when a GET of the URL succeeds, False otherwise.
    """
    if "url" not in url_data:
        LOG.warning(
            "Ignoring connectivity check. No 'url' to check in %s", url_data
        )
        return False
    url = url_data["url"]
    # str.startswith accepts a tuple of prefixes; no need to build a list
    # just to feed any().
    if not url.startswith(("http://", "https://")):
        LOG.warning(
            "Ignoring connectivity check. Expected URL beginning with http*://"
            " received '%s'",
            url,
        )
        return False
    # Preserve original behavior: default the caller's timeout to 5 seconds
    # when not provided (this intentionally mutates url_data).
    url_data.setdefault("timeout", 5)
    try:
        readurl(**url_data)
    except UrlError:
        return False
    return True
    def _get_metadata(self):
        """Download metadata and user-data from the datasource endpoints.

        :return: ``(metadata_dict, user_data_str)`` on success.
            NOTE(review): if every privileged source port fails, the loop
            falls through and the method implicitly returns None -- confirm
            callers tolerate that.
        """
        resp = url_helper.readurl(
            self.metadata_address,
            data=None,
            timeout=self.timeout,
            retries=self.retries
        )
        metadata = json.loads(util.decode_binary(resp.contents))
        LOG.debug('metadata downloaded')

        # try to make a request on the first privileged port available
        for port in range(1, 1024):
            try:
                session = requests.Session()
                # Bind the client side of the connection to a privileged
                # (<1024) source port: the user-data API uses that to check
                # the request comes from root (non-root users cannot bind
                # ports below 1024).
                session.mount(
                    'http://',
                    SourceAddressAdapter(source_address=('0.0.0.0', port))
                )
                resp = url_helper.readurl(
                    self.userdata_address,
                    data=None,
                    timeout=self.timeout,
                    retries=self.retries,
                    session=session
                )
                user_data = util.decode_binary(resp.contents)
                LOG.debug('user-data downloaded')
                return metadata, user_data

            except url_helper.UrlError:  # try next port
                pass
Beispiel #3
0
def read_metadata(endpoint_base=METADATA_ENDPOINT,
                  sys_uuid=None,
                  version='2013-10-17'):
    """Read metadata, return a dictionary.

    Each path listed in the index will be represented in the dictionary.
    If the path ends in .json, then the content will be decoded and
    populated into the dictionary.

    The system uuid (/sys/class/dmi/id/product_uuid) is also populated.
    Example: given paths = ('user_data', 'meta_data.json')
    This would return:
      {version: {'user_data': b'blob', 'meta_data': json.loads(blob.decode())
                 'system_uuid': '3b54f2e0-3ab2-458d-b770-af9926eee3b2'}}
    """
    endpoint = combine_url(endpoint_base, version) + "/"
    if sys_uuid is None:
        sys_uuid = _read_system_uuid()
    if not sys_uuid:
        raise sources.BrokenMetadata("Failed to read system uuid.")

    try:
        resp = readurl(endpoint)
        if not resp.ok():
            raise sources.BrokenMetadata("Bad response from %s: %s" %
                                         (endpoint, resp.code))
    except UrlError as e:
        raise sources.BrokenMetadata("Failed to read index at %s: %s" %
                                     (endpoint, e))

    index_entries = _load_index(resp.contents.decode('utf-8'))
    LOG.debug("index url %s contained: %s", endpoint, index_entries)

    # meta_data.json is required.
    mdj = 'meta_data.json'
    if mdj not in index_entries:
        raise sources.BrokenMetadata(
            "Required field '%s' missing in index at %s" % (mdj, endpoint))

    result = {'system_uuid': sys_uuid}
    for entry in index_entries:
        content = readurl(combine_url(endpoint, entry)).contents
        if entry.endswith(".json"):
            # Strip the .json suffix for the dictionary key.
            result[entry.rpartition(".")[0]] = json.loads(
                content.decode('utf-8'))
        else:
            result[entry] = content

    return {version: result}
Beispiel #4
0
def has_url_connectivity(url):
    """Return True when the instance has access to the provided URL.

    Logs a warning and returns False if url is not the expected format.

    :param url: URL to check; must begin with http:// or https://.
    :return: True when a GET of the URL (5s timeout) succeeds.
    """
    # str.startswith accepts a tuple of prefixes; no need to build a list
    # just to feed any().
    if not url.startswith(('http://', 'https://')):
        LOG.warning(
            "Ignoring connectivity check. Expected URL beginning with http*://"
            " received '%s'", url)
        return False
    try:
        readurl(url, timeout=5)
    except UrlError:
        return False
    return True
Beispiel #5
0
def has_url_connectivity(url):
    """Return true when the instance has access to the provided URL

    Logs a warning if url is not the expected format.
    """
    looks_like_http = (
        url.startswith('http://') or url.startswith('https://'))
    if not looks_like_http:
        LOG.warning(
            "Ignoring connectivity check. Expected URL beginning with http*://"
            " received '%s'", url)
        return False
    try:
        readurl(url, timeout=5)
    except UrlError:
        return False
    return True
Beispiel #6
0
    def _poll_imds(self):
        """Poll IMDS for the new provisioning data until we get a valid
        response. Then return the returned JSON object.

        Ready is reported (once) before polling, unless a marker file from
        a previous run shows it was already reported.
        """
        url = IMDS_URL + "reprovisiondata?api-version=2017-04-02"
        headers = {"Metadata": "true"}
        # Skip the report-ready step when the marker file already exists.
        report_ready = bool(not os.path.isfile(REPORTED_READY_MARKER_FILE))
        LOG.debug("Start polling IMDS")

        while True:
            try:
                # Save our EphemeralDHCPv4 context so we avoid repeated dhcp
                self._ephemeral_dhcp_ctx = EphemeralDHCPv4()
                lease = self._ephemeral_dhcp_ctx.obtain_lease()
                if report_ready:
                    path = REPORTED_READY_MARKER_FILE
                    LOG.info("Creating a marker file to report ready: %s",
                             path)
                    util.write_file(
                        path, "{pid}: {time}\n".format(pid=os.getpid(),
                                                       time=time()))
                    self._report_ready(lease=lease)
                    report_ready = False
                # infinite=True with an exception_cb presumably retries
                # inside readurl while retry_on_url_exc approves; only
                # rejected errors reach the except clause below -- see
                # url_helper.readurl for the exact contract.
                return readurl(url,
                               timeout=1,
                               headers=headers,
                               exception_cb=retry_on_url_exc,
                               infinite=True,
                               log_req_resp=False).contents
            except UrlError:
                # Teardown our EphemeralDHCPv4 context on failure as we retry
                self._ephemeral_dhcp_ctx.clean_network()
                pass
Beispiel #7
0
def install_chef(cloud, chef_cfg, log):
    """Install the chef client according to chef_cfg's 'install_type'.

    :param cloud: cloud instance providing the target distro.
    :param chef_cfg: 'chef' configuration dictionary.
    :param log: logger used to report unknown install types.
    :return: True when the chef client should subsequently be executed.
    """
    # If chef is not installed, we install chef based on 'install_type'
    install_type = util.get_cfg_option_str(chef_cfg, 'install_type',
                                           'packages')
    run = util.get_cfg_option_bool(chef_cfg, 'exec', default=False)
    if install_type == "gems":
        # This will install and run the chef-client from gems
        chef_version = util.get_cfg_option_str(chef_cfg, 'version', None)
        ruby_version = util.get_cfg_option_str(chef_cfg, 'ruby_version',
                                               RUBY_VERSION_DEFAULT)
        install_chef_from_gems(cloud.distro, ruby_version, chef_version)
        # Retain backwards compat, by preferring True instead of False
        # when not provided/overriden...
        run = util.get_cfg_option_bool(chef_cfg, 'exec', default=True)
    elif install_type == 'packages':
        # This will install and run the chef-client from packages
        cloud.distro.install_packages(('chef',))
    elif install_type == 'omnibus':
        # This will install as a omnibus unified package
        url = util.get_cfg_option_str(chef_cfg, "omnibus_url", OMNIBUS_URL)
        retries = max(0, util.get_cfg_option_int(chef_cfg,
                                                 "omnibus_url_retries",
                                                 default=OMNIBUS_URL_RETRIES))
        content = url_helper.readurl(url=url, retries=retries)
        with util.tempdir() as tmpd:
            # Use tmpdir over tmpfile to avoid 'text file busy' on execute
            tmpf = "%s/chef-omnibus-install" % tmpd
            # BUG FIX: 0700 is a Python 2 octal literal and a SyntaxError
            # under Python 3; the portable spelling is 0o700.
            util.write_file(tmpf, str(content), mode=0o700)
            util.subp([tmpf], capture=False)
    else:
        log.warn("Unknown chef install type '%s'", install_type)
        run = False
    return run
Beispiel #8
0
def install_puppet_aio(url=AIO_INSTALL_URL,
                       version=None,
                       collection=None,
                       cleanup=True):
    """Install puppet-agent from the puppetlabs repositories using the
    one-shot shell script.

    :param url: URL from where to download the install script
    :param version: version to install, blank defaults to latest
    :param collection: collection to install, blank defaults to latest
    :param cleanup: whether to purge the puppetlabs repo after installation
    """
    script_args = []
    if version is not None:
        script_args = ['-v', version]
    if collection is not None:
        script_args += ['-c', collection]
    if cleanup:
        # Purge puppetlabs repos after installation
        script_args += ['--cleanup']

    install_script = url_helper.readurl(url=url, retries=5).contents

    # Use tmpdir over tmpfile to avoid 'text file busy' on execute
    with temp_utils.tempdir(needs_exe=True) as tmpd:
        script_path = os.path.join(tmpd, 'puppet-install')
        util.write_file(script_path, install_script, mode=0o700)
        return subp.subp([script_path] + script_args, capture=False)
Beispiel #9
0
    def get_metadata_api_version(self):
        """Return the most capable metadata API version the service offers.

        Probe each entry of extended_metadata_versions in order and return
        the first one answering 200; fall back to the datasource's
        min_metadata_version when none do.
        """
        # Assumes metadata service is already up
        headers = self._get_headers()
        for api_ver in self.extended_metadata_versions:
            url = "{0}/{1}/meta-data/instance-id".format(
                self.metadata_address, api_ver)
            try:
                resp = uhelp.readurl(url=url,
                                     headers=headers,
                                     headers_redact=AWS_TOKEN_REDACT)
            except uhelp.UrlError as e:
                LOG.debug("url %s raised exception %s", url, e)
                continue
            if resp.code == 200:
                LOG.debug("Found preferred metadata version %s", api_ver)
                return api_ver
            if resp.code == 404:
                LOG.debug("Metadata api version %s not present. Headers: %s",
                          api_ver, resp.headers)
        return self.min_metadata_version
Beispiel #10
0
 def post(self, url, data=None, extra_headers=None):
     """POST data to url with the session's default headers.

     When extra_headers is supplied it is merged over a copy of
     self.headers, so the instance defaults are never mutated.
     """
     request_headers = self.headers
     if extra_headers is not None:
         request_headers = self.headers.copy()
         request_headers.update(extra_headers)
     return url_helper.readurl(url, data=data, headers=request_headers,
                               timeout=5, retries=10, sec_between=5)
Beispiel #11
0
    def get_metadata_api_version(self):
        """Return the most capable metadata API version the service offers.

        Probe each entry of extended_metadata_versions in order and return
        the first one answering 200; fall back to the datasource's
        min_metadata_version when none do.
        """
        # Assumes metadata service is already up
        for api_ver in self.extended_metadata_versions:
            url = '{0}/{1}/meta-data/instance-id'.format(
                self.metadata_address, api_ver)
            try:
                resp = uhelp.readurl(url=url)
            except uhelp.UrlError as e:
                LOG.debug('url %s raised exception %s', url, e)
                continue
            if resp.code == 200:
                LOG.debug('Found preferred metadata version %s', api_ver)
                return api_ver
            if resp.code == 404:
                LOG.debug('Metadata api version %s not present. Headers: %s',
                          api_ver, resp.headers)
        return self.min_metadata_version
Beispiel #12
0
def install_chef(cloud, chef_cfg, log):
    """Install the chef client per 'install_type' in chef_cfg.

    Returns True when the chef client should subsequently be executed.
    """
    # If chef is not installed, we install chef based on 'install_type'
    install_type = util.get_cfg_option_str(chef_cfg, 'install_type',
                                           'packages')
    run = util.get_cfg_option_bool(chef_cfg, 'exec', default=False)
    if install_type == "gems":
        # This will install and run the chef-client from gems
        chef_version = util.get_cfg_option_str(chef_cfg, 'version', None)
        ruby_version = util.get_cfg_option_str(chef_cfg, 'ruby_version',
                                               RUBY_VERSION_DEFAULT)
        install_chef_from_gems(ruby_version, chef_version, cloud.distro)
        # Retain backwards compat, by preferring True instead of False
        # when not provided/overriden...
        run = util.get_cfg_option_bool(chef_cfg, 'exec', default=True)
    elif install_type == 'packages':
        # This will install and run the chef-client from packages
        cloud.distro.install_packages(('chef',))
    elif install_type == 'omnibus':
        # This will install as a omnibus unified package
        url = util.get_cfg_option_str(chef_cfg, "omnibus_url", OMNIBUS_URL)
        configured_retries = util.get_cfg_option_int(
            chef_cfg, "omnibus_url_retries", default=OMNIBUS_URL_RETRIES)
        content = url_helper.readurl(url=url,
                                     retries=max(0, configured_retries))
        with util.tempdir() as tmpd:
            # Use tmpdir over tmpfile to avoid 'text file busy' on execute
            script_file = "%s/chef-omnibus-install" % tmpd
            util.write_file(script_file, content, mode=0o700)
            util.subp([script_file], capture=False)
    else:
        log.warn("Unknown chef install type '%s'", install_type)
        run = False
    return run
Beispiel #13
0
def read_userdata(url, timeout=2, sec_between=2, retries=30):
    """Fetch userdata from url, raising RuntimeError on a bad response."""
    response = url_helper.readurl(url, timeout=timeout,
                                  sec_between=sec_between, retries=retries)
    if not response.ok():
        raise RuntimeError("unable to read userdata at %s" % url)
    return response.contents
Beispiel #14
0
def read_metadata(url, timeout=2, sec_between=2, retries=30):
    """Fetch metadata from url and return it parsed as YAML.

    :raises RuntimeError: when the HTTP response is not OK.
    """
    response = url_helper.readurl(url, timeout=timeout,
                                  sec_between=sec_between, retries=retries)
    if not response.ok():
        raise RuntimeError("unable to read metadata at %s" % url)
    return util.load_yaml(response.contents.decode())
Beispiel #15
0
def query_data_api_once(api_address, timeout, requests_session):
    """
    Retrieve user data or vendor data.

    Scaleway user/vendor data API returns HTTP/404 if user/vendor data is not
    set.

    This function calls `url_helper.readurl` but instead of considering
    HTTP/404 as an error that requires a retry, it considers it as empty
    user/vendor data.

    Also, be aware the user data/vendor API requires the source port to be
    below 1024 to ensure the client is root (since non-root users can't bind
    ports below 1024). If requests raises ConnectionError (EADDRINUSE), the
    caller should retry to call this function on an other port.
    """
    def _should_retry(_, exc):
        # Never retry on HTTP/404 or on a requests ConnectionError
        # (EADDRINUSE) -- both are handled in the caller's raise path.
        if exc.code == 404:
            return False
        return not isinstance(exc.cause, requests.exceptions.ConnectionError)

    try:
        resp = url_helper.readurl(
            api_address,
            data=None,
            timeout=timeout,
            # It's the caller's responsibility to recall this function in
            # case of exception. Don't let url_helper.readurl() retry by
            # itself.
            retries=0,
            session=requests_session,
            exception_cb=_should_retry)
        return util.decode_binary(resp.contents)
    except url_helper.UrlError as exc:
        # Empty user data.
        if exc.code == 404:
            return None
        raise
Beispiel #16
0
    def _poll_imds(self, report_ready=True):
        """Poll IMDS for the new provisioning data until we get a valid
        response. Then return the returned JSON object.

        :param report_ready: when True, report ready over the first DHCP
            lease before polling.
        """
        url = IMDS_URL + "?api-version=2017-04-02"
        headers = {"Metadata": "true"}
        LOG.debug("Start polling IMDS")

        def exc_cb(msg, exception):
            # Keep retrying inside readurl only on HTTP 404 (presumably the
            # provisioning data is not available yet -- confirm against the
            # readurl exception_cb contract).
            if isinstance(exception, UrlError) and exception.code == 404:
                return True
            # If we get an exception while trying to call IMDS, we
            # call DHCP and setup the ephemeral network to acquire the new IP.
            return False

        need_report = report_ready
        while True:
            try:
                with EphemeralDHCPv4() as lease:
                    if need_report:
                        self._report_ready(lease=lease)
                        need_report = False
                    return readurl(url,
                                   timeout=1,
                                   headers=headers,
                                   exception_cb=exc_cb,
                                   infinite=True).contents
            except UrlError:
                # Non-404 failure: the with-block tore down the DHCP
                # context; loop to acquire a fresh lease and retry.
                pass
Beispiel #17
0
    def _poll_imds(self):
        """Poll IMDS for the new provisioning data until we get a valid
        response. Then return the returned JSON object.

        Ready is reported (once) before polling, unless a marker file from
        a previous run shows it was already reported.
        """
        url = IMDS_URL + "reprovisiondata?api-version=2017-04-02"
        headers = {"Metadata": "true"}
        # Skip the report-ready step when the marker file already exists.
        report_ready = bool(not os.path.isfile(REPORTED_READY_MARKER_FILE))
        LOG.debug("Start polling IMDS")

        def exc_cb(msg, exception):
            # Keep retrying inside readurl only on HTTP 404 (presumably:
            # reprovision data not available yet).
            if isinstance(exception, UrlError) and exception.code == 404:
                return True
            # If we get an exception while trying to call IMDS, we
            # call DHCP and setup the ephemeral network to acquire the new IP.
            return False

        while True:
            try:
                with EphemeralDHCPv4() as lease:
                    if report_ready:
                        path = REPORTED_READY_MARKER_FILE
                        LOG.info("Creating a marker file to report ready: %s",
                                 path)
                        util.write_file(
                            path, "{pid}: {time}\n".format(pid=os.getpid(),
                                                           time=time()))
                        self._report_ready(lease=lease)
                        report_ready = False
                    return readurl(url,
                                   timeout=1,
                                   headers=headers,
                                   exception_cb=exc_cb,
                                   infinite=True).contents
            except UrlError:
                # Non-404 failure: loop to obtain a fresh DHCP lease.
                pass
Beispiel #18
0
 def get(self, url, secure=False):
     """GET url with the session's default headers.

     When secure is True, the extra secure headers are merged over a copy
     of self.headers so the instance defaults are never mutated.
     """
     request_headers = self.headers
     if secure:
         request_headers = self.headers.copy()
         request_headers.update(self.extra_secure_headers)
     return url_helper.readurl(url, headers=request_headers,
                               timeout=5, retries=10, sec_between=5)
def try_read_url(url, distro_name, reset_net=True):
    """Read url and return its contents decoded to str.

    :param url: URL to fetch (3 second timeout, 3 retries).
    :param distro_name: unused here; retained for interface compatibility.
    :param reset_net: unused here; retained for interface compatibility.
    """
    raw_data = url_helper.readurl(url, timeout=3, retries=3).contents

    # isinstance is the idiomatic type check (type(x) is bytes would also
    # miss bytes subclasses).
    if isinstance(raw_data, bytes):
        raw_data = raw_data.decode()

    return raw_data
 def _fetch(metadata_version: int, path: str) -> dict:
     """Fetch one IMDS path and return the JSON-decoded response body.

     The "Bearer Oracle" Authorization header is only sent for metadata
     versions above 1.

     NOTE(review): ``retries`` is a free variable resolved from an
     enclosing scope not visible here -- confirm it is defined at the
     point of use.
     """
     headers = {
         "Authorization": "Bearer Oracle"
     } if metadata_version > 1 else None
     return readurl(
         url=METADATA_PATTERN.format(version=metadata_version, path=path),
         headers=headers,
         retries=retries,
     )._response.json()
Beispiel #21
0
def _write_host_key_to_guest_attributes(key_type, key_value):
    """PUT a single host key into the guest-attributes hostkey namespace."""
    url = '%s/%s/%s' % (GUEST_ATTRIBUTES_URL, HOSTKEY_NAMESPACE, key_type)
    resp = url_helper.readurl(url=url,
                              data=key_value.encode('utf-8'),
                              headers=HEADERS,
                              request_method='PUT',
                              check_status=False)
    if resp.ok():
        LOG.debug('Wrote %s host key to guest attributes.', key_type)
    else:
        LOG.debug('Unable to write %s host key to guest attributes.', key_type)
def read_opc_metadata():
    """
    Fetch metadata from the /opc/ routes.

    :return:
        The JSON-decoded value of the /opc/v1/instance/ endpoint on the IMDS.
    """
    # retries=1 as requested by Oracle to address a potential race condition
    response = readurl(METADATA_ENDPOINT, retries=1)
    return json.loads(response._response.text)
Beispiel #23
0
    def _poll_imds(self):
        """Poll IMDS for the new provisioning data until we get a valid
        response. Then return the returned JSON object.

        Before polling, ready is reported (once) and the method waits via
        netlink for the media disconnect/connect accompanying the vnet
        switch.
        """
        url = IMDS_URL + "reprovisiondata?api-version=2017-04-02"
        headers = {"Metadata": "true"}
        nl_sock = None
        # Skip the report-ready step when the marker file already exists.
        report_ready = bool(not os.path.isfile(REPORTED_READY_MARKER_FILE))

        def exc_cb(msg, exception):
            # Keep retrying inside readurl only on HTTP 404; other errors
            # fall through to the outer retry loop.
            if isinstance(exception, UrlError) and exception.code == 404:
                return True
            # If we get an exception while trying to call IMDS, we
            # call DHCP and setup the ephemeral network to acquire the new IP.
            return False

        LOG.debug("Wait for vnetswitch to happen")
        while True:
            try:
                # Save our EphemeralDHCPv4 context so we avoid repeated dhcp
                self._ephemeral_dhcp_ctx = EphemeralDHCPv4()
                lease = self._ephemeral_dhcp_ctx.obtain_lease()
                if report_ready:
                    try:
                        nl_sock = netlink.create_bound_netlink_socket()
                    except netlink.NetlinkCreateSocketError as e:
                        # Without the netlink socket we cannot observe the
                        # media disconnect/connect; give up entirely.
                        LOG.warning(e)
                        self._ephemeral_dhcp_ctx.clean_network()
                        return
                    path = REPORTED_READY_MARKER_FILE
                    LOG.info("Creating a marker file to report ready: %s",
                             path)
                    util.write_file(
                        path, "{pid}: {time}\n".format(pid=os.getpid(),
                                                       time=time()))
                    self._report_ready(lease=lease)
                    report_ready = False
                    try:
                        netlink.wait_for_media_disconnect_connect(
                            nl_sock, lease['interface'])
                    except AssertionError as error:
                        LOG.error(error)
                        return
                    self._ephemeral_dhcp_ctx.clean_network()
                else:
                    return readurl(url,
                                   timeout=1,
                                   headers=headers,
                                   exception_cb=exc_cb,
                                   infinite=True,
                                   log_req_resp=False).contents
            except UrlError:
                # Teardown our EphemeralDHCPv4 context on failure as we retry
                self._ephemeral_dhcp_ctx.clean_network()
                pass
            finally:
                if nl_sock:
                    nl_sock.close()
 def _get_data(self, apply_filter=False):
     """Populate metadata/vendordata/userdata from the URL recorded on disk.

     :return: False when no URL has been recorded, True otherwise.
     """
     url = get_url_from_file()
     if url is None:
         return False
     payload = json.loads(url_helper.readurl(url).contents.decode())
     self.metadata = payload["metadata"]
     self.vendordata_raw = payload["vendordata_raw"]
     self.userdata_raw = payload["userdata_raw"]
     return True
 def get_data(self, apply_filter=False):
     """Load datasource fields from the JSON served at the recorded URL.

     :return: False when no URL has been recorded, True otherwise.
     """
     url = get_url_from_file()
     if url is None:
         return False
     payload = json.loads(url_helper.readurl(url).contents)
     self.metadata = payload["metadata"]
     self.vendordata_raw = payload["vendordata_raw"]
     self.userdata_raw = payload["userdata_raw"]
     return True
Beispiel #26
0
def handle(name, _cfg, cloud, log, _args):
    """Download RightScale hook URLs from userdata into the scripts dir."""
    try:
        userdata = cloud.get_userdata_raw()
    except Exception:
        log.debug("Failed to get raw userdata in module %s", name)
        return

    try:
        parsed = parse_qs(userdata)
        if not parsed or MY_HOOKNAME not in parsed:
            log.debug(
                "Skipping module %s, did not find %s in parsed raw userdata",
                name,
                MY_HOOKNAME,
            )
            return
    except Exception:
        util.logexc(log, "Failed to parse query string %s into a dictionary",
                    userdata)
        raise

    written = []
    errors = []

    # These will eventually be then ran by the cc_scripts_user
    # TODO(harlowja): maybe this should just be a new user data handler??
    # Instead of a late module that acts like a user data handler?
    scripts_d = cloud.get_ipath_cur("scripts")
    urls = parsed[MY_HOOKNAME]
    for i, url in enumerate(urls):
        fname = os.path.join(scripts_d, "rightscale-%02i" % (i))
        try:
            resp = uhelp.readurl(url)
            # Ensure its a valid http response (and something gotten)
            if resp.ok() and resp.contents:
                util.write_file(fname, resp, mode=0o700)
                written.append(fname)
        except Exception as e:
            errors.append(e)
            util.logexc(log, "%s failed to read %s and write %s", MY_NAME, url,
                        fname)

    if written:
        log.debug("Wrote out rightscale userdata to %s files", len(written))

    if len(written) != len(urls):
        log.debug("%s urls were skipped or failed", len(urls) - len(written))

    if errors:
        log.warning(
            "%s failed with exceptions, re-raising the last one",
            len(errors),
        )
        raise errors[-1]
    def get_data(self):
        """Crawl the metadata server and populate self.metadata.

        :return: True when at least one metadata item was fetched and every
            required item resolved; False otherwise.
        """
        # IBGCE metadata server requires a custom header since v1
        headers = {'X-Google-Metadata-Request': str(True)}

        # url_map: (our-key, path, required)
        url_map = [
            ('instance-id', 'instance/id', True),
            ('availability-zone', 'instance/zone', True),
            ('local-hostname', 'instance/hostname', True),
            ('public-keys', 'project/attributes/sshKeys', False),
            ('user-data', 'instance/attributes/user-data', False),
        ]

        # if we cannot resolve the metadata server, then no point in trying
        if not util.is_resolvable_url(self.metadata_address):
            LOG.debug("%s is not resolvable", self.metadata_address)
            return False

        # iterate over url_map keys to get metadata items
        found = False
        for (mkey, path, required) in url_map:
            try:
                resp = url_helper.readurl(url=self.metadata_address + path,
                                          headers=headers)
                if resp.code == 200:
                    found = True
                    self.metadata[mkey] = resp.contents
                else:
                    if required:
                        msg = "required url %s returned code %s. not IBGCE"
                        # Debug when nothing was found yet (likely just not
                        # this platform); warn once partially populated.
                        if not found:
                            LOG.debug(msg, path, resp.code)
                        else:
                            LOG.warn(msg, path, resp.code)
                        return False
                    else:
                        self.metadata[mkey] = None
            except url_helper.UrlError as e:
                if required:
                    msg = "required url %s raised exception %s. not IBGCE"
                    if not found:
                        LOG.debug(msg, path, e)
                    else:
                        LOG.warn(msg, path, e)
                    return False
                # Optional item: record None and keep going.
                msg = "Failed to get %s metadata item: %s."
                LOG.debug(msg, path, e)

                self.metadata[mkey] = None

        # Normalize the raw sshKeys blob into a list of trimmed key lines.
        if self.metadata['public-keys']:
            lines = self.metadata['public-keys'].splitlines()
            self.metadata['public-keys'] = [self._trim_key(k) for k in lines]

        return found
Beispiel #28
0
    def get_data(self):
        """Crawl the GCE metadata server and populate self.metadata.

        :return: True when at least one metadata item was fetched and every
            required item resolved; False otherwise.
        """
        # GCE metadata server requires a custom header since v1.
        # BUG FIX: HTTP header values must be strings; pass str(True)
        # ("True") rather than the bool True, which HTTP libraries reject
        # (and which matches the sibling IBGCE implementation).
        headers = {"X-Google-Metadata-Request": str(True)}

        # url_map: (our-key, path, required)
        url_map = [
            ("instance-id", "instance/id", True),
            ("availability-zone", "instance/zone", True),
            ("local-hostname", "instance/hostname", True),
            ("public-keys", "project/attributes/sshKeys", False),
            ("user-data", "instance/attributes/user-data", False),
        ]

        # if we cannot resolve the metadata server, then no point in trying
        if not util.is_resolvable_url(self.metadata_address):
            LOG.debug("%s is not resolvable", self.metadata_address)
            return False

        # iterate over url_map keys to get metadata items
        found = False
        for (mkey, path, required) in url_map:
            try:
                resp = url_helper.readurl(
                    url=self.metadata_address + path, headers=headers)
                if resp.code == 200:
                    found = True
                    self.metadata[mkey] = resp.contents
                else:
                    if required:
                        msg = "required url %s returned code %s. not GCE"
                        # Debug when nothing was found yet (likely just not
                        # this platform); warn once partially populated.
                        if not found:
                            LOG.debug(msg, path, resp.code)
                        else:
                            LOG.warn(msg, path, resp.code)
                        return False
                    else:
                        self.metadata[mkey] = None
            except url_helper.UrlError as e:
                if required:
                    msg = "required url %s raised exception %s. not GCE"
                    if not found:
                        LOG.debug(msg, path, e)
                    else:
                        LOG.warn(msg, path, e)
                    return False
                # Optional item: record None and keep going.
                msg = "Failed to get %s metadata item: %s."
                LOG.debug(msg, path, e)

                self.metadata[mkey] = None

        # Normalize the raw sshKeys blob into a list of trimmed key lines.
        if self.metadata["public-keys"]:
            lines = self.metadata["public-keys"].splitlines()
            self.metadata["public-keys"] = [self._trim_key(k) for k in lines]

        return found
Beispiel #29
0
    def _crawl_metadata(self):
        """Populate self.metadata plus the raw user/vendor data blobs."""
        response = url_helper.readurl(
            self.metadata_address, timeout=self.timeout, retries=self.retries)
        self.metadata = json.loads(util.decode_binary(response.contents))

        self.userdata_raw = query_data_api(
            'user-data', self.userdata_address, self.retries, self.timeout)
        self.vendordata_raw = query_data_api(
            'vendor-data', self.vendordata_address, self.retries,
            self.timeout)
Beispiel #30
0
def read_metadata(url, timeout, retries, sec_between):
    """Read the v1.json metadata blob and return it as a string.

    :param url: base metadata URL; "/v1.json" is appended.
    :param timeout: per-request timeout passed to readurl.
    :param retries: retry count passed to readurl.
    :param sec_between: seconds between retries passed to readurl.
    :raises RuntimeError: when the HTTP response is not OK.
    """
    url = "%s/v1.json" % url
    response = url_helper.readurl(url,
                                  timeout=timeout,
                                  retries=retries,
                                  headers={'Metadata-Token': 'vultr'},
                                  sec_between=sec_between)

    if not response.ok():
        # BUG FIX: the format arguments were outside the parentheses
        # ("..." % url, response.code), so the %-format had too few
        # arguments and raised TypeError instead of the intended
        # RuntimeError message.
        raise RuntimeError("Failed to connect to %s: Code: %s"
                           % (url, response.code))

    return response.contents.decode()
Beispiel #31
0
def handle(name, _cfg, cloud, log, _args):
    """Fetch RightScale userdata hook URLs and stage their content.

    Parses the raw userdata as a query string, downloads each URL listed
    under MY_HOOKNAME and writes the responses as executable scripts so
    that the cc_scripts_user module runs them later in boot.

    @param name: module name, used only in log messages.
    @param _cfg: unused cloud config.
    @param cloud: cloud object providing userdata and instance paths.
    @param log: logger for this module.
    @param _args: unused module arguments.
    """
    try:
        ud = cloud.get_userdata_raw()
    except Exception:
        log.debug("Failed to get raw userdata in module %s", name)
        return

    try:
        mdict = parse_qs(ud)
        if not mdict or MY_HOOKNAME not in mdict:
            log.debug(("Skipping module %s, "
                       "did not find %s in parsed"
                       " raw userdata"), name, MY_HOOKNAME)
            return
    except Exception:
        util.logexc(log, "Failed to parse query string %s into a dictionary",
                    ud)
        raise

    wrote_fns = []
    captured_excps = []

    # These will eventually be then ran by the cc_scripts_user
    # TODO(harlowja): maybe this should just be a new user data handler??
    # Instead of a late module that acts like a user data handler?
    scripts_d = cloud.get_ipath_cur('scripts')
    urls = mdict[MY_HOOKNAME]
    for (i, url) in enumerate(urls):
        fname = os.path.join(scripts_d, "rightscale-%02i" % (i))
        try:
            resp = uhelp.readurl(url)
            # Ensure its a valid http response (and something gotten)
            if resp.ok() and resp.contents:
                # NOTE(review): writes the response object itself, not
                # resp.contents; presumably write_file stringifies it to
                # the body — confirm against util.write_file.
                util.write_file(fname, resp, mode=0o700)
                wrote_fns.append(fname)
        except Exception as e:
            captured_excps.append(e)
            util.logexc(log, "%s failed to read %s and write %s", MY_NAME, url,
                        fname)

    if wrote_fns:
        log.debug("Wrote out rightscale userdata to %s files", len(wrote_fns))

    if len(wrote_fns) != len(urls):
        skipped = len(urls) - len(wrote_fns)
        log.debug("%s urls were skipped or failed", skipped)

    if captured_excps:
        # Logger.warn is a deprecated alias of Logger.warning.
        log.warning("%s failed with exceptions, re-raising the last one",
                    len(captured_excps))
        raise captured_excps[-1]
    def _crawl_metadata(self):
        """Download metadata JSON plus raw user-data and vendor-data."""
        metadata_resp = url_helper.readurl(
            self.metadata_address,
            timeout=self.timeout,
            retries=self.retries,
        )
        self.metadata = json.loads(
            util.decode_binary(metadata_resp.contents))

        # Fetch both raw payloads through the same data API helper.
        for attr, kind, address in (
                ('userdata_raw', 'user-data', self.userdata_address),
                ('vendordata_raw', 'vendor-data', self.vendordata_address)):
            setattr(self, attr,
                    query_data_api(kind, address, self.retries, self.timeout))
Beispiel #33
0
def has_url_connectivity(url_data: Dict[str, Any]) -> bool:
    """Return true when the instance has access to the provided URL.

    Logs a warning if url is not the expected format.

    url_data is a dictionary of kwargs to send to readurl. E.g.:

    has_url_connectivity({
        "url": "http://example.invalid",
        "headers": {"some": "header"},
        "timeout": 10
    })
    """
    if "url" not in url_data:
        LOG.warning(
            "Ignoring connectivity check. No 'url' to check in %s", url_data
        )
        return False
    url = url_data["url"]
    try:
        result = urlparse(url)
        if result.scheme not in ("http", "https"):
            # Bug fix: the original logged url.scheme, but url is a plain
            # string with no .scheme attribute (AttributeError on this path).
            LOG.warning(
                "Ignoring connectivity check. Invalid URL scheme %s",
                result.scheme,
            )
            return False
    except ValueError as err:
        LOG.warning("Ignoring connectivity check. Invalid URL %s", err)
        return False
    # Keep connectivity probes short by default.
    if "timeout" not in url_data:
        url_data["timeout"] = 5
    try:
        readurl(**url_data)
    except UrlError:
        return False
    return True
    def _get_data(self):
        """Return True when running on Scaleway and metadata was fetched."""
        if not on_scaleway():
            return False

        metadata_resp = url_helper.readurl(
            self.metadata_address,
            timeout=self.timeout,
            retries=self.retries,
        )
        self.metadata = json.loads(
            util.decode_binary(metadata_resp.contents))

        self.userdata_raw = query_data_api(
            'user-data', self.userdata_address, self.retries, self.timeout)
        self.vendordata_raw = query_data_api(
            'vendor-data', self.vendordata_address, self.retries, self.timeout)
        return True
    def _crawl_metadata(self):
        """Fetch metadata, user-data and vendor-data with optional headers."""
        # Stay backward compatible with classes w/o these attributes
        self.headers = getattr(self, 'headers', None)
        self.headers_redact = getattr(self, 'headers_redact', None)

        resp = url_helper.readurl(
            self.metadata_address,
            headers=self.headers,
            headers_redact=self.headers_redact,
            timeout=self.timeout,
            retries=self.retries,
        )
        self.metadata = json.loads(util.decode_binary(resp.contents))

        self.userdata_raw = query_data_api(
            "user-data",
            self.userdata_address,
            self.headers,
            self.headers_redact,
            self.retries,
            self.timeout,
        )
        self.vendordata_raw = query_data_api(
            "vendor-data",
            self.vendordata_address,
            self.headers,
            self.headers_redact,
            self.retries,
            self.timeout,
        )
Beispiel #36
0
 def get_value(self, path, is_text):
     """Read one metadata value from *path*.

     :param path: URL path appended to self.metadata_address.
     :param is_text: when true, decode the response body to text.
     :return: the value, or None on request failure or non-200 status.
     """
     try:
         resp = url_helper.readurl(url=self.metadata_address + path,
                                   headers=self.headers)
     except url_helper.UrlError as exc:
         LOG.debug("url %s raised exception %s", path, exc)
         return None
     if resp.code != 200:
         LOG.debug("url %s returned code %s", path, resp.code)
         return None
     return util.decode_binary(resp.contents) if is_text else resp.contents
Beispiel #37
0
def http_with_retries(url, **kwargs) -> url_helper.UrlResponse:
    """Wrapper around url_helper.readurl() with custom telemetry logging
    that url_helper.readurl() does not provide.

    :param url: Azure endpoint URL to request.
    :param kwargs: extra keyword args forwarded to url_helper.readurl();
        'retries' and 'infinite' are stripped (retrying is handled here)
        and 'timeout' defaults to 5 seconds when not given.
    :return: the url_helper.UrlResponse of the first successful attempt.
    :raises: the last readurl exception once all attempts are exhausted.
    """
    # Up to 240 attempts with 5s sleeps: roughly 20 minutes of retrying.
    max_readurl_attempts = 240
    default_readurl_timeout = 5
    sleep_duration_between_retries = 5
    # Emit a diagnostic event only every Nth failure to limit telemetry noise.
    periodic_logging_attempts = 12

    if "timeout" not in kwargs:
        kwargs["timeout"] = default_readurl_timeout

    # remove kwargs that cause url_helper.readurl to retry,
    # since we are already implementing our own retry logic.
    if kwargs.pop("retries", None):
        LOG.warning("Ignoring retries kwarg passed in for "
                    "communication with Azure endpoint.")
    if kwargs.pop("infinite", None):
        LOG.warning("Ignoring infinite kwarg passed in for communication "
                    "with Azure endpoint.")

    for attempt in range(1, max_readurl_attempts + 1):
        try:
            ret = url_helper.readurl(url, **kwargs)

            report_diagnostic_event(
                "Successful HTTP request with Azure endpoint %s after "
                "%d attempts" % (url, attempt),
                logger_func=LOG.debug,
            )

            return ret

        except Exception as e:
            if attempt % periodic_logging_attempts == 0:
                report_diagnostic_event(
                    "Failed HTTP request with Azure endpoint %s during "
                    "attempt %d with exception: %s" % (url, attempt, e),
                    logger_func=LOG.debug,
                )
            # Surface the real error to callers on the final attempt.
            if attempt == max_readurl_attempts:
                raise

        time.sleep(sleep_duration_between_retries)

    # Unreachable: the loop either returns or re-raises on the last attempt.
    raise RuntimeError("Failed to return in http_with_retries")
Beispiel #38
0
 def get_value(self, path, is_text):
     """Fetch *path* from the metadata address.

     :param path: URL path appended to self.metadata_address.
     :param is_text: when true, decode the response body to text.
     :return: decoded text or raw bytes, or None on any failure.
     """
     value = None
     try:
         resp = url_helper.readurl(url=self.metadata_address + path,
                                   headers=self.headers)
     except url_helper.UrlError as exc:
         LOG.debug("url %s raised exception %s", path, exc)
     else:
         if resp.code != 200:
             LOG.debug("url %s returned code %s", path, resp.code)
         elif is_text:
             value = util.decode_binary(resp.contents)
         else:
             value = resp.contents
     return value
def _get_metadata_from_imds(retries):
    """Best-effort fetch of Azure IMDS instance metadata as a dict.

    Returns an empty dict when the request fails or the body is not JSON.
    """
    headers = {"Metadata": "true"}
    url = IMDS_URL + "instance?api-version=2017-12-01"
    try:
        response = readurl(
            url, timeout=1, headers=headers, retries=retries,
            exception_cb=retry_on_url_exc)
    except Exception as e:
        # Deliberately broad: IMDS may simply be unavailable here.
        LOG.debug('Ignoring IMDS instance metadata: %s', e)
        return {}
    try:
        return util.load_json(str(response))
    except json.decoder.JSONDecodeError:
        LOG.warning(
            'Ignoring non-json IMDS instance metadata: %s', str(response))
        return {}
Beispiel #40
0
    def _path_read(self, path):
        """Read *path* via readurl, giving up once an HTTP >= 400 arrives."""

        def _retry_unless_http_error(_request_args, cause):
            # Stop retrying when the server answered with an HTTP error
            # status; keep retrying on transport-level failures.
            try:
                if int(cause.code) >= 400:
                    return False
            except (TypeError, ValueError):
                # Older versions of requests didn't have a code.
                pass
            return True

        return url_helper.readurl(
            path,
            retries=self.retries,
            ssl_details=self.ssl_details,
            timeout=self.timeout,
            exception_cb=_retry_unless_http_error,
        ).contents
Beispiel #41
0
 def get_value(self, path, is_text, is_recursive=False):
     """GET *path* from the metadata server.

     :param path: path appended to self.metadata_address.
     :param is_text: decode via util.decode_binary when true; otherwise
         the body is decoded as UTF-8.
     :param is_recursive: when true, append '/?recursive=True' to the URL.
     :return: the decoded value, or None on error.
     """
     target = self.metadata_address + path
     if is_recursive:
         target += '/?recursive=True'
     try:
         resp = url_helper.readurl(url=target, headers=self.headers)
     except url_helper.UrlError as exc:
         LOG.debug("url %s raised exception %s", path, exc)
         return None
     if resp.code != 200:
         LOG.debug("url %s returned code %s", path, resp.code)
         return None
     if is_text:
         return util.decode_binary(resp.contents)
     return resp.contents.decode('utf-8')
Beispiel #42
0
def install_chef_from_omnibus(url=None, retries=None, omnibus_version=None):
    """Install an omnibus unified package from url.

    @param url: URL where blob of chef content may be downloaded. Defaults to
        OMNIBUS_URL.
    @param retries: Number of retries to perform when attempting to read url.
        Defaults to OMNIBUS_URL_RETRIES
    @param omnibus_version: Optional version string to require for omnibus
        install.
    """
    url = OMNIBUS_URL if url is None else url
    retries = OMNIBUS_URL_RETRIES if retries is None else retries
    args = [] if omnibus_version is None else ['-v', omnibus_version]

    installer_blob = url_helper.readurl(url=url, retries=retries).contents
    return util.subp_blob_in_tempfile(
        blob=installer_blob,
        args=args,
        basename='chef-omnibus-install',
        capture=False,
    )
def query_data_api_once(api_address, timeout, requests_session):
    """
    Retrieve user data or vendor data.

    Scaleway user/vendor data API returns HTTP/404 if user/vendor data is not
    set.

    This function calls `url_helper.readurl` but instead of considering
    HTTP/404 as an error that requires a retry, it considers it as empty
    user/vendor data.

    Also, be aware the user data/vendor API requires the source port to be
    below 1024 to ensure the client is root (since non-root users can't bind
    ports below 1024). If requests raises ConnectionError (EADDRINUSE), the
    caller should retry to call this function on an other port.
    """
    def _no_retry_on_404_or_conn_error(_request_args, exc):
        # Treat HTTP/404 and ConnectionError as final: let them reach the
        # except block below instead of having readurl retry them.
        if exc.code == 404:
            return False
        return not isinstance(exc.cause, requests.exceptions.ConnectionError)

    try:
        resp = url_helper.readurl(
            api_address,
            data=None,
            timeout=timeout,
            # It's the caller's responsibility to recall this function in
            # case of exception. Don't let url_helper.readurl() retry by
            # itself.
            retries=0,
            session=requests_session,
            exception_cb=_no_retry_on_404_or_conn_error,
        )
        return util.decode_binary(resp.contents)
    except url_helper.UrlError as exc:
        if exc.code == 404:
            # Empty user data.
            return None
        raise
Beispiel #44
0
def handle(name, cfg, cloud, log, _args):
    """Install and configure chef based on the 'chef' cloud-config key.

    Writes the validation key, renders client.rb from a template, writes
    firstboot.json, and installs chef via gems, packages or omnibus when
    chef-client is absent (or force_install is set).

    @param name: module name for log messages.
    @param cfg: full cloud config; only cfg['chef'] is consumed.
    @param cloud: cloud object for distro, templates and instance data.
    @param log: logger for this module.
    @param _args: unused module arguments.
    """

    # If there isn't a chef key in the configuration don't do anything
    if 'chef' not in cfg:
        log.debug(("Skipping module named %s,"
                  " no 'chef' key in configuration"), name)
        return
    chef_cfg = cfg['chef']

    # Ensure the chef directories we use exist
    for d in CHEF_DIRS:
        util.ensure_dir(d)

    # Set the validation key based on the presence of either 'validation_key'
    # or 'validation_cert'. In the case where both exist, 'validation_key'
    # takes precedence
    for key in ('validation_key', 'validation_cert'):
        if key in chef_cfg and chef_cfg[key]:
            util.write_file('/etc/chef/validation.pem', chef_cfg[key])
            break

    # Create the chef config from template
    template_fn = cloud.get_template_filename('chef_client.rb')
    if template_fn:
        iid = str(cloud.datasource.get_instance_id())
        params = {
            'server_url': chef_cfg['server_url'],
            'node_name': util.get_cfg_option_str(chef_cfg, 'node_name', iid),
            'environment': util.get_cfg_option_str(chef_cfg, 'environment',
                                                   '_default'),
            'validation_name': chef_cfg['validation_name']
        }
        templater.render_to_file(template_fn, '/etc/chef/client.rb', params)
    else:
        # Logger.warn is a deprecated alias of Logger.warning.
        log.warning("No template found, not rendering to /etc/chef/client.rb")

    # set the firstboot json
    initial_json = {}
    if 'run_list' in chef_cfg:
        initial_json['run_list'] = chef_cfg['run_list']
    if 'initial_attributes' in chef_cfg:
        initial_attributes = chef_cfg['initial_attributes']
        for k in list(initial_attributes.keys()):
            initial_json[k] = initial_attributes[k]
    util.write_file('/etc/chef/firstboot.json', json.dumps(initial_json))

    # If chef is not installed, we install chef based on 'install_type'
    if (not os.path.isfile('/usr/bin/chef-client') or
            util.get_cfg_option_bool(chef_cfg,
                                     'force_install', default=False)):

        install_type = util.get_cfg_option_str(chef_cfg, 'install_type',
                                               'packages')
        if install_type == "gems":
            # this will install and run the chef-client from gems
            chef_version = util.get_cfg_option_str(chef_cfg, 'version', None)
            ruby_version = util.get_cfg_option_str(chef_cfg, 'ruby_version',
                                                   RUBY_VERSION_DEFAULT)
            install_chef_from_gems(cloud.distro, ruby_version, chef_version)
            # and finally, run chef-client
            log.debug('Running chef-client')
            util.subp(['/usr/bin/chef-client',
                       '-d', '-i', '1800', '-s', '20'], capture=False)
        elif install_type == 'packages':
            # this will install and run the chef-client from packages
            cloud.distro.install_packages(('chef',))
        elif install_type == 'omnibus':
            url = util.get_cfg_option_str(chef_cfg, "omnibus_url", OMNIBUS_URL)
            content = url_helper.readurl(url=url, retries=5)
            with util.tempdir() as tmpd:
                # use tmpd over tmpfile to avoid 'Text file busy' on execute
                tmpf = "%s/chef-omnibus-install" % tmpd
                # Bug fix: 0700 is a Python 2 octal literal and a
                # SyntaxError on Python 3; 0o700 is the equivalent mode.
                util.write_file(tmpf, str(content), mode=0o700)
                util.subp([tmpf], capture=False)
        else:
            log.warning("Unknown chef install type %s", install_type)
Beispiel #45
0
def read_userdata(url, timeout=2, sec_between=2, retries=30):
    """Fetch raw user-data from *url*, raising on a non-OK response."""
    response = url_helper.readurl(
        url, timeout=timeout, sec_between=sec_between, retries=retries)
    if response.ok():
        return response.contents
    raise RuntimeError("unable to read userdata at %s" % url)
Beispiel #46
0
 def _path_read(self, path):
     """Return the raw bytes served at *path*."""
     response = url_helper.readurl(
         path,
         retries=self.retries,
         ssl_details=self.ssl_details,
         timeout=self.timeout,
     )
     return response.contents
Beispiel #47
0
def read_metadata(url, timeout=2, sec_between=2, retries=30):
    """Fetch metadata JSON from *url* and return it parsed as a dict."""
    response = url_helper.readurl(
        url, timeout=timeout, sec_between=sec_between, retries=retries)
    if not response.ok():
        raise RuntimeError("unable to read metadata at %s" % url)
    return json.loads(response.contents.decode())
Beispiel #48
0
    def get_data(self):
        """Crawl GCE metadata endpoints and populate self.metadata.

        Returns True when at least one metadata item was retrieved; False
        when the metadata server is unreachable or a required item is
        missing (i.e. not running on GCE).
        """
        # GCE metadata server requires a custom header since v1
        headers = {'X-Google-Metadata-Request': True}

        # url_map: (our-key, path, required, is_text)
        url_map = [
            ('instance-id', 'instance/id', True, True),
            ('availability-zone', 'instance/zone', True, True),
            ('local-hostname', 'instance/hostname', True, True),
            ('public-keys', 'project/attributes/sshKeys', False, True),
            ('user-data', 'instance/attributes/user-data', False, False),
            ('user-data-encoding', 'instance/attributes/user-data-encoding',
             False, True),
        ]

        # if we cannot resolve the metadata server, then no point in trying
        if not util.is_resolvable_url(self.metadata_address):
            LOG.debug("%s is not resolvable", self.metadata_address)
            return False

        # iterate over url_map keys to get metadata items
        found = False
        for (mkey, path, required, is_text) in url_map:
            try:
                resp = url_helper.readurl(url=self.metadata_address + path,
                                          headers=headers)
                if resp.code == 200:
                    found = True
                    if is_text:
                        self.metadata[mkey] = util.decode_binary(resp.contents)
                    else:
                        self.metadata[mkey] = resp.contents
                else:
                    if required:
                        msg = "required url %s returned code %s. not GCE"
                        if not found:
                            LOG.debug(msg, path, resp.code)
                        else:
                            # Logger.warn is a deprecated alias of warning.
                            LOG.warning(msg, path, resp.code)
                        return False
                    else:
                        self.metadata[mkey] = None
            except url_helper.UrlError as e:
                if required:
                    msg = "required url %s raised exception %s. not GCE"
                    if not found:
                        LOG.debug(msg, path, e)
                    else:
                        LOG.warning(msg, path, e)
                    return False
                msg = "Failed to get %s metadata item: %s."
                LOG.debug(msg, path, e)

                self.metadata[mkey] = None

        if self.metadata['public-keys']:
            lines = self.metadata['public-keys'].splitlines()
            self.metadata['public-keys'] = [self._trim_key(k) for k in lines]

        encoding = self.metadata.get('user-data-encoding')
        if encoding:
            if encoding == 'base64':
                self.metadata['user-data'] = b64decode(
                    self.metadata['user-data'])
            else:
                LOG.warning('unknown user-data-encoding: %s, ignoring',
                            encoding)

        return found