def _get_metadata(self):
        resp = url_helper.readurl(
            self.metadata_address,
            data=None,
            timeout=self.timeout,
            retries=self.retries
        )
        metadata = json.loads(util.decode_binary(resp.contents))
        LOG.debug('metadata downloaded')

        # try to make a request on the first privileged port available
        for port in range(1, 1024):
            try:
                session = requests.Session()
                session.mount(
                    'http://',
                    SourceAddressAdapter(source_address=('0.0.0.0', port))
                )
                resp = url_helper.readurl(
                    self.userdata_address,
                    data=None,
                    timeout=self.timeout,
                    retries=self.retries,
                    session=session
                )
                user_data = util.decode_binary(resp.contents)
                LOG.debug('user-data downloaded')
                return metadata, user_data

            except url_helper.UrlError:  # try next port
                pass
Example #2
def get_none_if_empty_val(val):
    val = util.decode_binary(val)
    val = val.rstrip()

    if len(val) == 0:
        return None
    return val
Example #3
def query_data_api_once(api_address, timeout, requests_session):
    """
    Retrieve user data or vendor data.

    Scaleway user/vendor data API returns HTTP/404 if user/vendor data is not
    set.

    This function calls `url_helper.readurl` but instead of considering
    HTTP/404 as an error that requires a retry, it considers it as empty
    user/vendor data.

    Also, be aware that the user-data/vendor-data API requires the source
    port to be below 1024 to ensure the client is root (since non-root users
    can't bind ports below 1024). If requests raises ConnectionError
    (EADDRINUSE), the caller should retry calling this function on another
    port.
    """
    try:
        resp = url_helper.readurl(
            api_address,
            data=None,
            timeout=timeout,
            # It's the caller's responsibility to call this function again
            # on exception. Don't let url_helper.readurl() retry by itself.
            retries=0,
            session=requests_session,
            # If the error is an HTTP/404 or a ConnectionError, raise right
            # away (handled by the except block below) instead of retrying.
            exception_cb=lambda _, exc: exc.code != 404 and
            (not isinstance(exc.cause, requests.exceptions.ConnectionError)))
        return util.decode_binary(resp.contents)
    except url_helper.UrlError as exc:
        # Empty user data.
        if exc.code == 404:
            return None
        raise
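The docstring above expects the caller to walk the privileged source ports itself; a hedged sketch of such a wrapper (the helper name, port range, and adapter import are illustrative, not necessarily cloud-init's actual query_data_api):

import requests
from requests_toolbelt.adapters.source import SourceAddressAdapter

def query_data_api_sketch(api_address, timeout, max_port=10):
    # Walk successive privileged source ports; a port that is already taken
    # shows up as a ConnectionError wrapped in UrlError, so try the next one.
    for port in range(1, max_port + 1):
        session = requests.Session()
        session.mount(
            'http://',
            SourceAddressAdapter(source_address=('0.0.0.0', port)))
        try:
            return query_data_api_once(api_address, timeout, session)
        except url_helper.UrlError:
            continue
    return None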
Example #4
def read_rta_oper_state(data):
    '''Reads Interface name and operational state from RTA Data.

    :param data: string of data read from netlink socket
    :returns: InterfaceOperstate object containing if_name and oper_state.
              None if data does not contain valid IFLA_OPERSTATE and
              IFLA_IFNAME messages.
    :raises: AssertionError if data is None or length of data is
             smaller than RTATTR_START_OFFSET.
    '''
    assert (data is not None), ("data is none")
    assert (len(data) > RTATTR_START_OFFSET), (
        "length of data is smaller than RTATTR_START_OFFSET")
    ifname = operstate = None
    offset = RTATTR_START_OFFSET
    while offset <= len(data):
        attr = unpack_rta_attr(data, offset)
        if not attr or attr.length == 0:
            break
        # Each attribute is 4-byte aligned. Determine pad length.
        padlen = (PAD_ALIGNMENT -
                  (attr.length % PAD_ALIGNMENT)) % PAD_ALIGNMENT
        offset += attr.length + padlen

        if attr.rta_type == IFLA_OPERSTATE:
            operstate = ord(attr.data)
        elif attr.rta_type == IFLA_IFNAME:
            interface_name = util.decode_binary(attr.data, 'utf-8')
            ifname = interface_name.strip('\0')
    if not ifname or operstate is None:
        return None
    LOG.debug("rta attrs: ifname %s operstate %d", ifname, operstate)
    return InterfaceOperstate(ifname, operstate)
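The padding expression above is the usual round-up-to-alignment trick; a small worked example, assuming PAD_ALIGNMENT is 4:

PAD_ALIGNMENT = 4
for length in (5, 8, 9):
    padlen = (PAD_ALIGNMENT - (length % PAD_ALIGNMENT)) % PAD_ALIGNMENT
    # length 5 -> pad 3 (next attribute starts at offset 8),
    # length 8 -> pad 0, length 9 -> pad 3.
    print(length, padlen)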
Example #6
def read_maas_seed_url(seed_url,
                       read_file_or_url=None,
                       timeout=None,
                       version=MD_VERSION,
                       paths=None,
                       retries=None):
    """
    Read the maas datasource at seed_url.
      read_file_or_url is a method that should provide an interface
      like util.read_file_or_url

    Expected format of seed_url is the following files:
      * <seed_url>/<version>/meta-data/instance-id
      * <seed_url>/<version>/meta-data/local-hostname
      * <seed_url>/<version>/user-data
    """
    base_url = "%s/%s" % (seed_url, version)
    file_order = [
        'local-hostname',
        'instance-id',
        'public-keys',
        'user-data',
    ]
    files = {
        'local-hostname': "%s/%s" % (base_url, 'meta-data/local-hostname'),
        'instance-id': "%s/%s" % (base_url, 'meta-data/instance-id'),
        'public-keys': "%s/%s" % (base_url, 'meta-data/public-keys'),
        'user-data': "%s/%s" % (base_url, 'user-data'),
    }

    if read_file_or_url is None:
        read_file_or_url = util.read_file_or_url

    md = {}
    for name in file_order:
        url = files.get(name)
        if name == 'user-data':
            item_retries = 0
        else:
            item_retries = retries

        try:
            ssl_details = util.fetch_ssl_details(paths)
            resp = read_file_or_url(url,
                                    retries=item_retries,
                                    timeout=timeout,
                                    ssl_details=ssl_details)
            if resp.ok():
                if name in BINARY_FIELDS:
                    md[name] = resp.contents
                else:
                    md[name] = util.decode_binary(resp.contents)
            else:
                LOG.warn(("Fetching from %s resulted in"
                          " an invalid http code %s"), url, resp.code)
        except url_helper.UrlError as e:
            if e.code != 404:
                raise
    return check_seed_contents(md, seed_url)
Example #7
    def test_userdata_plain(self):
        mydata = "FOOBAR"
        odata = {'UserData': {'text': mydata, 'encoding': 'plain'}}
        data = {'ovfcontent': construct_valid_ovf_env(data=odata)}

        dsrc = self._get_ds(data)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(decode_binary(dsrc.userdata_raw), mydata)
Example #8
def read_maas_seed_url(seed_url, header_cb=None, timeout=None,
                       version=MD_VERSION, paths=None):
    """
    Read the maas datasource at seed_url.
      - header_cb is a method that should return a headers dictionary for
        a given url

    Expected format of seed_url is the following files:
      * <seed_url>/<version>/meta-data/instance-id
      * <seed_url>/<version>/meta-data/local-hostname
      * <seed_url>/<version>/user-data
    """
    base_url = "%s/%s" % (seed_url, version)
    file_order = [
        'local-hostname',
        'instance-id',
        'public-keys',
        'user-data',
    ]
    files = {
        'local-hostname': "%s/%s" % (base_url, 'meta-data/local-hostname'),
        'instance-id': "%s/%s" % (base_url, 'meta-data/instance-id'),
        'public-keys': "%s/%s" % (base_url, 'meta-data/public-keys'),
        'user-data': "%s/%s" % (base_url, 'user-data'),
    }

    md = {}
    for name in file_order:
        url = files.get(name)
        if not header_cb:
            def _cb(url):
                return {}
            header_cb = _cb

        if name == 'user-data':
            retries = 0
        else:
            retries = None

        try:
            ssl_details = util.fetch_ssl_details(paths)
            resp = util.read_file_or_url(url, retries=retries,
                                         headers_cb=header_cb,
                                         timeout=timeout,
                                         ssl_details=ssl_details)
            if resp.ok():
                if name in BINARY_FIELDS:
                    md[name] = resp.contents
                else:
                    md[name] = util.decode_binary(resp.contents)
            else:
                LOG.warn(("Fetching from %s resulted in"
                          " an invalid http code %s"), url, resp.code)
        except url_helper.UrlError as e:
            if e.code != 404:
                raise
    return check_seed_contents(md, seed_url)
Example #9
    def test_userdata_plain(self):
        mydata = "FOOBAR"
        odata = {'UserData': {'text': mydata, 'encoding': 'plain'}}
        data = {'ovfcontent': construct_valid_ovf_env(data=odata)}

        dsrc = self._get_ds(data)
        ret = dsrc.get_data()
        self.assertTrue(ret)
        self.assertEqual(decode_binary(dsrc.userdata_raw), mydata)
def read_maas_seed_url(
    seed_url,
    read_file_or_url=None,
    timeout=None,
    version=MD_VERSION,
    paths=None,
    retries=None,
):
    """
    Read the maas datasource at seed_url.
      read_file_or_url is a method that should provide an interface
      like util.read_file_or_url

    Expected format of seed_url is the following files:
      * <seed_url>/<version>/meta-data/instance-id
      * <seed_url>/<version>/meta-data/local-hostname
      * <seed_url>/<version>/user-data
    If version is None, then <version>/ will not be used.
    """
    if read_file_or_url is None:
        read_file_or_url = url_helper.read_file_or_url

    if seed_url.endswith("/"):
        seed_url = seed_url[:-1]

    md = {}
    for path, _dictname, binary, optional in DS_FIELDS:
        if version is None:
            url = "%s/%s" % (seed_url, path)
        else:
            url = "%s/%s/%s" % (seed_url, version, path)
        try:
            ssl_details = util.fetch_ssl_details(paths)
            resp = read_file_or_url(url,
                                    retries=retries,
                                    timeout=timeout,
                                    ssl_details=ssl_details)
            if resp.ok():
                if binary:
                    md[path] = resp.contents
                else:
                    md[path] = util.decode_binary(resp.contents)
            else:
                LOG.warning(
                    "Fetching from %s resulted in an invalid http code %s",
                    url,
                    resp.code,
                )
        except url_helper.UrlError as e:
            if e.code == 404 and not optional:
                raise MAASSeedDirMalformed("Missing required %s: %s" %
                                           (path, e)) from e
            elif e.code != 404:
                raise e

    return check_seed_contents(md, seed_url)
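The loop above assumes DS_FIELDS is a list of (path, dictname, binary, optional) tuples; the entries below only illustrate that shape, the real table lives in cloud-init's MAAS datasource:

DS_FIELDS = [
    # (path, dictname, binary, optional)
    ('meta-data/instance-id', 'instance-id', False, False),
    ('meta-data/local-hostname', 'local-hostname', False, False),
    ('meta-data/public-keys', 'public-keys', False, True),
    ('user-data', 'user-data', True, True),
]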
Example #11
def type_from_starts_with(payload, default=None):
    try:
        payload_lc = util.decode_binary(payload).lower()
    except UnicodeDecodeError:
        return default
    payload_lc = payload_lc.lstrip()
    for text in INCLUSION_SRCH:
        if payload_lc.startswith(text):
            return INCLUSION_TYPES_MAP[text]
    return default
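For context, the lookup tables used above map leading markers to MIME types; an illustrative subset (cloud-init's real table covers more handlers):

INCLUSION_TYPES_MAP = {
    '#include': 'text/x-include-url',
    '#cloud-config': 'text/cloud-config',
    '#!': 'text/x-shellscript',
}
# Search longest markers first so a longer prefix is not shadowed by a shorter one.
INCLUSION_SRCH = sorted(INCLUSION_TYPES_MAP.keys(), key=len, reverse=True)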
Example #13
    def _crawl_metadata(self):
        resp = url_helper.readurl(self.metadata_address,
                                  timeout=self.timeout,
                                  retries=self.retries)
        self.metadata = json.loads(util.decode_binary(resp.contents))

        self.userdata_raw = query_data_api('user-data', self.userdata_address,
                                           self.retries, self.timeout)
        self.vendordata_raw = query_data_api('vendor-data',
                                             self.vendordata_address,
                                             self.retries, self.timeout)
Example #14
    def _crawl_metadata(self):
        resp = url_helper.readurl(self.metadata_address,
                                  timeout=self.timeout,
                                  retries=self.retries)
        self.metadata = json.loads(util.decode_binary(resp.contents))

        self.userdata_raw = query_data_api(
            'user-data', self.userdata_address,
            self.retries, self.timeout
        )
        self.vendordata_raw = query_data_api(
            'vendor-data', self.vendordata_address,
            self.retries, self.timeout
        )
Example #15
def decode(key, enc_type, data):
    LOG.debug("Getting encoded data for key=%s, enc=%s", key, enc_type)
    raw_data = None

    if enc_type in ["gzip+base64", "gz+b64"]:
        LOG.debug("Decoding %s type of %s", enc_type, key)
        raw_data = util.decomp_gzip(util.b64d(data))
    elif enc_type in ["base64", "b64"]:
        LOG.debug("Decoding %s type of %s", enc_type, key)
        raw_data = util.b64d(data)
    else:
        LOG.debug("Plain-text data %s", key)
        raw_data = data
    return util.decode_binary(raw_data)
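A quick round trip showing what the 'gzip+base64' branch undoes, using only the standard library (the key and value are made up):

import base64
import gzip

original = 'ip6=fe80::1'
encoded = base64.b64encode(gzip.compress(original.encode('utf-8'))).decode('ascii')
# decode('ip6', 'gzip+base64', encoded) would return the original string.
assert gzip.decompress(base64.b64decode(encoded)).decode('utf-8') == original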
Example #16
    def _get_data(self):
        if not on_scaleway():
            return False

        resp = url_helper.readurl(self.metadata_address,
                                  timeout=self.timeout,
                                  retries=self.retries)
        self.metadata = json.loads(util.decode_binary(resp.contents))

        self.userdata_raw = query_data_api('user-data', self.userdata_address,
                                           self.retries, self.timeout)
        self.vendordata_raw = query_data_api('vendor-data',
                                             self.vendordata_address,
                                             self.retries, self.timeout)
        return True
Example #17
def convert_string(raw_data, headers=None):
    if not raw_data:
        raw_data = ''
    if not headers:
        headers = {}
    data = util.decode_binary(util.decomp_gzip(raw_data))
    if "mime-version:" in data[0:4096].lower():
        msg = util.message_from_string(data)
        for (key, val) in headers.items():
            _replace_header(msg, key, val)
    else:
        mtype = headers.get(CONTENT_TYPE, NOT_MULTIPART_TYPE)
        maintype, subtype = mtype.split("/", 1)
        msg = MIMEBase(maintype, subtype, *headers)
        msg.set_payload(data)
    return msg
Example #18
def get_none_if_empty_val(val):
    """
    get_none_if_empty_val returns None if the provided value, once stripped
    of its trailing whitespace, is empty or equal to GUESTINFO_EMPTY_YAML_VAL.

    The return value is always a string, regardless of whether the input is
    a bytes class or a string.
    """

    # If the provided value is a bytes class, convert it to a string to
    # simplify the rest of this function's logic.
    val = util.decode_binary(val)
    val = val.rstrip()
    if len(val) == 0 or val == GUESTINFO_EMPTY_YAML_VAL:
        return None
    return val
Example #20
    def _crawl_metadata(self):
        # Stay backward compatible with classes w/o these attributes
        self.headers = getattr(self, 'headers', None)
        self.headers_redact = getattr(self, 'headers_redact', None)

        resp = url_helper.readurl(
            self.metadata_address,
            headers=self.headers,
            headers_redact=self.headers_redact,
            timeout=self.timeout,
            retries=self.retries,
        )
        self.metadata = json.loads(util.decode_binary(resp.contents))

        self.userdata_raw = query_data_api(
            "user-data", self.userdata_address,
            self.headers, self.headers_redact,
            self.retries, self.timeout,
        )
        self.vendordata_raw = query_data_api(
            "vendor-data", self.vendordata_address,
            self.headers, self.headers_redact,
            self.retries, self.timeout,
        )
Example #21
 def get_value(self, path, is_text):
     value = None
     try:
         resp = url_helper.readurl(url=self.metadata_address + path,
                                   headers=self.headers)
     except url_helper.UrlError as exc:
         msg = "url %s raised exception %s"
         LOG.debug(msg, path, exc)
     else:
         if resp.code == 200:
             if is_text:
                 value = util.decode_binary(resp.contents)
             else:
                 value = resp.contents
         else:
             LOG.debug("url %s returned code %s", path, resp.code)
     return value
Example #23
 def __call__(self, field, blob):
     if not blob:
         return ''
     try:
         blob = util.decode_binary(blob)
     except UnicodeDecodeError:
         return blob
     if self._maybe_json_object(blob):
         try:
             # Assume it's json, unless it fails parsing...
             return json.loads(blob)
         except (ValueError, TypeError) as e:
             LOG.warning("Field %s looked like a json object, but it"
                         " was not: %s", field, e)
     if blob.find("\n") != -1:
         return blob.splitlines()
     return blob
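The JSON guess above relies on a helper along these lines; a plausible sketch only, since the real check is a method on the decoder class:

def _maybe_json_object(text):
    # Treat anything wrapped in braces as a candidate JSON object.
    if not text:
        return False
    text = text.strip()
    return text.startswith("{") and text.endswith("}")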
Example #24
 def __call__(self, field, blob):
     if not blob:
         return blob
     try:
         blob = util.decode_binary(blob)
     except UnicodeDecodeError:
         return blob
     if self._maybe_json_object(blob):
         try:
             # Assume it's json, unless it fails parsing...
             return json.loads(blob)
         except (ValueError, TypeError) as e:
             LOG.warn("Field %s looked like a json object, but it was"
                      " not: %s", field, e)
     if blob.find("\n") != -1:
         return blob.splitlines()
     return blob
Example #25
def read_maas_seed_url(seed_url, read_file_or_url=None, timeout=None,
                       version=MD_VERSION, paths=None, retries=None):
    """
    Read the maas datasource at seed_url.
      read_file_or_url is a method that should provide an interface
      like util.read_file_or_url

    Expected format of seed_url is the following files:
      * <seed_url>/<version>/meta-data/instance-id
      * <seed_url>/<version>/meta-data/local-hostname
      * <seed_url>/<version>/user-data
    If version is None, then <version>/ will not be used.
    """
    if read_file_or_url is None:
        read_file_or_url = url_helper.read_file_or_url

    if seed_url.endswith("/"):
        seed_url = seed_url[:-1]

    md = {}
    for path, _dictname, binary, optional in DS_FIELDS:
        if version is None:
            url = "%s/%s" % (seed_url, path)
        else:
            url = "%s/%s/%s" % (seed_url, version, path)
        try:
            ssl_details = util.fetch_ssl_details(paths)
            resp = read_file_or_url(url, retries=retries, timeout=timeout,
                                    ssl_details=ssl_details)
            if resp.ok():
                if binary:
                    md[path] = resp.contents
                else:
                    md[path] = util.decode_binary(resp.contents)
            else:
                LOG.warning(("Fetching from %s resulted in"
                             " an invalid http code %s"), url, resp.code)
        except url_helper.UrlError as e:
            if e.code == 404 and not optional:
                raise MAASSeedDirMalformed(
                    "Missing required %s: %s" % (path, e))
            elif e.code != 404:
                raise e

    return check_seed_contents(md, seed_url)
Example #26
def decode(key, enc_type, data):
    """
    decode returns the decoded string value of data
    key is a string used to identify the data being decoded in log messages
    """
    LOG.debug("Getting encoded data for key=%s, enc=%s", key, enc_type)

    raw_data = None
    if enc_type in ["gzip+base64", "gz+b64"]:
        LOG.debug("Decoding %s format %s", enc_type, key)
        raw_data = util.decomp_gzip(util.b64d(data))
    elif enc_type in ["base64", "b64"]:
        LOG.debug("Decoding %s format %s", enc_type, key)
        raw_data = util.b64d(data)
    else:
        LOG.debug("Plain-text data %s", key)
        raw_data = data

    return util.decode_binary(raw_data)
Example #27
 def get_value(self, path, is_text, is_recursive=False):
     value = None
     try:
         url = self.metadata_address + path
         if is_recursive:
             url += '/?recursive=True'
         resp = url_helper.readurl(url=url, headers=self.headers)
     except url_helper.UrlError as exc:
         msg = "url %s raised exception %s"
         LOG.debug(msg, path, exc)
     else:
         if resp.code == 200:
             if is_text:
                 value = util.decode_binary(resp.contents)
             else:
                 value = resp.contents.decode('utf-8')
         else:
             LOG.debug("url %s returned code %s", path, resp.code)
     return value
Example #28
 def get_value(self, path, is_text, is_recursive=False):
     value = None
     try:
         url = self.metadata_address + path
         if is_recursive:
             url += '/?recursive=True'
         resp = url_helper.readurl(url=url, headers=HEADERS)
     except url_helper.UrlError as exc:
         msg = "url %s raised exception %s"
         LOG.debug(msg, path, exc)
     else:
         if resp.code == 200:
             if is_text:
                 value = util.decode_binary(resp.contents)
             else:
                 value = resp.contents.decode('utf-8')
         else:
             LOG.debug("url %s returned code %s", path, resp.code)
     return value
Example #29
    def _parse(self, blob):
        leaves = {}
        children = []
        blob = util.decode_binary(blob)

        if not blob:
            return (leaves, children)

        def has_children(item):
            if item.endswith("/"):
                return True
            else:
                return False

        def get_name(item):
            if item.endswith("/"):
                return item.rstrip("/")
            return item

        for field in blob.splitlines():
            field = field.strip()
            field_name = get_name(field)
            if not field or not field_name:
                continue
            # Don't materialize credentials
            if field_name == 'security-credentials':
                continue
            if has_children(field):
                if field_name not in children:
                    children.append(field_name)
            else:
                contents = field.split("=", 1)
                resource = field_name
                if len(contents) > 1:
                    # What a PITA...
                    (ident, sub_contents) = contents
                    ident = util.safe_int(ident)
                    if ident is not None:
                        resource = "%s/openssh-key" % (ident)
                        field_name = sub_contents
                leaves[field_name] = resource
        return (leaves, children)
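For illustration, feeding _parse an EC2-style index listing (contents are made up) splits it into children and leaves roughly like this:

blob = b"ami-id\nblock-device-mapping/\npublic-keys/\n0=my-key\n"
# Expected result for the blob above:
#   children == ['block-device-mapping', 'public-keys']
#   leaves == {'ami-id': 'ami-id', 'my-key': '0/openssh-key'}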
Example #31
def convert_string(raw_data, content_type=NOT_MULTIPART_TYPE):
    if not raw_data:
        raw_data = ''

    def create_binmsg(data, content_type):
        maintype, subtype = content_type.split("/", 1)
        msg = MIMEBase(maintype, subtype)
        msg.set_payload(data)
        return msg

    try:
        data = util.decode_binary(util.decomp_gzip(raw_data))
        if "mime-version:" in data[0:4096].lower():
            msg = util.message_from_string(data)
        else:
            msg = create_binmsg(data, content_type)
    except UnicodeDecodeError:
        msg = create_binmsg(raw_data, content_type)

    return msg
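A quick illustration of both branches, assuming util.decomp_gzip passes non-gzip input through unchanged and NOT_MULTIPART_TYPE is something like 'text/x-not-multipart':

script_msg = convert_string('#!/bin/sh\necho hi')
# -> a single MIME part of the default content type wrapping the script.
mime_msg = convert_string('MIME-Version: 1.0\n\nalready a MIME message')
# -> parsed as an existing message because of the "mime-version:" header.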
Example #32
def convert_string(raw_data, content_type=NOT_MULTIPART_TYPE):
    if not raw_data:
        raw_data = ""

    def create_binmsg(data, content_type):
        maintype, subtype = content_type.split("/", 1)
        msg = MIMEBase(maintype, subtype)
        msg.set_payload(data)
        return msg

    try:
        data = util.decode_binary(util.decomp_gzip(raw_data))
        if "mime-version:" in data[0:4096].lower():
            msg = util.message_from_string(data)
        else:
            msg = create_binmsg(data, content_type)
    except UnicodeDecodeError:
        msg = create_binmsg(raw_data, content_type)

    return msg
    def get_data(self, apply_filter=False):
        (is_do, droplet_id) = self._get_sysinfo()

        # only proceed if we know we are on DigitalOcean
        if not is_do:
            return False

        LOG.debug("reading metadata from {}".format(self.metadata_address))
        response = url_helper.readurl(self.metadata_address,
                                      timeout=self.timeout,
                                      sec_between=self.wait_retry,
                                      retries=self.retries)

        contents = util.decode_binary(response.contents)
        decoded = json.loads(contents)

        self.metadata = decoded
        self.metadata['instance-id'] = decoded.get('droplet_id', droplet_id)
        self.metadata['local-hostname'] = decoded.get('hostname', droplet_id)
        self.vendordata_raw = decoded.get("vendor_data", None)
        self.userdata_raw = decoded.get("user_data", None)
        return True
Example #34
def query_data_api_once(api_address, timeout, requests_session):
    """
    Retrieve user data or vendor data.

    Scaleway user/vendor data API returns HTTP/404 if user/vendor data is not
    set.

    This function calls `url_helper.readurl` but instead of considering
    HTTP/404 as an error that requires a retry, it considers it as empty
    user/vendor data.

    Also, be aware that the user-data/vendor-data API requires the source
    port to be below 1024 to ensure the client is root (since non-root users
    can't bind ports below 1024). If requests raises ConnectionError
    (EADDRINUSE), the caller should retry calling this function on another
    port.
    """
    try:
        resp = url_helper.readurl(
            api_address,
            data=None,
            timeout=timeout,
            # It's the caller's responsibility to call this function again
            # on exception. Don't let url_helper.readurl() retry by itself.
            retries=0,
            session=requests_session,
            # If the error is an HTTP/404 or a ConnectionError, raise right
            # away (handled by the except block below) instead of retrying.
            exception_cb=lambda _, exc: exc.code != 404 and (
                not isinstance(exc.cause, requests.exceptions.ConnectionError)
            )
        )
        return util.decode_binary(resp.contents)
    except url_helper.UrlError as exc:
        # Empty user data.
        if exc.code == 404:
            return None
        raise
Example #35
 def test_decode_binary_plain_text_with_hex(self):
     blob = 'BOOTABLE_FLAG=\x80init=/bin/systemd'
     text = util.decode_binary(blob)
     self.assertEqual(text, blob)
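The test relies on decode_binary returning already-decoded text unchanged; conceptually the helper is just a guarded decode, roughly (a simplified sketch, not cloud-init's exact implementation):

def decode_binary(blob, encoding='utf-8'):
    # Text passes through untouched; bytes are decoded to str.
    if isinstance(blob, str):
        return blob
    return blob.decode(encoding)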
Example #36
def handle(_name, cfg, cloud, log, _args):

    # remove the static keys from the pristine image
    if cfg.get("ssh_deletekeys", True):
        key_pth = os.path.join("/etc/ssh/", "ssh_host_*key*")
        for f in glob.glob(key_pth):
            try:
                util.del_file(f)
            except Exception:
                util.logexc(log, "Failed deleting key file %s", f)

    if "ssh_keys" in cfg:
        # if there are keys in cloud-config, use them
        for (key, val) in cfg["ssh_keys"].items():
            if key in CONFIG_KEY_TO_FILE:
                tgt_fn = CONFIG_KEY_TO_FILE[key][0]
                tgt_perms = CONFIG_KEY_TO_FILE[key][1]
                util.write_file(tgt_fn, val, tgt_perms)

        for (priv, pub) in PRIV_TO_PUB.items():
            if pub in cfg['ssh_keys'] or priv not in cfg['ssh_keys']:
                continue
            pair = (CONFIG_KEY_TO_FILE[priv][0], CONFIG_KEY_TO_FILE[pub][0])
            cmd = ['sh', '-xc', KEY_GEN_TPL % pair]
            try:
                # TODO(harlowja): Is this guard needed?
                with util.SeLinuxGuard("/etc/ssh", recursive=True):
                    subp.subp(cmd, capture=False)
                log.debug("Generated a key for %s from %s", pair[0], pair[1])
            except Exception:
                util.logexc(log, "Failed generated a key for %s from %s",
                            pair[0], pair[1])
    else:
        # if not, generate them
        genkeys = util.get_cfg_option_list(cfg, 'ssh_genkeytypes',
                                           GENERATE_KEY_NAMES)
        lang_c = os.environ.copy()
        lang_c['LANG'] = 'C'
        for keytype in genkeys:
            keyfile = KEY_FILE_TPL % (keytype)
            if os.path.exists(keyfile):
                continue
            util.ensure_dir(os.path.dirname(keyfile))
            cmd = ['ssh-keygen', '-t', keytype, '-N', '', '-f', keyfile]

            # TODO(harlowja): Is this guard needed?
            with util.SeLinuxGuard("/etc/ssh", recursive=True):
                try:
                    out, err = subp.subp(cmd, capture=True, env=lang_c)
                    sys.stdout.write(util.decode_binary(out))
                except subp.ProcessExecutionError as e:
                    err = util.decode_binary(e.stderr).lower()
                    if (e.exit_code == 1
                            and err.lower().startswith("unknown key")):
                        log.debug("ssh-keygen: unknown key type '%s'", keytype)
                    else:
                        util.logexc(
                            log, "Failed generating key type %s to "
                            "file %s", keytype, keyfile)

    if "ssh_publish_hostkeys" in cfg:
        host_key_blacklist = util.get_cfg_option_list(
            cfg["ssh_publish_hostkeys"], "blacklist",
            HOST_KEY_PUBLISH_BLACKLIST)
        publish_hostkeys = util.get_cfg_option_bool(
            cfg["ssh_publish_hostkeys"], "enabled", PUBLISH_HOST_KEYS)
    else:
        host_key_blacklist = HOST_KEY_PUBLISH_BLACKLIST
        publish_hostkeys = PUBLISH_HOST_KEYS

    if publish_hostkeys:
        hostkeys = get_public_host_keys(blacklist=host_key_blacklist)
        try:
            cloud.datasource.publish_host_keys(hostkeys)
        except Exception:
            util.logexc(log, "Publishing host keys failed!")

    try:
        (users, _groups) = ug_util.normalize_users_groups(cfg, cloud.distro)
        (user, _user_config) = ug_util.extract_default(users)
        disable_root = util.get_cfg_option_bool(cfg, "disable_root", True)
        disable_root_opts = util.get_cfg_option_str(cfg, "disable_root_opts",
                                                    ssh_util.DISABLE_USER_OPTS)

        keys = []
        if util.get_cfg_option_bool(cfg, 'allow_public_ssh_keys', True):
            keys = cloud.get_public_ssh_keys() or []
        else:
            log.debug('Skipping import of publish SSH keys per '
                      'config setting: allow_public_ssh_keys=False')

        if "ssh_authorized_keys" in cfg:
            cfgkeys = cfg["ssh_authorized_keys"]
            keys.extend(cfgkeys)

        apply_credentials(keys, user, disable_root, disable_root_opts)
    except Exception:
        util.logexc(log, "Applying SSH credentials failed!")
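The key handling above leans on a few module-level tables; the shapes below are illustrative only (cc_ssh's real tables cover every supported key type):

CONFIG_KEY_TO_FILE = {
    'rsa_private': ('/etc/ssh/ssh_host_rsa_key', 0o600),
    'rsa_public': ('/etc/ssh/ssh_host_rsa_key.pub', 0o644),
}
PRIV_TO_PUB = {'rsa_private': 'rsa_public'}
# The two %s placeholders are the private and public key paths.
KEY_GEN_TPL = 'o=$(ssh-keygen -yf "%s") && echo "$o" root@localhost > "%s"'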
Example #37
def handle(_name, cfg, cloud, log, _args):

    # remove the static keys from the pristine image
    if cfg.get("ssh_deletekeys", True):
        key_pth = os.path.join("/etc/ssh/", "ssh_host_*key*")
        for f in glob.glob(key_pth):
            try:
                util.del_file(f)
            except:
                util.logexc(log, "Failed deleting key file %s", f)

    if "ssh_keys" in cfg:
        # if there are keys in cloud-config, use them
        for (key, val) in cfg["ssh_keys"].items():
            if key in CONFIG_KEY_TO_FILE:
                tgt_fn = CONFIG_KEY_TO_FILE[key][0]
                tgt_perms = CONFIG_KEY_TO_FILE[key][1]
                util.write_file(tgt_fn, val, tgt_perms)

        for (priv, pub) in PRIV_TO_PUB.items():
            if pub in cfg['ssh_keys'] or priv not in cfg['ssh_keys']:
                continue
            pair = (CONFIG_KEY_TO_FILE[priv][0], CONFIG_KEY_TO_FILE[pub][0])
            cmd = ['sh', '-xc', KEY_GEN_TPL % pair]
            try:
                # TODO(harlowja): Is this guard needed?
                with util.SeLinuxGuard("/etc/ssh", recursive=True):
                    util.subp(cmd, capture=False)
                log.debug("Generated a key for %s from %s", pair[0], pair[1])
            except:
                util.logexc(log, "Failed generated a key for %s from %s",
                            pair[0], pair[1])
    else:
        # if not, generate them
        genkeys = util.get_cfg_option_list(cfg,
                                           'ssh_genkeytypes',
                                           GENERATE_KEY_NAMES)
        lang_c = os.environ.copy()
        lang_c['LANG'] = 'C'
        for keytype in genkeys:
            keyfile = KEY_FILE_TPL % (keytype)
            if os.path.exists(keyfile):
                continue
            util.ensure_dir(os.path.dirname(keyfile))
            cmd = ['ssh-keygen', '-t', keytype, '-N', '', '-f', keyfile]

            # TODO(harlowja): Is this guard needed?
            with util.SeLinuxGuard("/etc/ssh", recursive=True):
                try:
                    out, err = util.subp(cmd, capture=True, env=lang_c)
                    sys.stdout.write(util.decode_binary(out))
                except util.ProcessExecutionError as e:
                    err = util.decode_binary(e.stderr).lower()
                    if (e.exit_code == 1 and
                            err.lower().startswith("unknown key")):
                        log.debug("ssh-keygen: unknown key type '%s'", keytype)
                    else:
                        util.logexc(log, "Failed generating key type %s to "
                                    "file %s", keytype, keyfile)

    try:
        (users, _groups) = ds.normalize_users_groups(cfg, cloud.distro)
        (user, _user_config) = ds.extract_default(users)
        disable_root = util.get_cfg_option_bool(cfg, "disable_root", True)
        disable_root_opts = util.get_cfg_option_str(cfg, "disable_root_opts",
                                                    DISABLE_ROOT_OPTS)

        keys = cloud.get_public_ssh_keys() or []
        if "ssh_authorized_keys" in cfg:
            cfgkeys = cfg["ssh_authorized_keys"]
            keys.extend(cfgkeys)

        apply_credentials(keys, user, disable_root, disable_root_opts)
    except:
        util.logexc(log, "Applying ssh credentials failed!")
Example #38
def load_tfile_or_url(*args, **kwargs):
    """load_tfile_or_url
    load file and return content after decoding
    """
    return util.decode_binary(util.read_file_or_url(*args, **kwargs).contents)
Example #39
def load_tfile(*args, **kwargs):
    """load_tfile_or_url
    load file and return content after decoding
    """
    return util.decode_binary(util.read_file_or_url(*args, **kwargs).contents)
Example #40
    def get_data(self):
        # GCE metadata server requires a custom header since v1
        headers = {'X-Google-Metadata-Request': True}

        # url_map: (our-key, path, required, is_text)
        url_map = [
            ('instance-id', 'instance/id', True, True),
            ('availability-zone', 'instance/zone', True, True),
            ('local-hostname', 'instance/hostname', True, True),
            ('public-keys', 'project/attributes/sshKeys', False, True),
            ('user-data', 'instance/attributes/user-data', False, False),
            ('user-data-encoding', 'instance/attributes/user-data-encoding',
             False, True),
        ]

        # if we cannot resolve the metadata server, then no point in trying
        if not util.is_resolvable_url(self.metadata_address):
            LOG.debug("%s is not resolvable", self.metadata_address)
            return False

        # iterate over url_map keys to get metadata items
        found = False
        for (mkey, path, required, is_text) in url_map:
            try:
                resp = url_helper.readurl(url=self.metadata_address + path,
                                          headers=headers)
                if resp.code == 200:
                    found = True
                    if is_text:
                        self.metadata[mkey] = util.decode_binary(resp.contents)
                    else:
                        self.metadata[mkey] = resp.contents
                else:
                    if required:
                        msg = "required url %s returned code %s. not GCE"
                        if not found:
                            LOG.debug(msg, path, resp.code)
                        else:
                            LOG.warn(msg, path, resp.code)
                        return False
                    else:
                        self.metadata[mkey] = None
            except url_helper.UrlError as e:
                if required:
                    msg = "required url %s raised exception %s. not GCE"
                    if not found:
                        LOG.debug(msg, path, e)
                    else:
                        LOG.warn(msg, path, e)
                    return False
                msg = "Failed to get %s metadata item: %s."
                LOG.debug(msg, path, e)

                self.metadata[mkey] = None

        if self.metadata['public-keys']:
            lines = self.metadata['public-keys'].splitlines()
            self.metadata['public-keys'] = [self._trim_key(k) for k in lines]

        encoding = self.metadata.get('user-data-encoding')
        if encoding:
            if encoding == 'base64':
                self.metadata['user-data'] = b64decode(
                    self.metadata['user-data'])
            else:
                LOG.warn('unknown user-data-encoding: %s, ignoring', encoding)

        return found
Example #41
def load_tfile_or_url(*args, **kwargs):
    return (util.decode_binary(
        util.read_file_or_url(*args, **kwargs).contents))
Example #43
def handle(_name, cfg, cloud, log, _args):

    # remove the static keys from the pristine image
    if cfg.get("ssh_deletekeys", True):
        key_pth = os.path.join("/etc/ssh/", "ssh_host_*key*")
        for f in glob.glob(key_pth):
            try:
                util.del_file(f)
            except Exception:
                util.logexc(log, "Failed deleting key file %s", f)

    if "ssh_keys" in cfg:
        # if there are keys and/or certificates in cloud-config, use them
        for (key, val) in cfg["ssh_keys"].items():
            # skip entry if unrecognized
            if key not in CONFIG_KEY_TO_FILE:
                continue
            tgt_fn = CONFIG_KEY_TO_FILE[key][0]
            tgt_perms = CONFIG_KEY_TO_FILE[key][1]
            util.write_file(tgt_fn, val, tgt_perms)
            # set server to present the most recently identified certificate
            if "_certificate" in key:
                cert_config = {"HostCertificate": tgt_fn}
                ssh_util.update_ssh_config(cert_config)

        for (priv, pub) in PRIV_TO_PUB.items():
            if pub in cfg["ssh_keys"] or priv not in cfg["ssh_keys"]:
                continue
            pair = (CONFIG_KEY_TO_FILE[priv][0], CONFIG_KEY_TO_FILE[pub][0])
            cmd = ["sh", "-xc", KEY_GEN_TPL % pair]
            try:
                # TODO(harlowja): Is this guard needed?
                with util.SeLinuxGuard("/etc/ssh", recursive=True):
                    subp.subp(cmd, capture=False)
                log.debug("Generated a key for %s from %s", pair[0], pair[1])
            except Exception:
                util.logexc(
                    log,
                    "Failed generated a key for %s from %s",
                    pair[0],
                    pair[1],
                )
    else:
        # if not, generate them
        genkeys = util.get_cfg_option_list(cfg, "ssh_genkeytypes",
                                           GENERATE_KEY_NAMES)
        lang_c = os.environ.copy()
        lang_c["LANG"] = "C"
        for keytype in genkeys:
            keyfile = KEY_FILE_TPL % (keytype)
            if os.path.exists(keyfile):
                continue
            util.ensure_dir(os.path.dirname(keyfile))
            cmd = ["ssh-keygen", "-t", keytype, "-N", "", "-f", keyfile]

            # TODO(harlowja): Is this guard needed?
            with util.SeLinuxGuard("/etc/ssh", recursive=True):
                try:
                    out, err = subp.subp(cmd, capture=True, env=lang_c)
                    if not util.get_cfg_option_bool(cfg, "ssh_quiet_keygen",
                                                    False):
                        sys.stdout.write(util.decode_binary(out))

                    gid = util.get_group_id("ssh_keys")
                    if gid != -1:
                        # perform same "sanitize permissions" as sshd-keygen
                        os.chown(keyfile, -1, gid)
                        os.chmod(keyfile, 0o640)
                        os.chmod(keyfile + ".pub", 0o644)
                except subp.ProcessExecutionError as e:
                    err = util.decode_binary(e.stderr).lower()
                    if e.exit_code == 1 and err.lower().startswith(
                            "unknown key"):
                        log.debug("ssh-keygen: unknown key type '%s'", keytype)
                    else:
                        util.logexc(
                            log,
                            "Failed generating key type %s to file %s",
                            keytype,
                            keyfile,
                        )

    if "ssh_publish_hostkeys" in cfg:
        host_key_blacklist = util.get_cfg_option_list(
            cfg["ssh_publish_hostkeys"],
            "blacklist",
            HOST_KEY_PUBLISH_BLACKLIST,
        )
        publish_hostkeys = util.get_cfg_option_bool(
            cfg["ssh_publish_hostkeys"], "enabled", PUBLISH_HOST_KEYS)
    else:
        host_key_blacklist = HOST_KEY_PUBLISH_BLACKLIST
        publish_hostkeys = PUBLISH_HOST_KEYS

    if publish_hostkeys:
        hostkeys = get_public_host_keys(blacklist=host_key_blacklist)
        try:
            cloud.datasource.publish_host_keys(hostkeys)
        except Exception:
            util.logexc(log, "Publishing host keys failed!")

    try:
        (users, _groups) = ug_util.normalize_users_groups(cfg, cloud.distro)
        (user, _user_config) = ug_util.extract_default(users)
        disable_root = util.get_cfg_option_bool(cfg, "disable_root", True)
        disable_root_opts = util.get_cfg_option_str(cfg, "disable_root_opts",
                                                    ssh_util.DISABLE_USER_OPTS)

        keys = []
        if util.get_cfg_option_bool(cfg, "allow_public_ssh_keys", True):
            keys = cloud.get_public_ssh_keys() or []
        else:
            log.debug("Skipping import of publish SSH keys per "
                      "config setting: allow_public_ssh_keys=False")

        if "ssh_authorized_keys" in cfg:
            cfgkeys = cfg["ssh_authorized_keys"]
            keys.extend(cfgkeys)

        apply_credentials(keys, user, disable_root, disable_root_opts)
    except Exception:
        util.logexc(log, "Applying SSH credentials failed!")
def load_tfile_or_url(*args, **kwargs):
    return(util.decode_binary(util.read_file_or_url(*args, **kwargs).contents))