Example #1
    def test_get_utf16_content(self):
        filename = 'heat.utf16'
        content = b'//4tTkhTCgA=\n'
        # utf16 has '\0' in stream
        self.assertIn(b'\0', base64.decode_as_bytes(content))
        decoded_content = base64.decode_as_bytes(content)
        self.assertRaises(UnicodeDecodeError, decoded_content.decode)
        self.check_non_utf8_content(filename=filename, content=content)
Example #2
    def test_get_gb18030_content(self):
        filename = 'heat.gb18030'
        content = b'1tDO5wo=\n'
        # gb18030 has no '\0' in stream
        self.assertNotIn(b'\0', base64.decode_as_bytes(content))
        decoded_content = base64.decode_as_bytes(content)
        self.assertRaises(UnicodeDecodeError, decoded_content.decode)
        self.check_non_utf8_content(filename=filename, content=content)
Example #3
def _validate_base64_format(instance):
    try:
        if isinstance(instance, six.text_type):
            instance = instance.encode('utf-8')
        base64.decode_as_bytes(instance)
    except TypeError:
        # If instance isn't a string (or bytes) type, decode_as_bytes
        # raises TypeError here.
        return False

    return True
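
For comparison, a minimal standalone sketch of the same validate-by-decoding pattern using only the standard library (an assumption here: plain base64.b64decode stands in for oslo_serialization.base64.decode_as_bytes, and on Python 3 malformed input surfaces as binascii.Error rather than TypeError):

import base64
import binascii


def is_base64(value):
    """Return True if value decodes as base64, False otherwise."""
    try:
        if isinstance(value, str):
            value = value.encode('utf-8')
        # validate=True rejects non-alphabet characters instead of
        # silently discarding them.
        base64.b64decode(value, validate=True)
    except (TypeError, binascii.Error):
        # TypeError for non string/bytes input, binascii.Error for
        # malformed base64 on Python 3.
        return False
    return True


print(is_base64('dGV4dA=='))    # True
print(is_base64('not base64'))  # False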
Example #4
    def test_get_zip_content(self):
        filename = 'heat.zip'
        content = b'''\
UEsDBAoAAAAAAEZZWkRbOAuBBQAAAAUAAAAIABwAaGVhdC50eHRVVAkAAxRbDVNYh\
t9SdXgLAAEE\n6AMAAATpAwAAaGVhdApQSwECHgMKAAAAAABGWVpEWzgLgQUAAAAF\
AAAACAAYAAAAAAABAAAApIEA\nAAAAaGVhdC50eHRVVAUAAxRbDVN1eAsAAQToAwA\
ABOkDAABQSwUGAAAAAAEAAQBOAAAARwAAAAAA\n'''
        # zip has '\0' in stream
        self.assertIn(b'\0', base64.decode_as_bytes(content))
        decoded_content = base64.decode_as_bytes(content)
        self.assertRaises(UnicodeDecodeError, decoded_content.decode)
        self.check_non_utf8_content(filename=filename, content=content)
Example #5
    def _validate_and_build_base_options(self, context, flavor,
                                         image_uuid, name, description,
                                         availability_zone, metadata,
                                         requested_networks, user_data,
                                         key_name, max_count, partitions):
        """Verify all the input parameters"""

        if user_data:
            l = len(user_data)
            if l > MAX_USERDATA_SIZE:
                raise exception.ServerUserDataTooLarge(
                    length=l, maxsize=MAX_USERDATA_SIZE)

            try:
                base64utils.decode_as_bytes(user_data)
            except TypeError:
                raise exception.ServerUserDataMalformed()

        # Note:  max_count is the number of servers requested by the user,
        # max_network_count is the maximum number of servers taking into
        # account any network quotas
        max_network_count = self._check_requested_networks(context,
                                                           requested_networks,
                                                           max_count)

        if key_name is not None:
            key_pair = objects.KeyPair.get_by_name(context,
                                                   context.user_id,
                                                   key_name)
        else:
            key_pair = None

        base_options = {
            'image_uuid': image_uuid,
            'status': states.BUILDING,
            'user_id': context.user,
            'project_id': context.tenant,
            'power_state': states.NOSTATE,
            'flavor_uuid': flavor['uuid'],
            'name': name,
            'description': description,
            'metadata': metadata or {},
            'partitions': partitions or {},
            'availability_zone': availability_zone,
            'key_name': key_name}

        # return the validated options
        return base_options, max_network_count, key_pair
Example #6
    def check_non_utf8_content(self, filename, content):
        base_url = 'file:///tmp'
        url = '%s/%s' % (base_url, filename)
        template = {
            'resources': {
                'one_init': {
                    'type': 'OS::Heat::CloudConfig',
                    'properties': {
                        'cloud_config': {
                            'write_files': [{
                                'path': '/tmp/%s' % filename,
                                'content': {
                                    'get_file': url
                                },
                                'encoding': 'b64'
                            }]
                        }
                    }
                }
            }
        }
        with mock.patch('six.moves.urllib.request.urlopen') as mock_url:
            raw_content = base64.decode_as_bytes(content)
            response = six.BytesIO(raw_content)
            mock_url.return_value = response
            files = {}
            template_utils.resolve_template_get_files(
                template, files, base_url)
            self.assertEqual({url: content}, files)
            mock_url.assert_called_with(url)
Example #7
def _get_configdrive(configdrive, node_uuid, tempdir=None):
    """Get the information about size and location of the configdrive.

    :param configdrive: Base64 encoded Gzipped configdrive content or
        configdrive HTTP URL.
    :param node_uuid: Node's uuid. Used for logging.
    :param tempdir: temporary directory for the temporary configdrive file
    :raises: InstanceDeployFailure if it can't download or decode the
        config drive.
    :returns: A tuple with the size in MiB and path to the uncompressed
        configdrive file.

    """
    # Check if the configdrive option is an HTTP URL or the content itself
    is_url = utils.is_http_url(configdrive)
    if is_url:
        try:
            data = requests.get(configdrive).content
        except requests.exceptions.RequestException as e:
            raise exception.InstanceDeployFailure(
                _("Can't download the configdrive content for node %(node)s "
                  "from '%(url)s'. Reason: %(reason)s") %
                {'node': node_uuid, 'url': configdrive, 'reason': e})
    else:
        data = configdrive

    try:
        data = six.BytesIO(base64.decode_as_bytes(data))
    except TypeError:
        error_msg = (_('Config drive for node %s is not base64 encoded '
                       'or the content is malformed.') % node_uuid)
        if is_url:
            error_msg += _(' Downloaded from "%s".') % configdrive
        raise exception.InstanceDeployFailure(error_msg)

    configdrive_file = tempfile.NamedTemporaryFile(delete=False,
                                                   prefix='configdrive',
                                                   dir=tempdir)
    configdrive_mb = 0
    with gzip.GzipFile('configdrive', 'rb', fileobj=data) as gunzipped:
        try:
            shutil.copyfileobj(gunzipped, configdrive_file)
        except EnvironmentError as e:
            # Delete the created file
            utils.unlink_without_raise(configdrive_file.name)
            raise exception.InstanceDeployFailure(
                _('Encountered error while decompressing and writing '
                  'config drive for node %(node)s. Error: %(exc)s') %
                {'node': node_uuid, 'exc': e})
        else:
            # Get the file size and convert to MiB
            configdrive_file.seek(0, os.SEEK_END)
            bytes_ = configdrive_file.tell()
            configdrive_mb = int(math.ceil(float(bytes_) / units.Mi))
        finally:
            configdrive_file.close()

        return (configdrive_mb, configdrive_file.name)
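
For orientation, a minimal stdlib-only sketch of the decode-then-gunzip step above (assuming, as the docstring says, that the payload is a gzipped blob that was base64-encoded; base64.b64decode and gzip stand in for the oslo and ironic helpers):

import base64
import gzip
import io


def gunzip_b64_blob(encoded):
    """Return the uncompressed bytes of a base64-encoded gzip blob."""
    compressed = io.BytesIO(base64.b64decode(encoded))
    with gzip.GzipFile(fileobj=compressed, mode='rb') as gunzipped:
        return gunzipped.read()


# Round trip: gzip some bytes, base64-encode them, then recover them.
blob = base64.b64encode(gzip.compress(b'configdrive contents'))
assert gunzip_b64_blob(blob) == b'configdrive contents'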
Example #8
    def test_gzip_and_b64encode(self):
        contents = b'Squidward Tentacles'
        io_dict = {'fake-name': io.BytesIO(bytes(contents))}
        data = utils.gzip_and_b64encode(io_dict=io_dict)

        res = io.BytesIO(base64.decode_as_bytes(data))
        with tarfile.open(fileobj=res) as tar:
            members = [(m.name, m.size) for m in tar]
            self.assertEqual([('fake-name', len(contents))], members)

            member = tar.extractfile('fake-name')
            self.assertEqual(contents, member.read())
Example #9
    def test_gzip_and_b64encode(self):
        contents = b'Squidward Tentacles'
        io_dict = {'fake-name': io.BytesIO(bytes(contents))}
        data = utils.gzip_and_b64encode(io_dict=io_dict)
        self.assertIsInstance(data, six.text_type)

        res = io.BytesIO(base64.decode_as_bytes(data))
        with tarfile.open(fileobj=res) as tar:
            members = [(m.name, m.size) for m in tar]
            self.assertEqual([('fake-name', len(contents))], members)

            member = tar.extractfile('fake-name')
            self.assertEqual(contents, member.read())
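
Since utils.gzip_and_b64encode itself is not shown, here is a hedged stdlib sketch of what such a helper could look like to satisfy these tests (a name -> BytesIO mapping tarred, gzipped, and returned as base64 text); it mirrors, but is not, the project's implementation:

import base64
import io
import tarfile


def gzip_and_b64encode(io_dict):
    """Tar and gzip the given name -> BytesIO mapping; return base64 text."""
    fp = io.BytesIO()
    with tarfile.open(fileobj=fp, mode='w:gz') as tar:
        for name, fileobj in io_dict.items():
            fileobj.seek(0, io.SEEK_END)
            info = tarfile.TarInfo(name=name)
            info.size = fileobj.tell()
            fileobj.seek(0)
            tar.addfile(info, fileobj)
    return base64.b64encode(fp.getvalue()).decode('ascii')


data = gzip_and_b64encode({'fake-name': io.BytesIO(b'Squidward Tentacles')})
res = io.BytesIO(base64.b64decode(data))
with tarfile.open(fileobj=res) as tar:
    assert tar.extractfile('fake-name').read() == b'Squidward Tentacles'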
Example #10
    def get(self, key, default=None):
        try:
            name = key.to_text(True)
            if six.PY3 and isinstance(name, bytes):
                name = name.decode('utf-8')
            criterion = {'name': name}
            tsigkey = self.storage.find_tsigkey(
                context.get_current(), criterion)

            return base64.decode_as_bytes(tsigkey.secret)

        except exceptions.TsigKeyNotFound:
            return default
Example #11
def get_signature(signature_data):
    """Decode the signature data and returns the signature.

    :param siganture_data: the base64-encoded signature data
    :returns: the decoded signature
    :raises: SignatureVerificationError if the signature data is malformatted
    """
    try:
        signature = base64.decode_as_bytes(signature_data)
    except (TypeError, binascii.Error):
        raise exception.SignatureVerificationError("The signature data was not properly encoded using base64")

    return signature
Example #12
    def get(self, key, default=None):
        try:
            name = key.to_text(True)
            if six.PY3 and isinstance(name, bytes):
                name = name.decode('utf-8')
            criterion = {'name': name}
            tsigkey = self.storage.find_tsigkey(context.get_current(),
                                                criterion)

            return base64.decode_as_bytes(tsigkey.secret)

        except exceptions.TsigKeyNotFound:
            return default
Example #13
def get_signature(signature_data):
    """Decode the signature data and returns the signature.

    :param siganture_data: the base64-encoded signature data
    :returns: the decoded signature
    :raises: SignatureVerificationError if the signature data is malformatted
    """
    try:
        signature = base64.decode_as_bytes(signature_data)
    except (TypeError, binascii.Error):
        raise exception.SignatureVerificationError(
            'The signature data was not properly encoded using base64')

    return signature
Example #14
def _store_logs(introspection_data, node_info):
    logs = introspection_data.get('logs')
    if not logs:
        LOG.warning('No logs were passed by the ramdisk',
                    data=introspection_data,
                    node_info=node_info)
        return

    if not CONF.processing.ramdisk_logs_dir:
        LOG.warning(
            'Failed to store logs received from the ramdisk '
            'because ramdisk_logs_dir configuration option '
            'is not set',
            data=introspection_data,
            node_info=node_info)
        return

    fmt_args = {
        'uuid': node_info.uuid if node_info is not None else 'unknown',
        'mac': (utils.get_pxe_mac(introspection_data)
                or 'unknown').replace(':', ''),
        'dt': datetime.datetime.utcnow(),
        'bmc': (utils.get_ipmi_address_from_data(introspection_data)
                or 'unknown')
    }

    file_name = CONF.processing.ramdisk_logs_filename_format.format(
        **fmt_args)

    try:
        if not os.path.exists(CONF.processing.ramdisk_logs_dir):
            os.makedirs(CONF.processing.ramdisk_logs_dir)
        with open(os.path.join(CONF.processing.ramdisk_logs_dir, file_name),
                  'wb') as fp:
            fp.write(base64.decode_as_bytes(logs))
    except EnvironmentError:
        LOG.exception('Could not store the ramdisk logs',
                      data=introspection_data,
                      node_info=node_info)
    else:
        LOG.info('Ramdisk logs were stored in file %s',
                 file_name,
                 data=introspection_data,
                 node_info=node_info)
Example #15
    def __init__(self,
                 server,
                 content=None,
                 user_data=None,
                 key_pair=None,
                 extra_md=None):
        """Creation of this object should basically cover all time consuming
        collection.  Methods after that should not cause time delays due to
        network operations or lengthy cpu operations.

        The user should then get a single server and make multiple method
        calls on it.
        """
        if not content:
            content = []

        self.server = server
        self.extra_md = extra_md
        self.availability_zone = server.availability_zone

        if user_data is not None:
            self.userdata_raw = base64.decode_as_bytes(user_data)
        else:
            self.userdata_raw = None

        # TODO(zhenguo): Add hostname to server object
        self.hostname = server.name
        self.uuid = server.uuid
        self.content = {}
        self.files = []
        self.keypair = key_pair

        # 'content' is passed in from the configdrive code in
        # mogan/engine/flows/create_server.py. That's how we get the
        # injected files (personalities) in.
        for (path, contents) in content:
            key = "%04i" % len(self.content)
            self.files.append({
                'path': path,
                'content_path': "/%s/%s" % (CONTENT_DIR, key)
            })
            self.content[key] = contents

        self.route_configuration = None
Example #16
    def test_get_log_file_data_as_encoded_content(self):
        log_file_content = b'Sample Data for testing SUM log output'
        file_object = tempfile.NamedTemporaryFile(delete=False)
        file_object.write(log_file_content)
        file_object.close()
        sum_controller.OUTPUT_FILES = [file_object.name]

        base64_encoded_text = (
            sum_controller._get_log_file_data_as_encoded_content())

        tar_gzipped_content = base64.decode_as_bytes(base64_encoded_text)
        tar_file = tempfile.NamedTemporaryFile(suffix='.tar.gz', delete=False)
        tar_file.write(tar_gzipped_content)
        tar_file.close()

        with tarfile.open(name=tar_file.name) as tar:
            f = tar.extractfile(file_object.name.lstrip('/'))
            self.assertEqual(log_file_content, f.read())
        os.remove(file_object.name)
        os.remove(tar_file.name)
Example #17
def store_ramdisk_logs(node, logs, label=None):
    """Store the ramdisk logs.

    This method stores the ramdisk logs according to the configured
    storage backend.

    :param node: A node object.
    :param logs: A gzipped and base64 encoded string containing the
                 logs archive.
    :param label: A string to label the log file such as a clean step name.
    :raises: OSError if the directory to save the logs cannot be created.
    :raises: IOError when the logs can't be saved to the local file system.
    :raises: SwiftOperationError, if any operation with Swift fails.

    """
    logs_file_name = get_ramdisk_logs_file_name(node, label=label)
    data = base64.decode_as_bytes(logs)

    if CONF.agent.deploy_logs_storage_backend == 'local':
        if not os.path.exists(CONF.agent.deploy_logs_local_path):
            os.makedirs(CONF.agent.deploy_logs_local_path)

        log_path = os.path.join(CONF.agent.deploy_logs_local_path,
                                logs_file_name)
        with open(log_path, 'wb') as f:
            f.write(data)

    elif CONF.agent.deploy_logs_storage_backend == 'swift':
        with tempfile.NamedTemporaryFile(dir=CONF.tempdir) as f:
            f.write(data)
            f.flush()

            # convert days to seconds
            timeout = CONF.agent.deploy_logs_swift_days_to_expire * 86400
            object_headers = {'X-Delete-After': str(timeout)}
            swift_api = swift.SwiftAPI()
            swift_api.create_object(CONF.agent.deploy_logs_swift_container,
                                    logs_file_name,
                                    f.name,
                                    object_headers=object_headers)
Example #18
def store_ramdisk_logs(node, logs, label=None):
    """Store the ramdisk logs.

    This method stores the ramdisk logs according to the configured
    storage backend.

    :param node: A node object.
    :param logs: A gzipped and base64 encoded string containing the
                 logs archive.
    :param label: A string to label the log file such as a clean step name.
    :raises: OSError if the directory to save the logs cannot be created.
    :raises: IOError when the logs can't be saved to the local file system.
    :raises: SwiftOperationError, if any operation with Swift fails.

    """
    logs_file_name = get_ramdisk_logs_file_name(node, label=label)
    data = base64.decode_as_bytes(logs)

    if CONF.agent.deploy_logs_storage_backend == 'local':
        if not os.path.exists(CONF.agent.deploy_logs_local_path):
            os.makedirs(CONF.agent.deploy_logs_local_path)

        log_path = os.path.join(CONF.agent.deploy_logs_local_path,
                                logs_file_name)
        with open(log_path, 'wb') as f:
            f.write(data)

    elif CONF.agent.deploy_logs_storage_backend == 'swift':
        with tempfile.NamedTemporaryFile(dir=CONF.tempdir) as f:
            f.write(data)
            f.flush()

            # convert days to seconds
            timeout = CONF.agent.deploy_logs_swift_days_to_expire * 86400
            object_headers = {'X-Delete-After': str(timeout)}
            swift_api = swift.SwiftAPI()
            swift_api.create_object(
                CONF.agent.deploy_logs_swift_container, logs_file_name,
                f.name, object_headers=object_headers)
Example #19
def denormalize_after_decryption(unencrypted, content_type):
    """Translate the decrypted data into the desired content type.

    This is called when the raw keys are requested by the user. The secret
    returned from the SecretStore is the unencrypted parameter. This
    'denormalizes' the data back to its binary format.
    """

    # Process plain-text type.
    if content_type in mime_types.PLAIN_TEXT:
        # normalize text to binary string
        try:
            unencrypted = base64.decode_as_text(unencrypted)
        except UnicodeDecodeError:
            raise s.SecretAcceptNotSupportedException(content_type)

    # Process binary type.
    elif content_type in mime_types.BINARY:
        unencrypted = base64.decode_as_bytes(unencrypted)
    else:
        raise s.SecretContentTypeNotSupportedException(content_type)

    return unencrypted
Example #20
def denormalize_after_decryption(unencrypted, content_type):
    """Translate the decrypted data into the desired content type.

    This is called when the raw keys are requested by the user. The secret
    returned from the SecretStore is the unencrypted parameter. This
    'denormalizes' the data back to its binary format.
    """

    # Process plain-text type.
    if content_type in mime_types.PLAIN_TEXT:
        # normalize text to binary string
        try:
            unencrypted = base64.decode_as_text(unencrypted)
        except UnicodeDecodeError:
            raise s.SecretAcceptNotSupportedException(content_type)

    # Process binary type.
    elif content_type in mime_types.BINARY:
        unencrypted = base64.decode_as_bytes(unencrypted)
    else:
        raise s.SecretContentTypeNotSupportedException(content_type)

    return unencrypted
Example #21
    def deserialize(self, stream):
        return base64.decode_as_bytes(stream)
Example #22
def _prepare_iso_image(task,
                       kernel_href,
                       ramdisk_href,
                       bootloader_href=None,
                       configdrive=None,
                       root_uuid=None,
                       params=None,
                       base_iso=None):
    """Prepare an ISO to boot the node.

    Build bootable ISO out of `kernel_href` and `ramdisk_href` (and
    `bootloader` if it's UEFI boot), then push built image up to Swift and
    return a temporary URL.

    If `configdrive` is specified it will be eventually written onto
    the boot ISO image.

    :param task: a TaskManager instance containing the node to act on.
    :param kernel_href: URL or Glance UUID of the kernel to use
    :param ramdisk_href: URL or Glance UUID of the ramdisk to use
    :param bootloader_href: URL or Glance UUID of the EFI bootloader
         image to use when creating a UEFI bootable ISO
    :param configdrive: URL to or a compressed blob of an ISO9660 or
        FAT-formatted OpenStack config drive image. This image will be
        written onto the built ISO image. Optional.
    :param root_uuid: optional uuid of the root partition.
    :param params: a dictionary containing 'parameter name'->'value'
        mapping to be passed to kernel command line.
    :returns: bootable ISO HTTP URL.
    :raises: MissingParameterValue, if any of the required parameters are
        missing.
    :raises: InvalidParameterValue, if any of the parameters have invalid
        value.
    :raises: ImageCreationFailed, if creating ISO image failed.
    """
    if (not kernel_href or not ramdisk_href) and not base_iso:
        raise exception.InvalidParameterValue(
            _("Unable to find kernel, ramdisk for "
              "building ISO, or explicit ISO for %(node)s") %
            {'node': task.node.uuid})

    img_handler = ImageHandler(task.node.driver)
    k_param = img_handler.kernel_params

    i_info = task.node.instance_info

    # NOTE(TheJulia): Until we support modifying a base iso, most of
    # this logic actually does nothing in the end. But it should!
    if deploy_utils.get_boot_option(task.node) == "ramdisk":
        if not base_iso:
            kernel_params = "root=/dev/ram0 text "
            kernel_params += i_info.get("ramdisk_kernel_arguments", "")
        else:
            kernel_params = None

    else:
        kernel_params = i_info.get('kernel_append_params', k_param)

    if params and not base_iso:
        kernel_params = ' '.join(
            (kernel_params, ' '.join('%s=%s' % kv for kv in params.items())))

    boot_mode = boot_mode_utils.get_boot_mode_for_deploy(task.node)

    LOG.debug(
        "Trying to create %(boot_mode)s ISO image for node %(node)s "
        "with kernel %(kernel_href)s, ramdisk %(ramdisk_href)s, "
        "bootloader %(bootloader_href)s and kernel params %(params)s"
        "", {
            'node': task.node.uuid,
            'boot_mode': boot_mode,
            'kernel_href': kernel_href,
            'ramdisk_href': ramdisk_href,
            'bootloader_href': bootloader_href,
            'params': kernel_params
        })

    with tempfile.NamedTemporaryFile(dir=CONF.tempdir,
                                     suffix='.iso') as boot_fileobj:

        with tempfile.NamedTemporaryFile(dir=CONF.tempdir,
                                         suffix='.img') as cfgdrv_fileobj:

            configdrive_href = configdrive

            # FIXME(TheJulia): This is treated as conditional with
            # a base_iso as the intent, eventually, is to support
            # injection into the supplied image.

            if configdrive and not base_iso:
                parsed_url = urlparse.urlparse(configdrive)
                if not parsed_url.scheme:
                    cfgdrv_blob = base64.decode_as_bytes(configdrive)

                    with open(cfgdrv_fileobj.name, 'wb') as f:
                        f.write(cfgdrv_blob)

                    configdrive_href = urlparse.urlunparse(
                        ('file', '', cfgdrv_fileobj.name, '', '', ''))

                LOG.debug(
                    "Built configdrive out of configdrive blob "
                    "for node %(node)s", {'node': task.node.uuid})

            boot_iso_tmp_file = boot_fileobj.name
            images.create_boot_iso(task.context,
                                   boot_iso_tmp_file,
                                   kernel_href,
                                   ramdisk_href,
                                   esp_image_href=bootloader_href,
                                   configdrive_href=configdrive_href,
                                   root_uuid=root_uuid,
                                   kernel_params=kernel_params,
                                   boot_mode=boot_mode,
                                   base_iso=base_iso)

            iso_object_name = _get_iso_image_name(task.node)

            image_url = img_handler.publish_image(boot_iso_tmp_file,
                                                  iso_object_name)

    LOG.debug(
        "Created ISO %(name)s in object store for node %(node)s, "
        "exposed as temporary URL "
        "%(url)s", {
            'node': task.node.uuid,
            'name': iso_object_name,
            'url': image_url
        })

    return image_url
Example #23
    def __init__(self, instance, address=None, content=None, extra_md=None,
                 network_info=None, network_metadata=None,
                 request_context=None):
        """Creation of this object should basically cover all time consuming
        collection.  Methods after that should not cause time delays due to
        network operations or lengthy cpu operations.

        The user should then get a single instance and make multiple method
        calls on it.
        """
        if not content:
            content = []

        ctxt = context.get_admin_context()

        # NOTE(danms): Sanitize the instance to limit the amount of stuff
        # inside that may not pickle well (i.e. context). We also touch
        # some of the things we'll lazy load later to make sure we keep their
        # values in what we cache.
        instance.ec2_ids
        instance.keypairs
        instance.device_metadata
        instance = objects.Instance.obj_from_primitive(
            instance.obj_to_primitive())

        # The default value of mimeType is set to MIME_TYPE_TEXT_PLAIN
        self.set_mimetype(MIME_TYPE_TEXT_PLAIN)
        self.instance = instance
        self.extra_md = extra_md

        self.availability_zone = instance.get('availability_zone')

        secgroup_api = openstack_driver.get_openstack_security_group_driver()
        self.security_groups = secgroup_api.get_instance_security_groups(
            ctxt, instance)

        self.mappings = _format_instance_mapping(ctxt, instance)

        if instance.user_data is not None:
            self.userdata_raw = base64.decode_as_bytes(instance.user_data)
        else:
            self.userdata_raw = None

        self.address = address

        # expose instance metadata.
        self.launch_metadata = utils.instance_meta(instance)

        self.password = password.extract_password(instance)

        self.uuid = instance.uuid

        self.content = {}
        self.files = []

        # get network info, and the rendered network template
        if network_info is None:
            network_info = instance.info_cache.network_info

        # expose network metadata
        if network_metadata is None:
            self.network_metadata = netutils.get_network_metadata(network_info)
        else:
            self.network_metadata = network_metadata

        self.ip_info = netutils.get_ec2_ip_info(network_info)

        self.network_config = None
        cfg = netutils.get_injected_network_template(network_info)

        if cfg:
            key = "%04i" % len(self.content)
            self.content[key] = cfg
            self.network_config = {"name": "network_config",
                'content_path': "/%s/%s" % (CONTENT_DIR, key)}

        # 'content' is passed in from the configdrive code in
        # nova/virt/libvirt/driver.py.  That's how we get the injected files
        # (personalities) in. AFAIK they're not stored in the db at all,
        # so are not available later (web service metadata time).
        for (path, contents) in content:
            key = "%04i" % len(self.content)
            self.files.append({'path': path,
                'content_path': "/%s/%s" % (CONTENT_DIR, key)})
            self.content[key] = contents

        self.route_configuration = None

        # NOTE(mikal): the decision to not pass extra_md here like we
        # do to the StaticJSON driver is deliberate. extra_md will
        # contain the admin password for the instance, and we shouldn't
        # pass that to external services.
        self.vendordata_providers = {
            'StaticJSON': vendordata_json.JsonFileVendorData(
                instance=instance, address=address,
                extra_md=extra_md, network_info=network_info),
            'DynamicJSON': vendordata_dynamic.DynamicVendorData(
                instance=instance, address=address,
                network_info=network_info, context=request_context)
        }
Example #24
    def test_denormalize_pem(self, encoded_pem, content_type):
        denorm_secret = self.denormalize(encoded_pem, content_type)
        self.assertEqual(base64.decode_as_bytes(encoded_pem), denorm_secret)
Example #25
    def test_decode_as_bytes(self):
        self.assertEqual(b'text',
                         base64.decode_as_bytes(b'dGV4dA=='))
        self.assertEqual(b'text',
                         base64.decode_as_bytes(u'dGV4dA=='))
Example #26
    def __init__(self, instance, address=None, content=None, extra_md=None,
                 network_info=None, network_metadata=None,
                 request_context=None):
        """Creation of this object should basically cover all time consuming
        collection.  Methods after that should not cause time delays due to
        network operations or lengthy cpu operations.

        The user should then get a single instance and make multiple method
        calls on it.
        """
        if not content:
            content = []

        ctxt = context.get_admin_context()

        # NOTE(danms): Sanitize the instance to limit the amount of stuff
        # inside that may not pickle well (i.e. context). We also touch
        # some of the things we'll lazy load later to make sure we keep their
        # values in what we cache.
        instance.ec2_ids
        instance.keypairs
        instance.device_metadata
        instance = objects.Instance.obj_from_primitive(
            instance.obj_to_primitive())

        # The default value of mimeType is set to MIME_TYPE_TEXT_PLAIN
        self.set_mimetype(MIME_TYPE_TEXT_PLAIN)
        self.instance = instance
        self.extra_md = extra_md

        self.availability_zone = instance.get('availability_zone')

        secgroup_api = openstack_driver.get_openstack_security_group_driver()
        self.security_groups = secgroup_api.get_instance_security_groups(
            ctxt, instance)

        self.mappings = _format_instance_mapping(ctxt, instance)

        if instance.user_data is not None:
            self.userdata_raw = base64.decode_as_bytes(instance.user_data)
        else:
            self.userdata_raw = None

        self.address = address

        # expose instance metadata.
        self.launch_metadata = utils.instance_meta(instance)

        self.password = password.extract_password(instance)

        self.uuid = instance.uuid

        self.content = {}
        self.files = []

        # get network info, and the rendered network template
        if network_info is None:
            network_info = instance.info_cache.network_info

        # expose network metadata
        if network_metadata is None:
            self.network_metadata = netutils.get_network_metadata(network_info)
        else:
            self.network_metadata = network_metadata

        self.ip_info = ec2utils.get_ip_info_for_instance_from_nw_info(
            network_info)

        self.network_config = None
        cfg = netutils.get_injected_network_template(network_info)

        if cfg:
            key = "%04i" % len(self.content)
            self.content[key] = cfg
            self.network_config = {"name": "network_config",
                'content_path': "/%s/%s" % (CONTENT_DIR, key)}

        # 'content' is passed in from the configdrive code in
        # nova/virt/libvirt/driver.py.  That's how we get the injected files
        # (personalities) in. AFAIK they're not stored in the db at all,
        # so are not available later (web service metadata time).
        for (path, contents) in content:
            key = "%04i" % len(self.content)
            self.files.append({'path': path,
                'content_path': "/%s/%s" % (CONTENT_DIR, key)})
            self.content[key] = contents

        self.route_configuration = None

        # NOTE(mikal): the decision to not pass extra_md here like we
        # do to the StaticJSON driver is deliberate. extra_md will
        # contain the admin password for the instance, and we shouldn't
        # pass that to external services.
        self.vendordata_providers = {
            'StaticJSON': vendordata_json.JsonFileVendorData(
                instance=instance, address=address,
                extra_md=extra_md, network_info=network_info),
            'DynamicJSON': vendordata_dynamic.DynamicVendorData(
                instance=instance, address=address,
                network_info=network_info, context=request_context)
        }
Example #27
    def test_decode_as_bytes(self):
        self.assertEqual(b'text',
                         base64.decode_as_bytes(b'dGV4dA=='))
        self.assertEqual(b'text',
                         base64.decode_as_bytes(u'dGV4dA=='))
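
To make the symmetry explicit, a small round-trip sketch (assuming the companion encode_as_text/encode_as_bytes helpers in the same oslo_serialization.base64 module as decode_as_bytes):

from oslo_serialization import base64

# Encoding side: text or bytes out, depending on the helper used.
assert base64.encode_as_text(b'text') == 'dGV4dA=='
assert base64.encode_as_bytes('text') == b'dGV4dA=='

# Decoding side: decode_as_bytes accepts bytes or text, as the test shows.
assert base64.decode_as_bytes(b'dGV4dA==') == b'text'
assert base64.decode_as_bytes(u'dGV4dA==') == b'text'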
Example #28
    def test_denormalize_pem(self, encoded_pem, content_type):
        denorm_secret = self.denormalize(encoded_pem, content_type)
        self.assertEqual(base64.decode_as_bytes(encoded_pem), denorm_secret)
Example #29
    def _validate(self, answer: str) -> Tuple[str, bool]:
        try:
            conn_str_bytes = base64.decode_as_bytes(answer.encode('ascii'))
        except (TypeError, UnicodeEncodeError):
            print(
                'The connection string contains non-ASCII'
                ' characters; please make sure you entered'
                ' it as returned by the add-compute command.',
                file=sys.stderr)
            return answer, False

        try:
            conn_info = msgpackutils.loads(conn_str_bytes)
        except (msgpack.exceptions.ExtraData, ValueError):
            print(
                'The connection string contains extra data;'
                ' please make sure you entered it as returned'
                ' by the add-compute command.',
                file=sys.stderr)
            return answer, False
        except msgpack.exceptions.FormatError:
            print(
                'The connection string format is invalid;'
                ' please make sure you entered'
                ' it as returned by the add-compute command.',
                file=sys.stderr)
            return answer, False
        except Exception:
            print(
                'An unexpected error has occurred while trying'
                ' to decode the connection string. Please'
                ' make sure you entered it as returned by'
                ' the add-compute command and raise an'
                ' issue if the error persists.',
                file=sys.stderr)
            return answer, False

        # Perform token field validation as well so that the rest of
        # the code-base can assume valid input.
        # The input can be either an IPv4 or IPv6 address or a hostname.
        hostname = conn_info.get('hostname')
        try:
            self._validate_address(hostname)
            is_valid_address = True
        except ValueError:
            logger.debug('The hostname specified in the connection string is'
                         ' not an IPv4 or IPv6 address - treating it as'
                         ' a hostname.')
            is_valid_address = False
        if not is_valid_address:
            try:
                self._validate_hostname(hostname)
            except ValueError as e:
                print(
                    f'The hostname {hostname} provided in the connection'
                    f' string is invalid: {str(e)}',
                    file=sys.stderr)
                return answer, False

        fingerprint = conn_info.get('fingerprint')
        try:
            self._validate_fingerprint(fingerprint)
        except ValueError as e:
            print(
                'The clustering service TLS certificate fingerprint provided'
                f' in the connection string is invalid: {str(e)}',
                file=sys.stderr)
            return answer, False

        credential_id = conn_info.get('id')
        try:
            self._validate_credential_id(credential_id)
        except ValueError as e:
            print(
                'The credential id provided in the connection string is'
                f' invalid: {str(e)}',
                file=sys.stderr)
            return answer, False

        credential_secret = conn_info.get('secret')
        try:
            self._validate_credential_secret(credential_secret)
        except ValueError as e:
            print(
                'The credential secret provided in the connection string is'
                f' invalid: {str(e)}',
                file=sys.stderr)
            return answer, False

        self._conn_info = conn_info
        return answer, True
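
For reference, a hedged sketch of how a connection string accepted by this validator could be assembled for testing (assumptions: the field names simply mirror the ones read above — hostname, fingerprint, id, secret — the values are placeholders rather than a real credential, and msgpackutils/base64 here are oslo.serialization's helpers):

from oslo_serialization import base64, msgpackutils

conn_info = {
    'hostname': '192.0.2.10',               # placeholder address
    'fingerprint': '00' * 32,               # placeholder TLS fingerprint
    'id': 'example-credential-id',          # placeholder credential id
    'secret': 'example-credential-secret',  # placeholder secret
}

# msgpack-encode the mapping, then base64-encode it to ASCII text; this is
# the inverse of the decode_as_bytes + msgpackutils.loads path in _validate.
conn_str = base64.encode_as_text(msgpackutils.dumps(conn_info))
assert msgpackutils.loads(base64.decode_as_bytes(conn_str)) == conn_info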