Example no. 1
class ManageableObject(object):

    # NOTE: When adding a field obj_make_compatible needs to be updated
    fields = {
        'reference': fields.DictOfNullableStringsField(nullable=False),
        'size': fields.IntegerField(nullable=True),
        'safe_to_manage': fields.BooleanField(default=False, nullable=True),
        'reason_not_safe': fields.StringField(nullable=True),
        'cinder_id': fields.UUIDField(nullable=True),
        'extra_info': fields.DictOfNullableStringsField(nullable=True),
    }

    @classmethod
    def from_primitives(cls, context, dict_resource):
        resource = cls()
        driverkeys = set(dict_resource.keys()) - set(cls.fields.keys())
        for name, field in cls.fields.items():
            value = dict_resource.get(name)
            resource[name] = value

        for key in driverkeys:
            if resource['extra_info'] is None:
                resource['extra_info'] = {key: dict_resource[key]}

        resource._context = context
        resource.obj_reset_changes()
        return resource
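
Any key the driver returns beyond the declared fields is treated as driver-specific and folded under extra_info. A minimal standalone sketch of that intent (illustrative names only, not the Cinder implementation; for simplicity it collects all extra keys at once):

# Standalone sketch of the driver-key handling pattern above; the field
# names mirror ManageableObject, but this is an illustration, not Cinder code.
KNOWN_FIELDS = {'reference', 'size', 'safe_to_manage',
                'reason_not_safe', 'cinder_id', 'extra_info'}

def split_driver_keys(dict_resource):
    # Known fields are copied as-is; anything else is a driver-specific
    # key and ends up under extra_info.
    resource = {name: dict_resource.get(name) for name in KNOWN_FIELDS}
    extras = {k: v for k, v in dict_resource.items() if k not in KNOWN_FIELDS}
    if extras and resource.get('extra_info') is None:
        resource['extra_info'] = extras
    return resource

print(split_driver_keys({'size': 1, 'driver_key': 'x'})['extra_info'])
# {'driver_key': 'x'}
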
Example no. 2
class PciDevicePool(base.ZunObject):
    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'product_id': fields.StringField(),
        'vendor_id': fields.StringField(),
        'numa_node': fields.IntegerField(nullable=True),
        'tags': fields.DictOfNullableStringsField(),
        'count': fields.IntegerField(),
    }

    # NOTE(pmurray): before this object existed the pci device pool data was
    # stored as a dict. For backward compatibility we need to be able to read
    # it in from a dict
    @classmethod
    def from_dict(cls, value):
        pool_dict = copy.copy(value)
        pool = cls()
        pool.vendor_id = pool_dict.pop("vendor_id")
        pool.product_id = pool_dict.pop("product_id")
        pool.numa_node = pool_dict.pop("numa_node", None)
        pool.count = pool_dict.pop("count")
        pool.tags = pool_dict
        return pool

    # NOTE(sbauza): Before using objects, pci stats was a list of
    # dictionaries not having tags. For compatibility with other modules, let's
    # create a reversible method
    def to_dict(self):
        pci_pool = base.obj_to_primitive(self)
        tags = pci_pool.pop('tags', {})
        for k, v in tags.items():
            pci_pool[k] = v
        return pci_pool
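
from_dict pops the known keys and keeps whatever is left as tags, while to_dict flattens the tags back into the top level. A standalone sketch of that round trip on an illustrative legacy dict (not Zun code):

# Illustrative legacy pool dict; 'physical_network' stands in for an
# arbitrary tag key.
legacy = {'vendor_id': '8086', 'product_id': '1520',
          'count': 4, 'physical_network': 'physnet1'}

known = {k: legacy.pop(k) for k in ('vendor_id', 'product_id', 'count')}
pool = dict(known, numa_node=legacy.pop('numa_node', None), tags=legacy)

# Reverse direction: flatten the tags back into the top-level dict.
flat = {k: v for k, v in pool.items() if k != 'tags'}
flat.update(pool['tags'])
assert flat['physical_network'] == 'physnet1'
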
Example no. 3
class ManageableSnapshot(base.CinderObject, base.CinderObjectDictCompat,
                         ManageableObject):
    # Version 1.0: Initial version
    VERSION = '1.0'

    fields = {
        'source_reference': fields.DictOfNullableStringsField(),
    }
Example no. 4
    def setUp(self):
        super(TestDictOfStringsNone, self).setUp()
        self.field = fields.DictOfNullableStringsField()
        self.coerce_good_values = [({'foo': 'bar'}, {'foo': 'bar'}),
                                   ({'foo': 1}, {'foo': '1'}),
                                   ({'foo': None}, {'foo': None})]
        self.coerce_bad_values = [{1: 'bar'}, 'foo']
        self.to_primitive_values = [({'foo': 'bar'}, {'foo': 'bar'})]
        self.from_primitive_values = [({'foo': 'bar'}, {'foo': 'bar'})]
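
The value pairs above document the coercion contract of DictOfNullableStringsField: values are coerced to strings, per-key None is preserved, and non-string keys or non-dict values are rejected. A minimal demonstration against oslo.versionedobjects directly:

from oslo_versionedobjects import fields

field = fields.DictOfNullableStringsField()
print(field.coerce(None, 'extra_info', {'foo': 1}))     # {'foo': '1'}
print(field.coerce(None, 'extra_info', {'foo': None}))  # {'foo': None}
# Both of the "bad" values above raise:
#   field.coerce(None, 'extra_info', {1: 'bar'})
#   field.coerce(None, 'extra_info', 'foo')
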
Example no. 5
class ManageableSnapshot(base.CinderObject, base.CinderObjectDictCompat,
                         ManageableObject):
    # Version 1.0: Initial version
    VERSION = '1.0'

    # NOTE: When adding a field obj_make_compatible needs to be updated
    fields = {
        'source_reference': fields.DictOfNullableStringsField(),
    }
Example no. 6
class NetworkLink(base.DrydockPersistentObject, base.DrydockObject):

    VERSION = '1.0'

    fields = {
        'name':
        ovo_fields.StringField(),
        'site':
        ovo_fields.StringField(),
        'metalabels':
        ovo_fields.DictOfNullableStringsField(),
        'bonding_mode':
        hd_fields.NetworkLinkBondingModeField(
            default=hd_fields.NetworkLinkBondingMode.Disabled),
        'bonding_xmit_hash':
        ovo_fields.StringField(nullable=True, default='layer3+4'),
        'bonding_peer_rate':
        ovo_fields.StringField(nullable=True, default='slow'),
        'bonding_mon_rate':
        ovo_fields.IntegerField(nullable=True, default=100),
        'bonding_up_delay':
        ovo_fields.IntegerField(nullable=True, default=200),
        'bonding_down_delay':
        ovo_fields.IntegerField(nullable=True, default=200),
        'mtu':
        ovo_fields.IntegerField(default=1500),
        'linkspeed':
        ovo_fields.StringField(default='auto'),
        'trunk_mode':
        hd_fields.NetworkLinkTrunkingModeField(
            default=hd_fields.NetworkLinkTrunkingMode.Disabled),
        'native_network':
        ovo_fields.StringField(nullable=True),
        'allowed_networks':
        ovo_fields.ListOfStringsField(),
    }

    def __init__(self, **kwargs):
        super(NetworkLink, self).__init__(**kwargs)

    # NetworkLink keyed by name
    def get_id(self):
        return self.get_name()

    def get_name(self):
        return self.name
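
A note on the defaults declared above (mtu=1500, linkspeed='auto', the bonding timers, and so on): in plain oslo.versionedobjects a field default is applied by obj_set_defaults(), not automatically in __init__; whether DrydockObject does this for you is not shown here. A minimal sketch with an illustrative registered object:

from oslo_versionedobjects import base as ovo_base
from oslo_versionedobjects import fields as ovo_fields

@ovo_base.VersionedObjectRegistry.register
class LinkSketch(ovo_base.VersionedObject):
    # Illustrative object, not part of Drydock.
    fields = {
        'mtu': ovo_fields.IntegerField(default=1500),
        'metalabels': ovo_fields.DictOfNullableStringsField(nullable=True,
                                                            default={}),
    }

link = LinkSketch()
link.obj_set_defaults()
print(link.mtu, link.metalabels)  # 1500 {}
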
Example no. 7
class Network(base.DrydockPersistentObject, base.DrydockObject):

    VERSION = '1.0'

    fields = {
        'name': ovo_fields.StringField(),
        'site': ovo_fields.StringField(),
        'metalabels': ovo_fields.DictOfNullableStringsField(),
        'cidr': ovo_fields.StringField(),
        'vlan_id': ovo_fields.StringField(nullable=True),
        'routedomain': ovo_fields.StringField(nullable=True),
        'mtu': ovo_fields.IntegerField(nullable=True),
        'dns_domain': ovo_fields.StringField(nullable=True),
        'dns_servers': ovo_fields.StringField(nullable=True),
        # Keys of ranges are 'type', 'start', 'end'
        'ranges': ovo_fields.ListOfDictOfNullableStringsField(),
        # Keys of routes are 'subnet', 'routedomain', 'gateway', 'metric'
        'routes': ovo_fields.ListOfDictOfNullableStringsField(),
        'dhcp_relay_self_ip': ovo_fields.StringField(nullable=True),
        'dhcp_relay_upstream_target': ovo_fields.StringField(nullable=True),
    }

    def __init__(self, **kwargs):
        super(Network, self).__init__(**kwargs)

    # Network keyed on name
    def get_id(self):
        return self.get_name()

    def get_name(self):
        return self.name

    def get_default_gateway(self):
        for r in getattr(self, 'routes', []):
            if r.get('subnet', '') == '0.0.0.0/0':
                return r.get('gateway', None)

        return None
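
get_default_gateway scans the routes, each a dict of nullable strings, for the 0.0.0.0/0 subnet and returns its gateway. A standalone illustration with sample route data:

# Sample routes shaped like the 'routes' field above (illustrative values).
routes = [{'subnet': '10.0.0.0/24', 'gateway': None, 'metric': None},
          {'subnet': '0.0.0.0/0', 'gateway': '10.0.0.1', 'metric': '100'}]

default_gw = next((r.get('gateway') for r in routes
                   if r.get('subnet', '') == '0.0.0.0/0'), None)
assert default_gw == '10.0.0.1'
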
Example no. 8
class VolumeType(base.CinderPersistentObject, base.CinderObject,
                 base.CinderObjectDictCompat, base.CinderComparableObject):
    # Version 1.0: Initial version
    # Version 1.1: Changed extra_specs to DictOfNullableStringsField
    # Version 1.2: Added qos_specs
    VERSION = '1.2'

    OPTIONAL_FIELDS = ('extra_specs', 'projects', 'qos_specs')

    fields = {
        'id': fields.UUIDField(),
        'name': fields.StringField(nullable=True),
        'description': fields.StringField(nullable=True),
        'is_public': fields.BooleanField(default=True, nullable=True),
        'projects': fields.ListOfStringsField(nullable=True),
        'extra_specs': fields.DictOfNullableStringsField(nullable=True),
        'qos_specs': fields.ObjectField('QualityOfServiceSpecs',
                                        nullable=True),
    }

    def obj_make_compatible(self, primitive, target_version):
        super(VolumeType, self).obj_make_compatible(primitive, target_version)

        target_version = versionutils.convert_version_to_tuple(target_version)
        if target_version < (1, 1):
            if primitive.get('extra_specs'):
                # Before 1.1 the extra_specs field didn't allow None values.
                # To make sure we won't explode on the receiver side, change
                # Nones to empty strings.
                for k, v in primitive['extra_specs'].items():
                    if v is None:
                        primitive['extra_specs'][k] = ''

    @classmethod
    def _get_expected_attrs(cls, context, *args, **kwargs):
        return 'extra_specs', 'projects'

    @classmethod
    def _from_db_object(cls, context, type, db_type, expected_attrs=None):
        if expected_attrs is None:
            expected_attrs = ['extra_specs', 'projects']
        for name, field in type.fields.items():
            if name in cls.OPTIONAL_FIELDS:
                continue
            value = db_type[name]
            if isinstance(field, fields.IntegerField):
                value = value or 0
            type[name] = value

        # Get data from db_type object that was queried by joined query
        # from DB
        if 'extra_specs' in expected_attrs:
            type.extra_specs = {}
            specs = db_type.get('extra_specs')
            if specs and isinstance(specs, list):
                type.extra_specs = {item['key']: item['value']
                                    for item in specs}
            elif specs and isinstance(specs, dict):
                type.extra_specs = specs
        if 'projects' in expected_attrs:
            type.projects = db_type.get('projects', [])
        if 'qos_specs' in expected_attrs:
            qos_specs = objects.QualityOfServiceSpecs(context)
            qos_specs._from_db_object(context, qos_specs, db_type['qos_specs'])
            type.qos_specs = qos_specs
        type._context = context
        type.obj_reset_changes()
        return type

    def create(self):
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason=_('already created'))
        db_volume_type = volume_types.create(self._context, self.name,
                                             self.extra_specs,
                                             self.is_public, self.projects,
                                             self.description)
        self._from_db_object(self._context, self, db_volume_type)

    def save(self):
        updates = self.cinder_obj_get_changes()
        if updates:
            volume_types.update(self._context, self.id, self.name,
                                self.description)
            self.obj_reset_changes()

    def destroy(self):
        with self.obj_as_admin():
            updated_values = volume_types.destroy(self._context, self.id)
        self.update(updated_values)
        self.obj_reset_changes(updated_values.keys())
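
obj_make_compatible downgrades the primitive for pre-1.1 receivers by turning None values in extra_specs into empty strings, because the 1.0 field type did not allow None. A standalone sketch of that rule on a sample primitive (spec keys are illustrative):

primitive = {'extra_specs': {'multiattach': None,
                             'replication_enabled': '<is> True'}}

# The same substitution the 1.1 -> 1.0 downgrade performs.
for k, v in primitive['extra_specs'].items():
    if v is None:
        primitive['extra_specs'][k] = ''

assert primitive['extra_specs']['multiattach'] == ''
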
Example no. 9
class QualityOfServiceSpecs(base.CinderPersistentObject, base.CinderObject,
                            base.CinderObjectDictCompat,
                            base.CinderComparableObject):
    # Version
    #   1.0: Initial version
    VERSION = "1.0"

    OPTIONAL_FIELDS = ['volume_types']

    fields = {
        'id':
        fields.UUIDField(),
        'name':
        fields.StringField(),
        'consumer':
        c_fields.QoSConsumerField(default=c_fields.QoSConsumerValues.BACK_END),
        'specs':
        fields.DictOfNullableStringsField(nullable=True),
        'volume_types':
        fields.ObjectField('VolumeTypeList', nullable=True),
    }

    def __init__(self, *args, **kwargs):
        super(QualityOfServiceSpecs, self).__init__(*args, **kwargs)
        self._init_specs = {}

    def __setattr__(self, name, value):
        try:
            super(QualityOfServiceSpecs, self).__setattr__(name, value)
        except ValueError:
            if name == 'consumer':
                # Give more descriptive error message for invalid 'consumer'
                msg = (_("Valid consumer of QoS specs are: %s") %
                       c_fields.QoSConsumerField())
                raise exception.InvalidQoSSpecs(reason=msg)
            else:
                raise

    def obj_reset_changes(self, fields=None, recursive=False):
        super(QualityOfServiceSpecs, self).obj_reset_changes(fields, recursive)
        if fields is None or 'specs' in fields:
            self._init_specs = self.specs.copy() if self.specs else {}

    def obj_what_changed(self):
        changes = super(QualityOfServiceSpecs, self).obj_what_changed()

        # Do comparison of what's in the dict vs. reference to the specs object
        if self.obj_attr_is_set('id'):
            if self.specs != self._init_specs:
                changes.add('specs')
            else:
                # If both dicts are equal don't consider anything gets changed
                if 'specs' in changes:
                    changes.remove('specs')

        return changes

    def obj_get_changes(self):
        changes = super(QualityOfServiceSpecs, self).obj_get_changes()
        if 'specs' in changes:
            # For specs, we only want what has changed in the dictionary,
            # because otherwise we'll individually overwrite the DB value for
            # every key in 'specs' even if it hasn't changed
            specs_changes = {}
            for key, val in self.specs.items():
                if val != self._init_specs.get(key):
                    specs_changes[key] = val
            changes['specs'] = specs_changes

            specs_keys_removed = (set(self._init_specs.keys()) -
                                  set(self.specs.keys()))
            if specs_keys_removed:
                # Special key notifying which specs keys have been deleted
                changes['specs_keys_removed'] = specs_keys_removed

        return changes

    def obj_load_attr(self, attrname):
        if attrname not in self.OPTIONAL_FIELDS:
            raise exception.ObjectActionError(
                action='obj_load_attr',
                reason=_('attribute %s not lazy-loadable') % attrname)
        if not self._context:
            raise exception.OrphanedObjectError(method='obj_load_attr',
                                                objtype=self.obj_name())

        if attrname == 'volume_types':
            self.volume_types = objects.VolumeTypeList.get_all_types_for_qos(
                self._context, self.id)

    @classmethod
    def _from_db_object(cls,
                        context,
                        qos_spec,
                        db_qos_spec,
                        expected_attrs=None):
        if expected_attrs is None:
            expected_attrs = []

        for name, field in qos_spec.fields.items():
            if name not in cls.OPTIONAL_FIELDS:
                value = db_qos_spec.get(name)
                # 'specs' could be null if only a consumer is given, so make
                # it an empty dict instead of None
                if not value and isinstance(field, fields.DictOfStringsField):
                    value = {}
                setattr(qos_spec, name, value)

        if 'volume_types' in expected_attrs:
            volume_types = objects.VolumeTypeList.get_all_types_for_qos(
                context, db_qos_spec['id'])
            qos_spec.volume_types = volume_types

        qos_spec._context = context
        qos_spec.obj_reset_changes()
        return qos_spec

    def create(self):
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason='already created')
        updates = self.cinder_obj_get_changes()

        try:
            create_ret = db.qos_specs_create(self._context, updates)
        except db_exc.DBDataError:
            msg = _('Error writing field to database')
            LOG.exception(msg)
            raise exception.Invalid(msg)
        except db_exc.DBError:
            LOG.exception('DB error occurred when creating QoS specs.')
            raise exception.QoSSpecsCreateFailed(name=self.name,
                                                 qos_specs=self.specs)
        # Save ID with the object
        updates['id'] = create_ret['id']
        self._from_db_object(self._context, self, updates)

    def save(self):
        updates = self.cinder_obj_get_changes()
        if updates:
            if 'specs_keys_removed' in updates.keys():
                for specs_key_to_remove in updates['specs_keys_removed']:
                    db.qos_specs_item_delete(self._context, self.id,
                                             specs_key_to_remove)
                del updates['specs_keys_removed']
            db.qos_specs_update(self._context, self.id, updates)

        self.obj_reset_changes()

    def destroy(self, force=False):
        """Deletes the QoS spec.

        :param force: when force is True, all volume_type mappings for this QoS
                      are deleted.  When force is False and volume_type
                      mappings still exist, a QoSSpecsInUse exception is thrown
        """
        if self.volume_types:
            if not force:
                raise exception.QoSSpecsInUse(specs_id=self.id)
            # remove all association
            db.qos_specs_disassociate_all(self._context, self.id)
        updated_values = db.qos_specs_delete(self._context, self.id)
        self.update(updated_values)
        self.obj_reset_changes(updated_values.keys())
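
obj_get_changes narrows the 'specs' change set to keys whose values actually differ from the last obj_reset_changes(), and reports deleted keys under the special specs_keys_removed entry so save() can issue per-key deletes. A standalone sketch of that diff with sample spec values:

# State captured at the last obj_reset_changes() vs. the current specs.
init_specs = {'total_iops_sec': '1000', 'read_iops_sec': '500'}
specs = {'total_iops_sec': '2000'}

specs_changes = {k: v for k, v in specs.items() if v != init_specs.get(k)}
specs_keys_removed = set(init_specs) - set(specs)

assert specs_changes == {'total_iops_sec': '2000'}
assert specs_keys_removed == {'read_iops_sec'}
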
Example no. 10
class VolumeType(base.CinderPersistentObject, base.CinderObject,
                 base.CinderObjectDictCompat, base.CinderComparableObject):
    # Version 1.0: Initial version
    # Version 1.1: Changed extra_specs to DictOfNullableStringsField
    # Version 1.2: Added qos_specs
    # Version 1.3: Add qos_specs_id
    VERSION = '1.3'

    OPTIONAL_FIELDS = ('extra_specs', 'projects', 'qos_specs')

    # NOTE: When adding a field obj_make_compatible needs to be updated
    fields = {
        'id': fields.UUIDField(),
        'name': fields.StringField(nullable=True),
        'description': fields.StringField(nullable=True),
        'is_public': fields.BooleanField(default=True, nullable=True),
        'projects': fields.ListOfStringsField(nullable=True),
        'extra_specs': fields.DictOfNullableStringsField(nullable=True),
        'qos_specs_id': fields.UUIDField(nullable=True),
        'qos_specs': fields.ObjectField('QualityOfServiceSpecs',
                                        nullable=True),
    }

    @classmethod
    def _get_expected_attrs(cls, context, *args, **kwargs):
        return 'extra_specs', 'projects'

    @classmethod
    def _from_db_object(cls, context, type, db_type, expected_attrs=None):
        if expected_attrs is None:
            expected_attrs = ['extra_specs', 'projects']
        for name, field in type.fields.items():
            if name in cls.OPTIONAL_FIELDS:
                continue
            value = db_type[name]
            if isinstance(field, fields.IntegerField):
                value = value or 0
            type[name] = value

        # Get data from db_type object that was queried by joined query
        # from DB
        if 'extra_specs' in expected_attrs:
            type.extra_specs = {}
            specs = db_type.get('extra_specs')
            if specs and isinstance(specs, list):
                type.extra_specs = {
                    item['key']: item['value']
                    for item in specs
                }
            elif specs and isinstance(specs, dict):
                type.extra_specs = specs
        if 'projects' in expected_attrs:
            # NOTE(geguileo): Until projects stops being a polymorphic value we
            # have to do a conversion here for VolumeTypeProjects ORM instance
            # lists.
            projects = db_type.get('projects', [])
            if projects and not isinstance(projects[0], str):
                projects = [p.project_id for p in projects]
            type.projects = projects
        if 'qos_specs' in expected_attrs:
            qos_specs = objects.QualityOfServiceSpecs(context)
            qos_specs._from_db_object(context, qos_specs, db_type['qos_specs'])
            type.qos_specs = qos_specs
        type._context = context
        type.obj_reset_changes()
        return type

    def create(self):
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason=_('already created'))
        db_volume_type = volume_types.create(self._context, self.name,
                                             self.extra_specs, self.is_public,
                                             self.projects, self.description)
        self._from_db_object(self._context, self, db_volume_type)

    def save(self):
        updates = self.cinder_obj_get_changes()
        if updates:
            volume_types.update(self._context, self.id, self.name,
                                self.description)
            self.obj_reset_changes()

    def destroy(self):
        with self.obj_as_admin():
            updated_values = volume_types.destroy(self._context, self.id)
        self.update(updated_values)
        self.obj_reset_changes(updated_values.keys())

    def obj_load_attr(self, attrname):
        if attrname not in self.OPTIONAL_FIELDS:
            raise exception.ObjectActionError(
                action='obj_load_attr',
                reason=_('attribute %s not lazy-loadable') % attrname)
        if not self._context:
            raise exception.OrphanedObjectError(method='obj_load_attr',
                                                objtype=self.obj_name())

        if attrname == 'extra_specs':
            self.extra_specs = db.volume_type_extra_specs_get(
                self._context, self.id)

        elif attrname == 'qos_specs':
            if self.qos_specs_id:
                self.qos_specs = objects.QualityOfServiceSpecs.get_by_id(
                    self._context, self.qos_specs_id)
            else:
                self.qos_specs = None

        elif attrname == 'projects':
            volume_type_projects = db.volume_type_access_get_all(
                self._context, self.id)
            self.projects = [x.project_id for x in volume_type_projects]

        self.obj_reset_changes(fields=[attrname])

    @classmethod
    def get_by_name_or_id(cls, context, identity):
        orm_obj = volume_types.get_by_name_or_id(context, identity)
        expected_attrs = cls._get_expected_attrs(context)
        return cls._from_db_object(context,
                                   cls(context),
                                   orm_obj,
                                   expected_attrs=expected_attrs)

    def is_replicated(self):
        return volume_utils.is_replicated_spec(self.extra_specs)

    def is_multiattach(self):
        return volume_utils.is_multiattach_spec(self.extra_specs)
Example no. 11
class GroupType(base.CinderPersistentObject, base.CinderObject,
                base.CinderObjectDictCompat, base.CinderComparableObject):
    # Version 1.0: Initial version
    VERSION = '1.0'

    OPTIONAL_FIELDS = ['group_specs', 'projects']

    fields = {
        'id': fields.UUIDField(),
        'name': fields.StringField(nullable=True),
        'description': fields.StringField(nullable=True),
        'is_public': fields.BooleanField(default=True, nullable=True),
        'projects': fields.ListOfStringsField(nullable=True),
        'group_specs': fields.DictOfNullableStringsField(nullable=True),
    }

    @classmethod
    def _get_expected_attrs(cls, context):
        return 'group_specs', 'projects'

    @classmethod
    def _from_db_object(cls, context, type, db_type, expected_attrs=None):
        if expected_attrs is None:
            expected_attrs = []
        for name, field in type.fields.items():
            if name in cls.OPTIONAL_FIELDS:
                continue
            value = db_type[name]
            if isinstance(field, fields.IntegerField):
                value = value or 0
            type[name] = value

        # Get data from db_type object that was queried by joined query
        # from DB
        if 'group_specs' in expected_attrs:
            type.group_specs = {}
            specs = db_type.get('group_specs')
            if specs and isinstance(specs, list):
                type.group_specs = {item['key']: item['value']
                                    for item in specs}
            elif specs and isinstance(specs, dict):
                type.group_specs = specs
        if 'projects' in expected_attrs:
            type.projects = db_type.get('projects', [])

        type._context = context
        type.obj_reset_changes()
        return type

    def create(self):
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason=_('already created'))
        db_group_type = group_types.create(self._context, self.name,
                                           self.group_specs,
                                           self.is_public, self.projects,
                                           self.description)
        self._from_db_object(self._context, self, db_group_type)

    def save(self):
        updates = self.cinder_obj_get_changes()
        if updates:
            group_types.update(self._context, self.id, self.name,
                               self.description)
            self.obj_reset_changes()

    def destroy(self):
        with self.obj_as_admin():
            group_types.destroy(self._context, self.id)
Example no. 12
class VolumeAttachment(base.CinderPersistentObject, base.CinderObject,
                       base.CinderObjectDictCompat,
                       base.CinderComparableObject):
    # Version 1.0: Initial version
    # Version 1.1: Added volume relationship
    # Version 1.2: Added connection_info attribute
    VERSION = '1.2'

    OPTIONAL_FIELDS = ['volume']
    obj_extra_fields = ['project_id', 'volume_host']

    fields = {
        'id': fields.UUIDField(),
        'volume_id': fields.UUIDField(),
        'instance_uuid': fields.UUIDField(nullable=True),
        'attached_host': fields.StringField(nullable=True),
        'mountpoint': fields.StringField(nullable=True),
        'attach_time': fields.DateTimeField(nullable=True),
        'detach_time': fields.DateTimeField(nullable=True),
        'attach_status': c_fields.VolumeAttachStatusField(nullable=True),
        'attach_mode': fields.StringField(nullable=True),
        'volume': fields.ObjectField('Volume', nullable=False),
        'connection_info': fields.DictOfNullableStringsField(nullable=True)
    }

    @property
    def project_id(self):
        return self.volume.project_id

    @property
    def volume_host(self):
        return self.volume.host

    @classmethod
    def _get_expected_attrs(cls, context, *args, **kwargs):
        return ['volume']

    def obj_make_compatible(self, primitive, target_version):
        """Make a object representation compatible with target version."""
        super(VolumeAttachment,
              self).obj_make_compatible(primitive, target_version)
        target_version = versionutils.convert_version_to_tuple(target_version)
        if target_version < (1, 2):
            primitive.pop('connection_info', None)

    @classmethod
    def _from_db_object(cls,
                        context,
                        attachment,
                        db_attachment,
                        expected_attrs=None):
        if expected_attrs is None:
            expected_attrs = cls._get_expected_attrs(context)

        for name, field in attachment.fields.items():
            if name in cls.OPTIONAL_FIELDS:
                continue
            value = db_attachment.get(name)
            if isinstance(field, fields.IntegerField):
                value = value or 0
            if name == 'connection_info':
                attachment.connection_info = jsonutils.loads(
                    value) if value else None
            else:
                attachment[name] = value
        if 'volume' in expected_attrs:
            db_volume = db_attachment.get('volume')
            if db_volume:
                attachment.volume = objects.Volume._from_db_object(
                    context, objects.Volume(), db_volume)

        attachment._context = context
        attachment.obj_reset_changes()
        return attachment

    def obj_load_attr(self, attrname):
        if attrname not in self.OPTIONAL_FIELDS:
            raise exception.ObjectActionError(
                action='obj_load_attr',
                reason=_('attribute %s not lazy-loadable') % attrname)
        if not self._context:
            raise exception.OrphanedObjectError(method='obj_load_attr',
                                                objtype=self.obj_name())

        if attrname == 'volume':
            volume = objects.Volume.get_by_id(self._context, self.volume_id)
            self.volume = volume

        self.obj_reset_changes(fields=[attrname])

    @staticmethod
    def _convert_connection_info_to_db_format(updates):
        properties = updates.pop('connection_info', None)
        if properties is not None:
            updates['connection_info'] = jsonutils.dumps(properties)

    def save(self):
        updates = self.cinder_obj_get_changes()
        if updates:
            if 'connection_info' in updates:
                self._convert_connection_info_to_db_format(updates)
            if 'volume' in updates:
                raise exception.ObjectActionError(action='save',
                                                  reason=_('volume changed'))

            db.volume_attachment_update(self._context, self.id, updates)
            self.obj_reset_changes()

    def finish_attach(self,
                      instance_uuid,
                      host_name,
                      mount_point,
                      attach_mode='rw'):
        with self.obj_as_admin():
            db_volume, updated_values = db.volume_attached(
                self._context, self.id, instance_uuid, host_name, mount_point,
                attach_mode)
        self.update(updated_values)
        self.obj_reset_changes(updated_values.keys())
        return objects.Volume._from_db_object(self._context, objects.Volume(),
                                              db_volume)

    def create(self):
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason=_('already created'))
        updates = self.cinder_obj_get_changes()
        with self.obj_as_admin():
            db_attachment = db.volume_attach(self._context, updates)
        self._from_db_object(self._context, self, db_attachment)

    def destroy(self):
        updated_values = db.attachment_destroy(self._context, self.id)
        self.update(updated_values)
        self.obj_reset_changes(updated_values.keys())
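
connection_info is a dict of nullable strings on the object but is stored as a JSON blob in the database: save() serializes it and _from_db_object() loads it back (Cinder uses oslo_serialization.jsonutils, a thin wrapper over the standard json module). A standalone sketch of the round trip with illustrative values:

import json

updates = {'connection_info': {'driver_volume_type': 'iscsi',
                               'auth_method': None}}

# To DB format, as in _convert_connection_info_to_db_format().
updates['connection_info'] = json.dumps(updates['connection_info'])

# Back from DB format, as in _from_db_object().
loaded = json.loads(updates['connection_info'])
assert loaded == {'driver_volume_type': 'iscsi', 'auth_method': None}
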
Example no. 13
class BootActionAsset(base.DrydockObject):

    VERSION = '1.0'

    fields = {
        'type': hd_fields.BootactionAssetTypeField(nullable=True),
        'path': ovo_fields.StringField(nullable=True),
        'location': ovo_fields.StringField(nullable=True),
        'data': ovo_fields.StringField(nullable=True),
        'package_list': ovo_fields.DictOfNullableStringsField(nullable=True),
        'location_pipeline': ovo_fields.ListOfStringsField(nullable=True),
        'data_pipeline': ovo_fields.ListOfStringsField(nullable=True),
        'permissions': ovo_fields.IntegerField(nullable=True),
    }

    def __init__(self, **kwargs):
        if 'permissions' in kwargs:
            mode = kwargs.pop('permissions')
            if isinstance(mode, str):
                mode = int(mode, base=8)
        else:
            mode = None

        ba_type = kwargs.get('type', None)

        package_list = None
        if ba_type == hd_fields.BootactionAssetType.PackageList:
            if isinstance(kwargs.get('data'), dict):
                package_list = self._extract_package_list(kwargs.pop('data'))
            # If the data section doesn't parse as a dictionary
            # then the package data needs to be sourced dynamically
            # Otherwise the Bootaction is invalid
            elif not kwargs.get('location'):
                raise errors.InvalidPackageListFormat(
                    "Requires a top-level mapping/object.")

        super().__init__(package_list=package_list, permissions=mode, **kwargs)
        self.rendered_bytes = None

    def render(self, nodename, site_design, action_id, action_key, design_ref):
        """Render this asset into a base64 encoded string.

        The ``nodename`` and ``action_id`` will be used to construct
        the context for evaluating the ``template`` pipeline segment

        :param nodename: the name of the node where the asset will be deployed
        :param site_design: instance of objects.SiteDesign
        :param action_id: a 128-bit ULID boot action id
        :param action_key: a 256-bit random key for API auth
        :param design_ref: The design ref this bootaction was initiated under
        """
        tpl_ctx = self._get_template_context(nodename, site_design, action_id,
                                             action_key, design_ref)

        if self.location is not None:
            rendered_location = self.execute_pipeline(
                self.location, self.location_pipeline, tpl_ctx=tpl_ctx)
            data_block = self.resolve_asset_location(rendered_location)
            if self.type == hd_fields.BootactionAssetType.PackageList:
                self._parse_package_list(data_block)
        elif self.type != hd_fields.BootactionAssetType.PackageList:
            data_block = self.data.encode('utf-8')

        if self.type != hd_fields.BootactionAssetType.PackageList:
            value = self.execute_pipeline(
                data_block, self.data_pipeline, tpl_ctx=tpl_ctx)

            if isinstance(value, str):
                value = value.encode('utf-8')
            self.rendered_bytes = value

    def _parse_package_list(self, data):
        """Parse data expecting a list of packages to install.

        Expect data to be a bytearray representing a JSON or YAML
        document.

        :param data: A bytearray of data to parse
        """
        try:
            data_string = data.decode('utf-8')
            parsed_data = yaml.safe_load(data_string)

            if isinstance(parsed_data, dict):
                self.package_list = self._extract_package_list(parsed_data)
            else:
                raise errors.InvalidPackageListFormat(
                    "Package data should have a top-level mapping/object.")
        except yaml.YAMLError as ex:
            raise errors.InvalidPackageListFormat(
                "Invalid YAML in package list: %s" % str(ex))

    def _extract_package_list(self, pkg_dict):
        """Extract package data into object model.

        :param pkg_dict: a dictionary of packages to install
        """
        package_list = dict()
        for k, v in pkg_dict.items():
            if (isinstance(k, str) and (not v or isinstance(v, str))):
                package_list[k] = v
            else:
                raise errors.InvalidPackageListFormat(
                    "Keys and values must be strings.")
        return package_list

    def _get_template_context(self, nodename, site_design, action_id,
                              action_key, design_ref):
        """Create a context to be used for template rendering.

        :param nodename: The name of the node for the bootaction
        :param site_design: The full site design
        :param action_id: the ULID assigned to the boot action using this context
        :param action_key: a 256 bit random key for API auth
        :param design_ref: The design reference representing ``site_design``
        """

        return dict(
            node=self._get_node_context(nodename, site_design),
            action=self._get_action_context(action_id, action_key, design_ref))

    def _get_action_context(self, action_id, action_key, design_ref):
        """Create the action-specific context items for template rendering.

        :param action_id: ULID of this boot action
        :param action_key: random key of this boot action
        :param design_ref: Design reference representing the site design
        """
        return dict(
            action_id=ulid2.ulid_to_base32(action_id),
            action_key=action_key.hex(),
            report_url=config.config_mgr.conf.bootactions.report_url,
            design_ref=design_ref)

    def _get_node_context(self, nodename, site_design):
        """Create the node-specific context items for template rendering.

        :param nodename: name of the node this boot action targets
        :param site_design: full site design
        """
        node = site_design.get_baremetal_node(nodename)
        return dict(
            hostname=nodename,
            domain=node.get_domain(site_design),
            tags=[t for t in node.tags],
            labels={k: v
                    for (k, v) in node.owner_data.items()},
            network=self._get_node_network_context(node, site_design),
            interfaces=self._get_node_interface_context(node))

    def _get_node_network_context(self, node, site_design):
        """Create a node's network configuration context.

        :param node: node object
        :param site_design: full site design
        """
        network_context = dict()
        for a in node.addressing:
            if a.address is not None:
                network = site_design.get_network(a.network)
                network_context[a.network] = dict(
                    ip=a.address,
                    cidr=network.cidr,
                    dns_suffix=network.dns_domain)
                if a.network == node.primary_network:
                    network_context['default'] = network_context[a.network]

        return network_context

    def _get_node_interface_context(self, node):
        """Create a node's network interface context.

        :param node: the node object
        """
        interface_context = dict()
        for i in node.interfaces:
            interface_context[i.device_name] = dict(sriov=i.sriov)
            if i.sriov:
                interface_context[i.device_name]['vf_count'] = i.vf_count
                interface_context[i.device_name]['trustedmode'] = i.trustedmode
        return interface_context

    def resolve_asset_location(self, asset_url):
        """Retrieve the data asset from the url.

        Returns the asset as a bytestring.

        :param asset_url: URL to retrieve the data asset from
        """
        try:
            return ReferenceResolver.resolve_reference(asset_url)
        except Exception as ex:
            raise errors.InvalidAssetLocation(
                "Unable to resolve asset reference %s: %s" % (asset_url,
                                                              str(ex)))

    def execute_pipeline(self, data, pipeline, tpl_ctx=None):
        """Execute a pipeline against a data element.

        Returns the manipulated ``data`` element

        :param data: The data element to be manipulated by the pipeline
        :param pipeline: list of pipeline segments to execute
        :param tpl_ctx: The optional context to be made available to the ``template`` pipeline
        """
        segment_funcs = {
            'base64_encode': self.eval_base64_encode,
            'base64_decode': self.eval_base64_decode,
            'utf8_decode': self.eval_utf8_decode,
            'utf8_encode': self.eval_utf8_encode,
            'template': self.eval_template,
        }

        for s in pipeline:
            try:
                data = segment_funcs[s](data, ctx=tpl_ctx)
            except KeyError:
                raise errors.UnknownPipelineSegment(
                    "Bootaction pipeline segment %s unknown." % s)
            except Exception as ex:
                raise errors.PipelineFailure(
                    "Error when running bootaction pipeline segment %s: %s - %s"
                    % (s, type(ex).__name__, str(ex)))

        return data

    def eval_base64_encode(self, data, ctx=None):
        """Encode data as base64.

        Lightweight wrapper around the base64 library to shed the ctx kwarg

        :param data: data to be encoded
        :param ctx: throwaway, just allows a generic interface for pipeline segments
        """
        return base64.b64encode(data)

    def eval_base64_decode(self, data, ctx=None):
        """Decode data from base64.

        Lightweight wrapper around the base64 library to shed the ctx kwarg

        :param data: data to be decoded
        :param ctx: throwaway, just allows a generic interface for pipeline segments
        """
        return base64.b64decode(data)

    def eval_utf8_decode(self, data, ctx=None):
        """Decode data from bytes to UTF-8 string.

        :param data: data to be decoded
        :param ctx: throwaway, just allows a generic interface for pipeline segments
        """
        return data.decode('utf-8')

    def eval_utf8_encode(self, data, ctx=None):
        """Encode data from UTF-8 to bytes.

        :param data: data to be encoded
        :param ctx: throwaway, just allows a generic interface for pipeline segments
        """
        return data.encode('utf-8')

    def eval_template(self, data, ctx=None):
        """Evaluate data as a Jinja2 template.

        :param data: The template
        :param ctx: Optional ctx to inject into the template render
        """
        template = Template(data)
        return template.render(ctx)
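
execute_pipeline is a plain dispatch loop: each segment name maps to a callable and the data value flows through them in order, with unknown names and segment failures mapped to specific errors. A standalone sketch of the same pattern using two of the segments (illustrative, not the Drydock class itself):

import base64

segments = {
    'utf8_encode': lambda data, ctx=None: data.encode('utf-8'),
    'base64_encode': lambda data, ctx=None: base64.b64encode(data),
}

data = 'echo hello'
for name in ('utf8_encode', 'base64_encode'):
    data = segments[name](data, ctx=None)

print(data)  # b'ZWNobyBoZWxsbw=='
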