def __init__(self, hybrid, contents=None, *args, **kwargs):
    """
    Initializes the serializer, mapping field types

    :param hybrid: Hybrid class whose properties, dynamics and relations are serialized
    :param contents: Optional filter list. None means "expose everything"; "_dynamics"
                     and "_relations" select whole groups, a plain field name selects a
                     single field, and a "-field" entry excludes one
    Remaining positional/keyword arguments are forwarded to the parent serializer.
    The "allow_passwords" kwarg (default False) controls whether password-like
    properties are exposed.
    """
    # kwargs.pop replaces the manual "in"/"del" dance and must happen before the
    # super() call so the parent serializer never sees this private kwarg
    allow_passwords = kwargs.pop("allow_passwords", False)
    super(FullSerializer, self).__init__(*args, **kwargs)
    self.hybrid = hybrid

    def _requested(group, name):
        # Shared visibility predicate (was duplicated three times): a field is exposed
        # when contents is None, or when its group tag or own name was requested and
        # it was not explicitly excluded via a "-name" entry
        return contents is None or (
            (group in contents or name in contents) and "-{0}".format(name) not in contents
        )

    # Simple properties; password-like fields are hidden unless explicitly allowed
    for prop in self.hybrid._properties:
        if "password" not in prop.name or allow_passwords:
            self.fields[prop.name] = FullSerializer._map_type_to_field(prop.property_type)
    # Dynamic (computed) properties
    for dynamic in self.hybrid._dynamics:
        if _requested("_dynamics", dynamic.name):
            self.fields[dynamic.name] = serializers.Field()
    # Outgoing relations, exposed as "<name>_guid"
    for relation in self.hybrid._relations:
        if _requested("_relations", relation.name):
            self.fields["{0}_guid".format(relation.name)] = serializers.CharField(required=False)
    # Incoming (foreign) relations, exposed as "<name>_guid" or "<name>_guids"
    relations = RelationMapper.load_foreign_relations(hybrid)
    if relations is not None:
        for key, info in relations.iteritems():
            if _requested("_relations", key):
                # Consistent .format() usage (was a mix of %-formatting and .format)
                if info["list"] is True:
                    self.fields["{0}_guids".format(key)] = serializers.Field()
                else:
                    self.fields["{0}_guid".format(key)] = serializers.Field()
def __init__(self, hybrid, contents=None, *args, **kwargs):
    """
    Initializes the serializer, mapping field types
    """
    allow_passwords = kwargs.pop('allow_passwords', False)
    super(FullSerializer, self).__init__(*args, **kwargs)
    self.hybrid = hybrid

    def wanted(group, name):
        # contents None -> expose everything; otherwise honour the group tag,
        # the explicit field name and any '-name' exclusion
        if contents is None:
            return True
        if '-{0}'.format(name) in contents:
            return False
        return group in contents or name in contents

    # Plain properties: password-like fields only when explicitly allowed
    for prop in self.hybrid._properties:
        if allow_passwords or 'password' not in prop.name:
            self.fields[prop.name] = FullSerializer._map_type_to_field(prop.property_type)
    # Dynamic (computed) properties
    for dynamic in self.hybrid._dynamics:
        if wanted('_dynamics', dynamic.name):
            self.fields[dynamic.name] = serializers.Field()
    # Outgoing relations are exposed through their guid
    for relation in self.hybrid._relations:
        if wanted('_relations', relation.name):
            self.fields['{0}_guid'.format(relation.name)] = serializers.CharField(required=False)
    # Foreign (incoming) relations: lists expose '<key>_guids', singles '<key>_guid'
    foreign = RelationMapper.load_foreign_relations(hybrid)
    if foreign is not None:
        for key, info in foreign.iteritems():
            if wanted('_relations', key):
                if info['list'] is True:
                    self.fields['%s_guids' % key] = serializers.Field()
                else:
                    self.fields['%s_guid' % key] = serializers.Field()
def get_properties(_cls):
    """
    Collects the schema property definitions for the given hybrid class:
    its plain properties, relations, dynamics and foreign (incoming) relations.
    """
    props = {}
    # Plain properties map one-to-one onto their builders
    for prop in _cls._properties:
        props[prop.name] = build_property(prop)
    # Relation builders yield (key, definition) pairs
    for relation in _cls._relations:
        key, definition = build_relation(_cls, relation)
        props[key] = definition
    # Dynamic (computed) properties
    for dynamic in _cls._dynamics:
        props[dynamic.name] = build_dynamic(_cls, dynamic)
    # Foreign relations, when any exist, also yield (key, definition) pairs
    relation_info = RelationMapper.load_foreign_relations(_cls)
    if relation_info is not None:
        for entry in relation_info.iteritems():
            key, definition = build_remote_relation(entry)
            props[key] = definition
    return props
def _build_invalidations(invalidations, object_type, items): """ Builds an invalidation set out of a given object type and query items. It will use type information to build the invalidations, and not the actual data. :param invalidations: A by-ref dict containing all invalidations for this list :param object_type: The object type for this invalidations run :param items: The query items that need to be used for building invalidations """ def add(cname, field): if cname not in invalidations: invalidations[cname] = [] if field not in invalidations[cname]: invalidations[cname].append(field) for item in items: if isinstance(item, dict): # Recursive DataList._build_invalidations(invalidations, object_type, item['items']) else: path = item[0].split('.') value = object_type itemcounter = 0 for pitem in path: itemcounter += 1 class_name = value.__name__.lower() if pitem == 'guid': # The guid is a final value which can't be changed so it shouldn't be taken into account break elif pitem in (prop.name for prop in value._properties): # The pitem is in the blueprint, so it's a simple property (e.g. vmachine.name) add(class_name, pitem) break elif pitem in (relation.name for relation in value._relations): # The pitem is in the relations, so it's a relation property (e.g. vdisk.vmachine) add(class_name, pitem) relation = [relation for relation in value._relations if relation.name == pitem][0] if relation.foreign_type is not None: value = relation.foreign_type continue elif pitem.endswith('_guid') and pitem.replace('_guid', '') in (relation.name for relation in value._relations): # The pitem is the guid pointing to a relation, so it can be handled like a simple property (e.g. vdisk.vmachine_guid) add(class_name, pitem.replace('_guid', '')) break elif pitem in (dynamic.name for dynamic in value._dynamics): # The pitem is a dynamic property, which will be ignored anyway break else: # No blueprint and no relation, it might be a foreign relation (e.g. 
vmachine.vdisks) # this means the pitem most likely contains an index cleaned_pitem = pitem.split('[')[0] relations = RelationMapper.load_foreign_relations(value) if relations is not None: if cleaned_pitem in relations: value = Descriptor().load(relations[cleaned_pitem]['class']).get_object() add(value.__name__.lower(), relations[cleaned_pitem]['key']) continue raise RuntimeError('Invalid path given: {0}, currently pointing to {1}'.format(path, pitem))
def _build_invalidations(invalidations, object_type, items): """ Builds an invalidation set out of a given object type and query items. It will use type information to build the invalidations, and not the actual data. """ def add(class_name, field): if class_name not in invalidations: invalidations[class_name] = [] if field not in invalidations[class_name]: invalidations[class_name].append(field) for item in items: if isinstance(item, dict): # Recursive DataList._build_invalidations(invalidations, object_type, item['items']) else: path = item[0].split('.') value = object_type itemcounter = 0 for pitem in path: itemcounter += 1 class_name = value.__name__.lower() if pitem == 'guid': # The guid is a final value which can't be changed so it shouldn't be taken into account break elif pitem in (prop.name for prop in value._properties): # The pitem is in the blueprint, so it's a simple property (e.g. vmachine.name) add(class_name, pitem) break elif pitem in (relation.name for relation in value._relations): # The pitem is in the relations, so it's a relation property (e.g. vdisk.vmachine) add(class_name, pitem) relation = [relation for relation in value._relations if relation.name == pitem][0] if relation.foreign_type is not None: value = relation.foreign_type continue elif pitem.endswith('_guid') and pitem.replace('_guid', '') in (relation.name for relation in value._relations): # The pitem is the guid pointing to a relation, so it can be handled like a simple property (e.g. vdisk.vmachine_guid) add(class_name, pitem.replace('_guid', '')) break elif pitem in (dynamic.name for dynamic in value._dynamics): # The pitem is a dynamic property, which will be ignored anyway break else: # No blueprint and no relation, it might be a foreign relation (e.g. 
vmachine.vdisks) # this means the pitem most likely contains an index cleaned_pitem = pitem.split('[')[0] relations = RelationMapper.load_foreign_relations(value) if relations is not None: if cleaned_pitem in relations: value = Descriptor().load(relations[cleaned_pitem]['class']).get_object() add(value.__name__.lower(), relations[cleaned_pitem]['key']) continue raise RuntimeError('Invalid path given: {0}, currently pointing to {1}'.format(path, pitem))
def save(self, recursive=False, skip=None, _hook=None):
    """
    Save the object to the persistent backend and clear cache, making use of the specified conflict resolve
    settings.
    It will also invalidate certain caches if required. For example lists pointing towards this
    object
    :param recursive: Save related sub-objects recursively
    :param skip: Skip certain relations
    :param _hook: Optional callable invoked just before the data is persisted
                  (NOTE(review): looks like a test hook - confirm against callers)
    :raises VolatileObjectException: When this object was created as volatile
    :raises RaceConditionException: After 5 failed optimistic-save attempts
    :raises MissingMandatoryFieldsException: When mandatory properties/relations are unset
    :raises ObjectNotFoundException: When the object or one of its relations was deleted
    :raises ConcurrencyException: On conflicting concurrent changes without a resolve policy
    """
    if self.volatile is True:
        raise VolatileObjectException()
    tries = 0
    successful = False
    optimistic = True  # First attempts assert against our own original snapshot
    last_assert = None
    while successful is False:
        tries += 1
        if tries > 5:
            # Too many optimistic/pessimistic retries; give up and surface the last assert failure
            DataObject._logger.error('Raising RaceConditionException. Last AssertException: {0}'.format(last_assert))
            raise RaceConditionException()
        # Validate mandatory properties and relations before touching the backend
        invalid_fields = []
        for prop in self._properties:
            if prop.mandatory is True and self._data[prop.name] is None:
                invalid_fields.append(prop.name)
        for relation in self._relations:
            if relation.mandatory is True and self._data[relation.name]['guid'] is None:
                invalid_fields.append(relation.name)
        if len(invalid_fields) > 0:
            raise MissingMandatoryFieldsException('Missing fields on {0}: {1}'.format(self._classname, ', '.join(invalid_fields)))
        if recursive:
            # Save objects that point to us (e.g. disk.vmachine - if this is disk)
            for relation in self._relations:
                if relation.name != skip:  # disks will be skipped
                    item = getattr(self, relation.name)
                    if item is not None:
                        item.save(recursive=True, skip=relation.foreign_key)
            # Save object we point at (e.g. machine.vdisks - if this is machine)
            relations = RelationMapper.load_foreign_relations(self.__class__)
            if relations is not None:
                for key, info in relations.iteritems():
                    if key != skip:  # machine will be skipped
                        if info['list'] is True:
                            for item in getattr(self, key).iterloaded():
                                item.save(recursive=True, skip=info['key'])
                        else:
                            item = getattr(self, key)
                            if item is not None:
                                item.save(recursive=True, skip=info['key'])
        # Verify that all objects we point at still exist in the persistent store
        validation_keys = []
        for relation in self._relations:
            if self._data[relation.name]['guid'] is not None:
                if relation.foreign_type is None:
                    cls = self.__class__  # Self-referencing relation
                else:
                    cls = relation.foreign_type
                validation_keys.append('{0}_{1}_{2}'.format(DataObject.NAMESPACE, cls.__name__.lower(), self._data[relation.name]['guid']))
        try:
            # Materialize the generator; get_multi raises if any key is missing
            [_ for _ in self._persistent.get_multi(validation_keys)]
        except KeyNotFoundException:
            raise ObjectNotFoundException('One of the relations specified in {0} with guid \'{1}\' was not found'.format(
                self.__class__.__name__, self._guid
            ))
        transaction = self._persistent.begin_transaction()
        if self._new is True:
            data = {'_version': 0}
        elif optimistic is True:
            # Optimistic path: assume nobody changed the stored value since we loaded it
            self._persistent.assert_value(self._key, self._original, transaction=transaction)
            data = copy.deepcopy(self._original)
        else:
            # Pessimistic path (after a failed assert): re-read the current stored value
            try:
                current_data = self._persistent.get(self._key)
            except KeyNotFoundException:
                raise ObjectNotFoundException('{0} with guid \'{1}\' was deleted'.format(
                    self.__class__.__name__, self._guid
                ))
            self._persistent.assert_value(self._key, current_data, transaction=transaction)
            data = copy.deepcopy(current_data)
        # Merge our changes into 'data', tracking what changed and what conflicts
        changed_fields = []
        data_conflicts = []
        for attribute in self._data.keys():
            if attribute == '_version':
                continue
            if self._data[attribute] != self._original[attribute]:
                # We changed this value
                changed_fields.append(attribute)
                if attribute in data and self._original[attribute] != data[attribute]:
                    # Some other process also wrote to the database
                    if self._datastore_wins is None:
                        # In case we didn't set a policy, we raise the conflicts
                        data_conflicts.append(attribute)
                    elif self._datastore_wins is False:
                        # If the datastore should not win, we just overwrite the data
                        data[attribute] = self._data[attribute]
                    # If the datastore should win, we discard/ignore our change
                else:
                    # Normal scenario, saving data
                    data[attribute] = self._data[attribute]
            elif attribute not in data:
                data[attribute] = self._data[attribute]
        # Drop attributes that no longer exist on this object's model
        for attribute in data.keys():
            if attribute == '_version':
                continue
            if attribute not in self._data:
                del data[attribute]
        if data_conflicts:
            raise ConcurrencyException('Got field conflicts while saving {0}. Conflicts: {1}'.format(
                self._classname, ', '.join(data_conflicts)
            ))
        # Refresh internal data structure
        self._data = copy.deepcopy(data)
        # First, update reverse index
        base_reverse_key = 'ovs_reverseindex_{0}_{1}|{2}|{3}'
        for relation in self._relations:
            key = relation.name
            original_guid = self._original[key]['guid']
            new_guid = self._data[key]['guid']
            if original_guid != new_guid:
                if relation.foreign_type is None:
                    classname = self.__class__.__name__.lower()
                else:
                    classname = relation.foreign_type.__name__.lower()
                if original_guid is not None:
                    # Remove the old reverse-index entry
                    reverse_key = base_reverse_key.format(classname, original_guid, relation.foreign_key, self.guid)
                    self._persistent.delete(reverse_key, transaction=transaction)
                if new_guid is not None:
                    # Add the new reverse-index entry, asserting the target still exists
                    reverse_key = base_reverse_key.format(classname, new_guid, relation.foreign_key, self.guid)
                    self._persistent.assert_exists('{0}_{1}_{2}'.format(DataObject.NAMESPACE, classname, new_guid))
                    self._persistent.set(reverse_key, 0, transaction=transaction)
        # Second, invalidate property lists
        cache_key = '{0}_{1}|'.format(DataList.CACHELINK, self._classname)
        list_keys = set()
        cache_keys = {}
        for key in list(self._persistent.prefix(cache_key)):
            # Key layout after the prefix: '<list_key>|<field>'
            list_key, field = key.replace(cache_key, '').split('|')
            if list_key not in cache_keys:
                cache_keys[list_key] = [False, []]
            cache_keys[list_key][1].append(key)
            if field in changed_fields or self._new is True:
                list_keys.add(list_key)
                cache_keys[list_key][0] = True
        for list_key in list_keys:
            self._volatile.delete(list_key)
            if cache_keys[list_key][0] is True:
                for key in cache_keys[list_key][1]:
                    self._persistent.delete(key, must_exist=False, transaction=transaction)
        if _hook is not None and hasattr(_hook, '__call__'):
            _hook()
        # Save the data
        self._data['_version'] += 1
        try:
            self._persistent.set(self._key, self._data, transaction=transaction)
            self._persistent.apply_transaction(transaction)
            self._volatile.delete(self._key)
            successful = True
        except KeyNotFoundException:
            raise ObjectNotFoundException('{0} with guid \'{1}\' was deleted'.format(
                self.__class__.__name__, self._guid
            ))
        except AssertException as ex:
            # Concurrent modification detected; retry pessimistically after a random backoff
            last_assert = ex
            optimistic = False
            time.sleep(randint(0, 25) / 100.0)
    self.invalidate_dynamics()
    self._original = copy.deepcopy(self._data)
    self.dirty = False
    self._new = False
def __init__(self, guid=None, data=None, datastore_wins=False, volatile=False, _hook=None):
    """
    Loads an object with a given guid. If no guid is given, a new object
    is generated with a new guid.
    * guid: The guid indicating which object should be loaded
    * data: Optional dict used to prime the object's data instead of loading it
    * datastoreWins: Optional boolean indicating save conflict resolve management.
    ** True: when saving, external modified fields will not be saved
    ** False: when saving, all changed data will be saved, regardless of external updates
    ** None: in case changed field were also changed externally, an error will be raised
    * volatile: When True the object cannot be saved
    * _hook: Optional callable invoked after properties are wired up
             (NOTE(review): looks like a test hook - confirm against callers)
    """
    # Initialize super class
    super(DataObject, self).__init__()

    # Initialize internal fields
    self._frozen = False
    self._datastore_wins = datastore_wins
    self._guid = None             # Guid identifier of the object
    self._original = {}           # Original data copy
    self._metadata = {}           # Some metadata, mainly used for unit testing
    self._data = {}               # Internal data storage
    self._objects = {}            # Internal objects storage

    # Initialize public fields
    self.dirty = False
    self.volatile = volatile

    # Worker fields/objects
    self._classname = self.__class__.__name__.lower()

    # Rebuild _relation types: remap foreign types to their (possibly extended) hybrid implementation
    hybrid_structure = HybridRunner.get_hybrids()
    for relation in self._relations:
        if relation.foreign_type is not None:
            identifier = Descriptor(relation.foreign_type).descriptor['identifier']
            if identifier in hybrid_structure and identifier != hybrid_structure[identifier]['identifier']:
                relation.foreign_type = Descriptor().load(hybrid_structure[identifier]).get_object()

    # Init guid
    self._new = False
    if guid is None:
        # No guid: this is a brand-new object
        self._guid = str(uuid.uuid4())
        self._new = True
    else:
        self._guid = str(guid)

    # Build base keys
    self._key = '{0}_{1}_{2}'.format(DataObject.NAMESPACE, self._classname, self._guid)

    # Worker mutexes
    self._mutex_version = volatile_mutex('ovs_dataversion_{0}_{1}'.format(self._classname, self._guid))

    # Load data from cache or persistent backend where appropriate
    self._volatile = VolatileFactory.get_client()
    self._persistent = PersistentFactory.get_client()
    self._metadata['cache'] = None  # None: undetermined, True: cache hit, False: cache miss
    if self._new:
        self._data = {}
    else:
        if data is not None:
            # Caller supplied the data; no backend round-trip needed
            self._data = copy.deepcopy(data)
            self._metadata['cache'] = None
        else:
            self._data = self._volatile.get(self._key)
            if self._data is None:
                self._metadata['cache'] = False
                try:
                    self._data = self._persistent.get(self._key)
                except KeyNotFoundException:
                    raise ObjectNotFoundException('{0} with guid \'{1}\' could not be found'.format(
                        self.__class__.__name__, self._guid
                    ))
            else:
                self._metadata['cache'] = True

    # Set default values on new fields
    for prop in self._properties:
        if prop.name not in self._data:
            self._data[prop.name] = prop.default
        self._add_property(prop)

    # Load relations
    for relation in self._relations:
        if relation.name not in self._data:
            if relation.foreign_type is None:
                cls = self.__class__  # Self-referencing relation
            else:
                cls = relation.foreign_type
            self._data[relation.name] = Descriptor(cls).descriptor
        self._add_relation_property(relation)

    # Add wrapped properties
    for dynamic in self._dynamics:
        self._add_dynamic_property(dynamic)

    # Load foreign keys
    relations = RelationMapper.load_foreign_relations(self.__class__)
    if relations is not None:
        for key, info in relations.iteritems():
            self._objects[key] = {'info': info, 'data': None}
            self._add_list_property(key, info['list'])

    if _hook is not None and hasattr(_hook, '__call__'):
        _hook()

    if not self._new:
        # Re-cache the object, if required
        if self._metadata['cache'] is False:
            # The data wasn't loaded from the cache, so caching is required now
            try:
                self._mutex_version.acquire(30)
                this_version = self._data['_version']
                store_version = self._persistent.get(self._key)['_version']
                # Only cache when nobody changed the object in the meantime
                if this_version == store_version:
                    self._volatile.set(self._key, self._data)
            except KeyNotFoundException:
                raise ObjectNotFoundException('{0} with guid \'{1}\' could not be found'.format(
                    self.__class__.__name__, self._guid
                ))
            finally:
                self._mutex_version.release()

    # Freeze property creation
    self._frozen = True

    # Optionally, initialize some fields from the supplied data dict
    if data is not None:
        for prop in self._properties:
            if prop.name in data:
                setattr(self, prop.name, data[prop.name])

    # Store original data
    self._original = copy.deepcopy(self._data)
def load_response(_fun):
    """
    Builds the Swagger response code and response schema for a decorated API function.

    Reads the function's ``ovs_metadata['returns']`` (set by the @returns_* decorators)
    and, for plain return types, its docstring ``:return:``/``:rtype:`` entries.

    :param _fun: The decorated API function (carries ``ovs_metadata``)
    :return: Tuple of (response code as string, response schema dict or None)
    """
    response_code = '200'  # Defaults; overridden by decorator/docstring info below
    response_schema = None
    return_info = _fun.ovs_metadata.get('returns', None)
    if return_info is not None:
        return_type, _return_code = return_info['returns']
        if _return_code is not None:
            response_code = _return_code
        if return_type == 'object':
            # Single object: reference its definition
            _cls = return_info['object_type']
            response_schema = {'$ref': '#/definitions/{0}'.format(_cls.__name__)}
        elif return_type == 'list':
            # DataList: describe contents/paging/sorting metadata plus the serialized items
            _cls = return_info['object_type']
            class_schema = {'$ref': '#/definitions/{0}'.format(_cls.__name__)}
            # All selectable fields: properties, relation guids and dynamics ...
            fields = [prop.name for prop in _cls._properties] + \
                     ['{0}_guid'.format(rel.name) for rel in _cls._relations] + \
                     [dynamic.name for dynamic in _cls._dynamics]
            relation_info = RelationMapper.load_foreign_relations(_cls)
            if relation_info is not None:
                # ... plus foreign relation guid(s)
                fields += [('{0}_guid' if rel_info['list'] is False else '{0}_guids').format(key)
                           for key, rel_info in relation_info.iteritems()]
            # Each field can also appear negated ('-field')
            fields = fields + ['-{0}'.format(field) for field in fields]
            response_schema = {'type': 'object',
                               'title': 'DataList',
                               'properties': {'_contents': {'type': 'array',
                                                            'description': 'Requested contents.',
                                                            'items': {'type': 'string'},
                                                            'required': True,
                                                            'collectionFormat': 'csv',
                                                            'enum': ['_dynamics', '_relations', 'guid'] + fields},
                                              '_paging': {'type': 'object',
                                                          'title': 'PagingMetadata',
                                                          'properties': {'total_items': {'type': 'integer', 'description': 'Total items available.'},
                                                                         'max_page': {'type': 'integer', 'description': 'Last page available.'},
                                                                         'end_number': {'type': 'integer', 'description': '1-based index of the last item in the current page.'},
                                                                         'current_page': {'type': 'integer', 'description': 'Current page number.'},
                                                                         'page_size': {'type': 'integer', 'description': 'Number of items in the current page.'},
                                                                         'start_number': {'type': 'integer', 'description': '1-based index of the first item in the current page'}},
                                                          'required': ['total_items', 'max_page', 'end_number', 'current_page', 'page_size', 'start_number']},
                                              '_sorting': {'type': 'array',
                                                           'description': 'Applied sorting',
                                                           'items': {'type': 'string'},
                                                           'required': True,
                                                           'collectionFormat': 'csv',
                                                           'enum': ['-guid', 'guid'] + fields},
                                              'data': {'type': 'array',
                                                       'description': 'List of serialized {0}s.'.format(_cls.__name__),
                                                       'required': True,
                                                       'items': class_schema}},
                               'required': ['_contents', '_paging', '_sorting', 'data']}
        else:
            # Task or plain return type: mine the docstring for :return:/:rtype:
            docs = _fun.__doc__
            doc_info = {}
            if docs is not None:
                for match in re.finditer(':(return|rtype): (.*)', docs, re.MULTILINE):
                    entries = match.groups()
                    doc_info[entries[0]] = entries[1]
            if return_type == 'task':
                task_return = ''
                if 'return' in doc_info:
                    task_return = ' The task returns: {0}'.format(doc_info['return'])
                response_schema = {'type': 'string',
                                   'description': 'A task identifier.{0}'.format(task_return)}
            elif return_type is None:
                response_schema = {'type': 'string'}
                if 'return' in doc_info:
                    response_schema['description'] = doc_info['return']
                if 'rtype' in doc_info:
                    # Map the documented Python type onto a Swagger type
                    type_info = doc_info['rtype']
                    if type_info in ['int', 'long']:
                        response_schema['type'] = 'integer'
                    elif type_info in ['float']:
                        response_schema['type'] = 'number'
                    elif type_info in ['bool']:
                        response_schema['type'] = 'boolean'
                    elif type_info in ['str', 'basestring', 'unicode']:
                        response_schema['type'] = 'string'
                    elif type_info in ['dict']:
                        response_schema['type'] = 'object'
                    elif type_info in ['None']:
                        # No content at all
                        response_schema = None
                        response_code = '204'
    return response_code, response_schema
def load_parameters(_fun):
    """
    Compiles the Swagger parameter list for the given decorated API function.

    Parameter information comes from two places:
    * The ``@load`` decorator metadata (mandatory/optional arguments), enriched with
      descriptions/types parsed from the function's docstring
    * The ``@returns_*`` decorator metadata (contents/paging/sorting query parameters)

    :param _fun: The decorated API function (carries ``ovs_metadata``)
    :return: A list of Swagger parameter dictionaries
    """
    # Parameters by @load decorators
    parameter_info = []
    # Copy the metadata lists: they are shared with the decorator and must not be
    # mutated in place (removing entries from them directly would corrupt the
    # metadata for every subsequent call)
    mandatory_args = list(_fun.ovs_metadata['load']['mandatory'])
    optional_args = list(_fun.ovs_metadata['load']['optional'])
    object_type = _fun.ovs_metadata['load']['object_type']
    # Arguments injected by the framework; never exposed as API parameters
    hidden_args = ['version', 'request', 'local_storagerouter', 'pk', 'contents']
    if object_type is not None:
        object_arg = object_type.__name__.lower()
        if object_arg in mandatory_args or object_arg in optional_args:
            # The object itself is addressed through the guid path parameter
            parameter_info.append({'name': 'guid',
                                   'in': 'path',
                                   'description': 'Identifier of the object on which to call is applied.',
                                   'required': True,
                                   'type': 'string'})
        hidden_args.append(object_arg)
    for entry in hidden_args:
        if entry in mandatory_args:
            mandatory_args.remove(entry)
        if entry in optional_args:
            optional_args.remove(entry)
    # Parse ':param x: ...' and ':type x: ...' entries from the docstring
    docs = _fun.__doc__
    doc_info = {}
    if docs is not None:
        for match in re.finditer(':(param|type) (.*?): (.*)', docs, re.MULTILINE):
            tag, name, description = match.groups()
            if name not in doc_info:
                doc_info[name] = {}
            doc_info[name][tag] = description
    # Documented Python type -> Swagger type; unknown types keep the 'string' default
    swagger_types = {'int': 'integer', 'long': 'integer',
                     'float': 'number',
                     'bool': 'boolean',
                     'str': 'string', 'basestring': 'string', 'unicode': 'string',
                     'dict': 'object'}
    for argument in mandatory_args + optional_args:
        info = {'name': argument,
                'in': 'query',
                'required': argument in mandatory_args,
                'type': 'string'}
        if argument in doc_info:
            description = doc_info[argument].get('param')
            if description:
                info['description'] = description
            type_info = doc_info[argument].get('type')
            if type_info:
                info['type'] = swagger_types.get(type_info, info['type'])
        parameter_info.append(info)
    # Parameters by @returns_* decorators
    return_info = _fun.ovs_metadata.get('returns', None)
    if return_info is not None:
        # Extra parameters
        params = return_info['parameters']
        fields = []
        if 'contents' in params or 'sorting' in params:
            # All selectable fields: properties, relation guids, dynamics and foreign relations
            _cls = return_info['object_type']
            fields = [prop.name for prop in _cls._properties] + \
                     ['{0}_guid'.format(rel.name) for rel in _cls._relations] + \
                     [dynamic.name for dynamic in _cls._dynamics]
            relation_info = RelationMapper.load_foreign_relations(_cls)
            if relation_info is not None:
                fields += [('{0}_guid' if rel_info['list'] is False else '{0}_guids').format(key)
                           for key, rel_info in relation_info.iteritems()]
            # Each field can also appear negated ('-field') for exclusion/descending sort
            fields = fields + ['-{0}'.format(field) for field in fields]
        for parameter in params:
            if parameter == 'contents':
                parameter_info.append({'name': 'contents',
                                       'in': 'query',
                                       'description': 'Specify the returned contents.',
                                       'required': True,
                                       'collectionFormat': 'csv',
                                       'type': 'array',
                                       'enum': ['_dynamics', '_relations', 'guid'] + fields,
                                       'items': {'type': 'string'}})
            elif parameter == 'paging':
                parameter_info.append({'name': 'page',
                                       'in': 'query',
                                       'description': 'Specifies the page to be returned.',
                                       'required': False,
                                       'type': 'integer'})
                parameter_info.append({'name': 'page_size',
                                       'in': 'query',
                                       'description': 'Specifies the size of a page. Supported values: 10, 25, 50 and 100. Requires "page" to be set.',
                                       'required': False,
                                       'type': 'integer'})
            elif parameter == 'sorting':
                parameter_info.append({'name': 'sort',
                                       'in': 'query',
                                       'description': 'Specifies the sorting of the list.',
                                       'required': False,
                                       'default': params[parameter],
                                       'enum': ['guid', '-guid'] + fields,
                                       'type': 'array',
                                       'items': {'type': 'string'}})
    return parameter_info
def save(self, recursive=False, skip=None, _hook=None):
    """
    Save the object to the persistent backend and clear cache, making use of the specified conflict resolve
    settings.
    It will also invalidate certain caches if required. For example lists pointing towards this
    object
    :param recursive: Save related sub-objects recursively
    :param skip: Skip certain relations
    :param _hook: Optional callable invoked just before the data is persisted
                  (NOTE(review): looks like a test hook - confirm against callers)
    :raises VolatileObjectException: When this object was created as volatile
    :raises MissingMandatoryFieldsException: When mandatory properties/relations are unset
    :raises ObjectNotFoundException: When the (non-new) object was deleted meanwhile
    :raises ConcurrencyException: On conflicting concurrent changes without a resolve policy
    :raises SaveRaceConditionException: After 5 failed version-check attempts
    """
    if self.volatile is True:
        raise VolatileObjectException()
    tries = 0
    successful = False
    while successful is False:
        # Validate mandatory properties and relations before touching the backend
        invalid_fields = []
        for prop in self._properties:
            if prop.mandatory is True and self._data[prop.name] is None:
                invalid_fields.append(prop.name)
        for relation in self._relations:
            if relation.mandatory is True and self._data[relation.name]['guid'] is None:
                invalid_fields.append(relation.name)
        if len(invalid_fields) > 0:
            raise MissingMandatoryFieldsException('Missing fields on {0}: {1}'.format(self._classname, ', '.join(invalid_fields)))
        if recursive:
            # Save objects that point to us (e.g. disk.vmachine - if this is disk)
            for relation in self._relations:
                if relation.name != skip:  # disks will be skipped
                    item = getattr(self, relation.name)
                    if item is not None:
                        item.save(recursive=True, skip=relation.foreign_key)
            # Save object we point at (e.g. machine.vdisks - if this is machine)
            relations = RelationMapper.load_foreign_relations(self.__class__)
            if relations is not None:
                for key, info in relations.iteritems():
                    if key != skip:  # machine will be skipped
                        if info['list'] is True:
                            for item in getattr(self, key).iterloaded():
                                item.save(recursive=True, skip=info['key'])
                        else:
                            item = getattr(self, key)
                            if item is not None:
                                item.save(recursive=True, skip=info['key'])
        # Verify that all objects we point at still exist (loading them raises otherwise)
        for relation in self._relations:
            if self._data[relation.name]['guid'] is not None:
                if relation.foreign_type is None:
                    cls = self.__class__  # Self-referencing relation
                else:
                    cls = relation.foreign_type
                _ = cls(self._data[relation.name]['guid'])
        try:
            data = self._persistent.get(self._key)
        except KeyNotFoundException:
            if self._new:
                data = {'_version': 0}
            else:
                raise ObjectNotFoundException('{0} with guid \'{1}\' was deleted'.format(
                    self.__class__.__name__, self._guid
                ))
        # Merge our changes into the freshly loaded 'data', tracking changes and conflicts
        changed_fields = []
        data_conflicts = []
        for attribute in self._data.keys():
            if attribute == '_version':
                continue
            if self._data[attribute] != self._original[attribute]:
                # We changed this value
                changed_fields.append(attribute)
                if attribute in data and self._original[attribute] != data[attribute]:
                    # Some other process also wrote to the database
                    if self._datastore_wins is None:
                        # In case we didn't set a policy, we raise the conflicts
                        data_conflicts.append(attribute)
                    elif self._datastore_wins is False:
                        # If the datastore should not win, we just overwrite the data
                        data[attribute] = self._data[attribute]
                    # If the datastore should win, we discard/ignore our change
                else:
                    # Normal scenario, saving data
                    data[attribute] = self._data[attribute]
            elif attribute not in data:
                data[attribute] = self._data[attribute]
        # Drop attributes that no longer exist on this object's model
        for attribute in data.keys():
            if attribute == '_version':
                continue
            if attribute not in self._data:
                del data[attribute]
        if data_conflicts:
            raise ConcurrencyException('Got field conflicts while saving {0}. Conflicts: {1}'.format(
                self._classname, ', '.join(data_conflicts)
            ))
        # Refresh internal data structure
        self._data = copy.deepcopy(data)
        caching_keys = []  # Volatile keys we touched; rolled back (deleted) on failure
        try:
            # First, update reverse index
            try:
                self._mutex_reverseindex.acquire(60)
                for relation in self._relations:
                    key = relation.name
                    original_guid = self._original[key]['guid']
                    new_guid = self._data[key]['guid']
                    if original_guid != new_guid:
                        if relation.foreign_type is None:
                            classname = self.__class__.__name__.lower()
                        else:
                            classname = relation.foreign_type.__name__.lower()
                        if original_guid is not None:
                            # Unregister ourselves from the old target's reverse index
                            reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, original_guid)
                            reverse_index = self._volatile.get(reverse_key)
                            if reverse_index is not None:
                                if relation.foreign_key in reverse_index:
                                    entries = reverse_index[relation.foreign_key]
                                    if self.guid in entries:
                                        entries.remove(self.guid)
                                        reverse_index[relation.foreign_key] = entries
                                        caching_keys.append(reverse_key)
                                        self._volatile.set(reverse_key, reverse_index)
                        if new_guid is not None:
                            # Register ourselves in the new target's reverse index
                            reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, new_guid)
                            reverse_index = self._volatile.get(reverse_key)
                            if reverse_index is not None:
                                if relation.foreign_key in reverse_index:
                                    entries = reverse_index[relation.foreign_key]
                                    if self.guid not in entries:
                                        entries.append(self.guid)
                                        reverse_index[relation.foreign_key] = entries
                                        caching_keys.append(reverse_key)
                                        self._volatile.set(reverse_key, reverse_index)
                                else:
                                    reverse_index[relation.foreign_key] = [self.guid]
                                    caching_keys.append(reverse_key)
                                    self._volatile.set(reverse_key, reverse_index)
                            else:
                                reverse_index = {relation.foreign_key: [self.guid]}
                                caching_keys.append(reverse_key)
                                self._volatile.set(reverse_key, reverse_index)
                if self._new is True:
                    # New object: seed an empty reverse index for all its foreign relations
                    reverse_key = 'ovs_reverseindex_{0}_{1}'.format(self._classname, self.guid)
                    reverse_index = self._volatile.get(reverse_key)
                    if reverse_index is None:
                        reverse_index = {}
                        relations = RelationMapper.load_foreign_relations(self.__class__)
                        if relations is not None:
                            for key, _ in relations.iteritems():
                                reverse_index[key] = []
                        caching_keys.append(reverse_key)
                        self._volatile.set(reverse_key, reverse_index)
            finally:
                self._mutex_reverseindex.release()
            # Second, invalidate property lists
            try:
                self._mutex_listcache.acquire(60)
                cache_key = '{0}_{1}'.format(DataList.cachelink, self._classname)
                cache_list = Toolbox.try_get(cache_key, {})
                change = False
                for list_key in cache_list.keys():
                    fields = cache_list[list_key]
                    # '__all' lists are invalidated by new objects; otherwise any field overlap does
                    if ('__all' in fields and self._new) or list(set(fields) & set(changed_fields)):
                        change = True
                        self._volatile.delete(list_key)
                        del cache_list[list_key]
                if change is True:
                    self._volatile.set(cache_key, cache_list)
                    self._persistent.set(cache_key, cache_list)
            finally:
                self._mutex_listcache.release()
            if _hook is not None and hasattr(_hook, '__call__'):
                _hook()
            # Save the data
            try:
                self._mutex_version.acquire(30)
                this_version = self._data['_version']
                try:
                    store_version = self._persistent.get(self._key)['_version']
                except KeyNotFoundException:
                    store_version = 0
                if this_version == store_version:
                    # Nobody changed the object in the meantime: bump version and persist
                    self._data['_version'] = this_version + 1
                    self._persistent.set(self._key, self._data)
                    self._volatile.delete(self._key)
                    successful = True
                else:
                    # Version mismatch: someone else saved first; retry the whole loop
                    tries += 1
            finally:
                self._mutex_version.release()
            if tries > 5:
                raise SaveRaceConditionException()
        except:
            # Deliberate bare except: roll back the volatile keys we touched, then re-raise
            for key in caching_keys:
                self._volatile.delete(key)
            raise
    self.invalidate_dynamics()
    self._original = copy.deepcopy(self._data)
    self.dirty = False
    self._new = False
def __init__(self, guid=None, data=None, datastore_wins=False, volatile=False, _hook=None):
    """
    Loads an object with a given guid. If no guid is given, a new object
    is generated with a new guid.
    * guid: The guid indicating which object should be loaded
    * datastoreWins: Optional boolean indicating save conflict resolve management.
    ** True: when saving, external modified fields will not be saved
    ** False: when saving, all changed data will be saved, regardless of external updates
    ** None: in case changed field were also changed externally, an error will be raised
    * data: Optional dict; matching property values are set on the object after loading
    * volatile: When True, the object is treated as volatile (save/delete will refuse to run)
    * _hook: Optional callable invoked after the original data snapshot is taken
             (presumably a test hook to inject race conditions — TODO confirm)
    """
    # Initialize super class
    super(DataObject, self).__init__()
    # Initialize internal fields
    self._frozen = False
    self._datastore_wins = datastore_wins
    self._guid = None             # Guid identifier of the object
    self._original = {}           # Original data copy
    self._metadata = {}           # Some metadata, mainly used for unit testing
    self._data = {}               # Internal data storage
    self._objects = {}            # Internal objects storage
    # Initialize public fields
    self.dirty = False
    self.volatile = volatile
    # Worker fields/objects
    self._classname = self.__class__.__name__.lower()
    self._namespace = 'ovs_data'  # Namespace of the object
    self._mutex_listcache = VolatileMutex('listcache_{0}'.format(self._classname))
    self._mutex_reverseindex = VolatileMutex('reverseindex')
    # Rebuild _relation types: if a hybrid was extended, replace the foreign type
    # by the effective (extended) class from the hybrid structure
    hybrid_structure = HybridRunner.get_hybrids()
    for relation in self._relations:
        if relation.foreign_type is not None:
            identifier = Descriptor(relation.foreign_type).descriptor['identifier']
            if identifier in hybrid_structure and identifier != hybrid_structure[identifier]['identifier']:
                relation.foreign_type = Descriptor().load(hybrid_structure[identifier]).get_object()
    # Init guid: no guid means a brand new object
    self._new = False
    if guid is None:
        self._guid = str(uuid.uuid4())
        self._new = True
    else:
        guid = str(guid).lower()
        # Only version-4 UUIDs are accepted
        if re.match('^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$', guid) is not None:
            self._guid = str(guid)
        else:
            raise ValueError('The given guid is invalid: {0}'.format(guid))
    # Build base keys
    self._key = '{0}_{1}_{2}'.format(self._namespace, self._classname, self._guid)
    # Worker mutexes
    self._mutex_version = VolatileMutex('ovs_dataversion_{0}_{1}'.format(self._classname, self._guid))
    # Load data from cache or persistent backend where appropriate
    self._volatile = VolatileFactory.get_client()
    self._persistent = PersistentFactory.get_client()
    self._metadata['cache'] = None
    if self._new:
        self._data = {}
    else:
        self._data = self._volatile.get(self._key)
        if self._data is None:
            # Cache miss: fall back to the persistent store
            Toolbox.log_cache_hit('object_load', False)
            self._metadata['cache'] = False
            try:
                self._data = self._persistent.get(self._key)
            except KeyNotFoundException:
                raise ObjectNotFoundException('{0} with guid \'{1}\' could not be found'.format(
                    self.__class__.__name__, self._guid
                ))
        else:
            Toolbox.log_cache_hit('object_load', True)
            self._metadata['cache'] = True
    # Set default values on new fields
    for prop in self._properties:
        if prop.name not in self._data:
            self._data[prop.name] = prop.default
        self._add_property(prop)
    # Load relations; a missing relation is initialized to an empty descriptor
    for relation in self._relations:
        if relation.name not in self._data:
            if relation.foreign_type is None:
                cls = self.__class__  # Self-referencing relation
            else:
                cls = relation.foreign_type
            self._data[relation.name] = Descriptor(cls).descriptor
        self._add_relation_property(relation)
    # Add wrapped properties
    for dynamic in self._dynamics:
        self._add_dynamic_property(dynamic)
    # Load foreign keys (relations of other objects pointing to this object)
    relations = RelationMapper.load_foreign_relations(self.__class__)
    if relations is not None:
        for key, info in relations.iteritems():
            self._objects[key] = {'info': info, 'data': None}
            self._add_list_property(key, info['list'])
    # Store original data so save() can detect changed fields later
    self._original = copy.deepcopy(self._data)
    if _hook is not None and hasattr(_hook, '__call__'):
        _hook()
    if not self._new:
        # Re-cache the object, if required
        if self._metadata['cache'] is False:
            # The data wasn't loaded from the cache, so caching is required now.
            # Only cache when the store version didn't change in the meantime.
            try:
                self._mutex_version.acquire(30)
                this_version = self._data['_version']
                store_version = self._persistent.get(self._key)['_version']
                if this_version == store_version:
                    self._volatile.set(self._key, self._data)
            except KeyNotFoundException:
                raise ObjectNotFoundException('{0} with guid \'{1}\' could not be found'.format(
                    self.__class__.__name__, self._guid
                ))
            finally:
                self._mutex_version.release()
    # Freeze property creation
    self._frozen = True
    # Optionally, initialize some fields (only known properties are applied)
    if data is not None:
        for prop in self._properties:
            if prop.name in data:
                setattr(self, prop.name, data[prop.name])
def __init__(self, guid=None, data=None, datastore_wins=False, volatile=False):
    """
    Loads an object with a given guid. If no guid is given, a new object
    is generated with a new guid.
    * guid: The guid indicating which object should be loaded
    * datastoreWins: Optional boolean indicating save conflict resolve management.
    ** True: when saving, external modified fields will not be saved
    ** False: when saving, all changed data will be saved, regardless of external updates
    ** None: in case changed field were also changed externally, an error will be raised
    * data: Optional dict; every key/value pair is set as an attribute after loading
    * volatile: When True, the object is treated as volatile (save/delete will refuse to run)
    """
    # Initialize super class
    super(DataObject, self).__init__()
    # Initialize internal fields
    self._frozen = False
    self._datastore_wins = datastore_wins
    self._guid = None             # Guid identifier of the object
    self._original = {}           # Original data copy
    self._metadata = {}           # Some metadata, mainly used for unit testing
    self._data = {}               # Internal data storage
    self._objects = {}            # Internal objects storage
    # Initialize public fields
    self.dirty = False
    self.volatile = volatile
    # Worker fields/objects
    self._name = self.__class__.__name__.lower()
    self._namespace = 'ovs_data'  # Namespace of the object
    self._mutex_listcache = VolatileMutex('listcache_{0}'.format(self._name))
    self._mutex_reverseindex = VolatileMutex('reverseindex')
    # Rebuild _relation types: if a hybrid was extended, replace the foreign type
    # by the effective (extended) class from the hybrid structure
    hybrid_structure = HybridRunner.get_hybrids()
    for relation in self._relations:
        if relation.foreign_type is not None:
            identifier = Descriptor(relation.foreign_type).descriptor['identifier']
            if identifier in hybrid_structure and identifier != hybrid_structure[identifier]['identifier']:
                relation.foreign_type = Descriptor().load(hybrid_structure[identifier]).get_object()
    # Init guid: no guid means a brand new object
    self._new = False
    if guid is None:
        self._guid = str(uuid.uuid4())
        self._new = True
    else:
        guid = str(guid).lower()
        # Only version-4 UUIDs are accepted
        if re.match('^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$', guid) is not None:
            self._guid = str(guid)
        else:
            raise ValueError('The given guid is invalid: {0}'.format(guid))
    # Build base keys
    self._key = '{0}_{1}_{2}'.format(self._namespace, self._name, self._guid)
    # Version mutex
    self._mutex_version = VolatileMutex('ovs_dataversion_{0}_{1}'.format(self._name, self._guid))
    # Load data from cache or persistent backend where appropriate
    self._volatile = VolatileFactory.get_client()
    self._persistent = PersistentFactory.get_client()
    self._metadata['cache'] = None
    if self._new:
        self._data = {}
    else:
        self._data = self._volatile.get(self._key)
        if self._data is None:
            # Cache miss: fall back to the persistent store
            Toolbox.log_cache_hit('object_load', False)
            self._metadata['cache'] = False
            try:
                self._data = self._persistent.get(self._key)
            except KeyNotFoundException:
                raise ObjectNotFoundException('{0} with guid \'{1}\' could not be found'.format(
                    self.__class__.__name__, self._guid
                ))
        else:
            Toolbox.log_cache_hit('object_load', True)
            self._metadata['cache'] = True
    # Set default values on new fields
    for prop in self._properties:
        if prop.name not in self._data:
            self._data[prop.name] = prop.default
        self._add_property(prop)
    # Load relations; a missing relation is initialized to an empty descriptor
    for relation in self._relations:
        if relation.name not in self._data:
            if relation.foreign_type is None:
                cls = self.__class__  # Self-referencing relation
            else:
                cls = relation.foreign_type
            self._data[relation.name] = Descriptor(cls).descriptor
        self._add_relation_property(relation)
    # Add wrapped properties
    for dynamic in self._dynamics:
        self._add_dynamic_property(dynamic)
    # Load foreign keys (relations of other objects pointing to this object)
    relations = RelationMapper.load_foreign_relations(self.__class__)
    if relations is not None:
        for key, info in relations.iteritems():
            self._objects[key] = {'info': info, 'data': None}
            self._add_list_property(key, info['list'])
    # Store original data so save() can detect changed fields later
    self._original = copy.deepcopy(self._data)
    if not self._new:
        # Re-cache the object
        self._volatile.set(self._key, self._data)
    # Freeze property creation
    self._frozen = True
    # Optionally, initialize some fields
    # NOTE(review): unlike the newer __init__ variant, this sets *every* given
    # key, not only declared properties — TODO confirm this is intentional
    if data is not None:
        for field, value in data.iteritems():
            setattr(self, field, value)
def _get_referenced_fields(self, references=None, object_type=None, query_items=None):
    # type: (Optional[dict], Optional[type], Optional[list]) -> dict
    """
    Retrieve an overview of all fields included in the query
    The fields are mapped by the class name. This mapping is used for nested properties
    :param references: A by-ref dict containing all references for this list (Providing None will generate a new dict)
    :param object_type: The object type for this references run (Providing None will use the current object type)
    :param query_items: The query items that need to be used for building references (Providing None will use the current query)
    :return: A dict containing all classes referenced within the items together with the fields of those classes
    Example: {'disk': ['__all', 'model'], 'storagerouter': ['name']} where disk with model X was requested on storagerouter with name Y
    :rtype: dict
    """
    def add_reference(c_name, f_name):
        """
        Add a reference to the dict
        :param c_name: Name of the class to add
        :param f_name: Name of the field to add
        """
        if c_name not in references:
            references[c_name] = []
        if f_name not in references[c_name]:
            references[c_name].append(f_name)

    # Fill in defaults. Explicit 'is None' checks are required here: a caller may
    # pass an (initially empty) dict by reference, and a truthiness test would
    # silently replace it, breaking the documented by-ref contract.
    if references is None:
        # All fields are referenced by default
        references = {self._object_type.__name__.lower(): ['__all']}
    if object_type is None:
        object_type = self._object_type
    if query_items is None:
        query_items = self._query['items']
    for query_item in query_items:
        if isinstance(query_item, dict):
            # Nested query: recurse, items are added to 'references' by reference
            self._get_referenced_fields(references, object_type, query_item['items'])
        else:
            field = query_item[0]
            field_paths = field.split('.')
            current_object_type = object_type
            # Handle nesting of properties
            for property_item in field_paths:
                class_name = current_object_type.__name__.lower()
                # Determine which property type it is:
                # Options are: relation (both directions), dynamic, simple
                if property_item == 'guid':
                    # The guid is a final value which can't be changed so it shouldn't be taken into account
                    break
                elif property_item in (prop.name for prop in current_object_type._properties):
                    # The property_item is in the properties, so it's a simple property (e.g. vmachine.name)
                    add_reference(class_name, property_item)
                    break
                elif property_item in (relation.name for relation in current_object_type._relations):
                    # The property_item is in the relations, so it's a relation property (e.g. vdisk.vmachine)
                    add_reference(class_name, property_item)
                    relation = [relation for relation in current_object_type._relations if relation.name == property_item][0]
                    if relation.foreign_type is not None:
                        current_object_type = relation.foreign_type
                    continue
                elif property_item.endswith('_guid') and property_item.replace('_guid', '') in (relation.name for relation in current_object_type._relations):
                    # The property_item is the guid pointing to a relation, so it can be handled like a simple property (e.g. vdisk.vmachine_guid)
                    add_reference(class_name, property_item.replace('_guid', ''))
                    break
                elif property_item in (dynamic.name for dynamic in current_object_type._dynamics):
                    # The property_item is a dynamic property, which will be ignored anyway
                    break
                else:
                    # No property and no relation, it might be a foreign relation (e.g. vmachine.vdisks)
                    # this means the property_item most likely contains an index
                    cleaned_property_item = property_item.split('[')[0]
                    relations = RelationMapper.load_foreign_relations(current_object_type)
                    if relations is not None:
                        if cleaned_property_item in relations:
                            current_object_type = Descriptor().load(relations[cleaned_property_item]['class']).get_object()
                            add_reference(current_object_type.__name__.lower(), relations[cleaned_property_item]['key'])
                            continue
                    raise RuntimeError('Invalid path given: {0}, currently pointing to {1}'.format(field_paths, property_item))
    return references
def delete(self, abandon=None, _hook=None):
    """
    Delete the given object. It also invalidates certain lists
    :param abandon: Indicates whether(which) linked objects can be unlinked. Use with caution
                    ('_all' unlinks every foreign relation)
    :param _hook: Hook, called just before the transaction is applied
                  (presumably a test hook to inject race conditions — TODO confirm)
    :raises VolatileObjectException: when the object is marked volatile
    :raises LinkedObjectException: when other objects still link to this one and are not abandoned
    :raises RaceConditionException: after 5 failed transaction attempts
    """
    if self.volatile is True:
        raise VolatileObjectException()
    tries = 0
    successful = False
    last_assert = None
    # Retry loop: the transaction below may fail its assertions when another
    # process modifies related keys concurrently
    while successful is False:
        tries += 1
        if tries > 5:
            DataObject._logger.error(
                'Raising RaceConditionException. Last AssertException: {0}'
                .format(last_assert))
            raise RaceConditionException()
        transaction = self._persistent.begin_transaction()
        # Check foreign relations: objects pointing to this object must either
        # be unlinked (abandon) or block the delete
        relations = RelationMapper.load_foreign_relations(self.__class__)
        if relations is not None:
            for key, info in relations.iteritems():
                items = getattr(self, key)
                if info['list'] is True:
                    if len(items) > 0:
                        if abandon is not None and (key in abandon or '_all' in abandon):
                            for item in items.itersafe():
                                setattr(item, info['key'], None)
                                try:
                                    item.save()
                                except ObjectNotFoundException:
                                    # Linked object disappeared meanwhile; nothing left to unlink
                                    pass
                        else:
                            multi = 'are {0} items'.format(len(
                                items)) if len(items) > 1 else 'is 1 item'
                            raise LinkedObjectException(
                                'There {0} left in self.{1}'.format(
                                    multi, key))
                elif items is not None:
                    # No list (so a 1-to-1 relation), so there should be an object, or None
                    item = items  # More clear naming
                    if abandon is not None and (key in abandon or '_all' in abandon):
                        setattr(item, info['key'], None)
                        try:
                            item.save()
                        except ObjectNotFoundException:
                            # Linked object disappeared meanwhile; nothing left to unlink
                            pass
                    else:
                        raise LinkedObjectException(
                            'There is still an item linked in self.{0}'.
                            format(key))
        # Delete the object out of the persistent store
        try:
            self._persistent.delete(self._key, transaction=transaction)
        except KeyNotFoundException:
            pass
        # First, update reverse index: remove this object's entries from the
        # reverse index of every object it points to
        base_reverse_key = 'ovs_reverseindex_{0}_{1}|{2}|{3}'
        for relation in self._relations:
            key = relation.name
            original_guid = self._original[key]['guid']
            if original_guid is not None:
                if relation.foreign_type is None:
                    classname = self.__class__.__name__.lower()
                else:
                    classname = relation.foreign_type.__name__.lower()
                reverse_key = base_reverse_key.format(
                    classname, original_guid, relation.foreign_key,
                    self.guid)
                self._persistent.delete(reverse_key, transaction=transaction)
        # Second, invalidate property lists that may contain this object
        list_keys = []
        cache_key = '{0}_{1}|'.format(DataList.CACHELINK, self._classname)
        for key in list(self._persistent.prefix(cache_key)):
            list_key, _ = key.replace(cache_key, '').split('|')
            if list_key not in list_keys:
                list_keys.append(list_key)
                self._volatile.delete(list_key)
            self._persistent.delete(key, must_exist=False, transaction=transaction)
        if _hook is not None and hasattr(_hook, '__call__'):
            _hook()
        try:
            self._persistent.apply_transaction(transaction)
            successful = True
        except KeyNotFoundException as ex:
            # The object itself was already gone: treat as a successful delete
            if ex.message != self._key:
                raise
            successful = True
        except AssertException as ex:
            # Concurrent modification detected: remember and retry
            last_assert = ex
    # Delete the object and its properties out of the volatile store
    self.invalidate_dynamics()
    self._volatile.delete(self._key)
def save(self, recursive=False, skip=None, _hook=None):
    """
    Save the object to the persistent backend and clear cache, making use of the specified
    conflict resolve settings.
    It will also invalidate certain caches if required. For example lists pointing towards this
    object
    :param recursive: Save related sub-objects recursively
    :param skip: Skip certain relations (used to break the recursion cycle)
    :param _hook: Hook, called just before the transaction is applied
                  (presumably a test hook to inject race conditions — TODO confirm)
    :raises VolatileObjectException: when the object is marked volatile
    :raises MissingMandatoryFieldsException: when mandatory properties/relations are None
    :raises ConcurrencyException: on field conflicts when no datastore-wins policy is set
    :raises RaceConditionException: after 5 failed transaction attempts
    """
    if self.volatile is True:
        raise VolatileObjectException()
    tries = 0
    successful = False
    optimistic = True  # First attempt assumes nobody changed the store since load
    last_assert = None
    while successful is False:
        tries += 1
        if tries > 5:
            DataObject._logger.error(
                'Raising RaceConditionException. Last AssertException: {0}'
                .format(last_assert))
            raise RaceConditionException()
        # Validate mandatory properties and relations
        invalid_fields = []
        for prop in self._properties:
            if prop.mandatory is True and self._data[prop.name] is None:
                invalid_fields.append(prop.name)
        for relation in self._relations:
            if relation.mandatory is True and self._data[
                    relation.name]['guid'] is None:
                invalid_fields.append(relation.name)
        if len(invalid_fields) > 0:
            raise MissingMandatoryFieldsException(
                'Missing fields on {0}: {1}'.format(
                    self._classname, ', '.join(invalid_fields)))
        if recursive:
            # Save objects that point to us (e.g. disk.vmachine - if this is disk)
            for relation in self._relations:
                if relation.name != skip:  # disks will be skipped
                    item = getattr(self, relation.name)
                    if item is not None:
                        item.save(recursive=True, skip=relation.foreign_key)
            # Save object we point at (e.g. machine.vdisks - if this is machine)
            relations = RelationMapper.load_foreign_relations(
                self.__class__)
            if relations is not None:
                for key, info in relations.iteritems():
                    if key != skip:  # machine will be skipped
                        if info['list'] is True:
                            for item in getattr(self, key).iterloaded():
                                item.save(recursive=True, skip=info['key'])
                        else:
                            item = getattr(self, key)
                            if item is not None:
                                item.save(recursive=True, skip=info['key'])
        # Make sure all objects we point at actually exist in the store
        validation_keys = []
        for relation in self._relations:
            if self._data[relation.name]['guid'] is not None:
                if relation.foreign_type is None:
                    cls = self.__class__
                else:
                    cls = relation.foreign_type
                validation_keys.append('{0}_{1}_{2}'.format(
                    DataObject.NAMESPACE, cls.__name__.lower(),
                    self._data[relation.name]['guid']))
        try:
            # Consume the generator; raises when any key is missing
            [_ for _ in self._persistent.get_multi(validation_keys)]
        except KeyNotFoundException:
            raise ObjectNotFoundException(
                'One of the relations specified in {0} with guid \'{1}\' was not found'
                .format(self.__class__.__name__, self._guid))
        transaction = self._persistent.begin_transaction()
        if self._new is True:
            data = {'_version': 0}
        elif optimistic is True:
            # Optimistic path: assert that the store still holds our original snapshot
            self._persistent.assert_value(self._key, self._original,
                                          transaction=transaction)
            data = copy.deepcopy(self._original)
        else:
            # Pessimistic path (after a failed attempt): re-read the store first
            try:
                current_data = self._persistent.get(self._key)
            except KeyNotFoundException:
                raise ObjectNotFoundException(
                    '{0} with guid \'{1}\' was deleted'.format(
                        self.__class__.__name__, self._guid))
            self._persistent.assert_value(self._key, current_data,
                                          transaction=transaction)
            data = copy.deepcopy(current_data)
        # Merge our changes into 'data', applying the conflict-resolve policy
        changed_fields = []
        data_conflicts = []
        for attribute in self._data.keys():
            if attribute == '_version':
                continue
            if self._data[attribute] != self._original[attribute]:
                # We changed this value
                changed_fields.append(attribute)
                if attribute in data and self._original[attribute] != data[
                        attribute]:
                    # Some other process also wrote to the database
                    if self._datastore_wins is None:
                        # In case we didn't set a policy, we raise the conflicts
                        data_conflicts.append(attribute)
                    elif self._datastore_wins is False:
                        # If the datastore should not win, we just overwrite the data
                        data[attribute] = self._data[attribute]
                    # If the datastore should win, we discard/ignore our change
                else:
                    # Normal scenario, saving data
                    data[attribute] = self._data[attribute]
            elif attribute not in data:
                data[attribute] = self._data[attribute]
        # Drop fields that no longer exist on this object
        for attribute in data.keys():
            if attribute == '_version':
                continue
            if attribute not in self._data:
                del data[attribute]
        if data_conflicts:
            raise ConcurrencyException(
                'Got field conflicts while saving {0}. Conflicts: {1}'.
                format(self._classname, ', '.join(data_conflicts)))
        # Refresh internal data structure
        self._data = copy.deepcopy(data)
        # First, update reverse index: move our entry when a relation's guid changed
        base_reverse_key = 'ovs_reverseindex_{0}_{1}|{2}|{3}'
        for relation in self._relations:
            key = relation.name
            original_guid = self._original[key]['guid']
            new_guid = self._data[key]['guid']
            if original_guid != new_guid:
                if relation.foreign_type is None:
                    classname = self.__class__.__name__.lower()
                else:
                    classname = relation.foreign_type.__name__.lower()
                if original_guid is not None:
                    reverse_key = base_reverse_key.format(
                        classname, original_guid, relation.foreign_key,
                        self.guid)
                    self._persistent.delete(reverse_key, transaction=transaction)
                if new_guid is not None:
                    reverse_key = base_reverse_key.format(
                        classname, new_guid, relation.foreign_key,
                        self.guid)
                    # The target object must still exist when we link to it
                    self._persistent.assert_exists('{0}_{1}_{2}'.format(
                        DataObject.NAMESPACE, classname, new_guid))
                    self._persistent.set(reverse_key, 0, transaction=transaction)
        # Second, invalidate property lists affected by the changed fields
        cache_key = '{0}_{1}|'.format(DataList.CACHELINK, self._classname)
        list_keys = set()
        cache_keys = {}
        for key in list(self._persistent.prefix(cache_key)):
            list_key, field = key.replace(cache_key, '').split('|')
            if list_key not in cache_keys:
                cache_keys[list_key] = [False, []]
            cache_keys[list_key][1].append(key)
            if field in changed_fields or self._new is True:
                list_keys.add(list_key)
                cache_keys[list_key][0] = True
        for list_key in list_keys:
            self._volatile.delete(list_key)
            if cache_keys[list_key][0] is True:
                for key in cache_keys[list_key][1]:
                    self._persistent.delete(key, must_exist=False,
                                            transaction=transaction)
        if _hook is not None and hasattr(_hook, '__call__'):
            _hook()
        # Save the data
        self._data['_version'] += 1
        try:
            self._persistent.set(self._key, self._data,
                                 transaction=transaction)
            self._persistent.apply_transaction(transaction)
            self._volatile.delete(self._key)
            successful = True
        except KeyNotFoundException:
            raise ObjectNotFoundException(
                '{0} with guid \'{1}\' was deleted'.format(
                    self.__class__.__name__, self._guid))
        except AssertException as ex:
            # Concurrent modification: retry pessimistically after a random backoff
            last_assert = ex
            optimistic = False
            time.sleep(randint(0, 25) / 100.0)
    self.invalidate_dynamics()
    self._original = copy.deepcopy(self._data)
    self.dirty = False
    self._new = False
def delete(self, abandon=None, _hook=None):
    """
    Delete the given object. It also invalidates certain lists
    :param abandon: Indicates whether(which) linked objects can be unlinked. Use with caution
                    ('_all' unlinks every foreign relation)
    :param _hook: Hook, called just before the transaction is applied
                  (presumably a test hook to inject race conditions — TODO confirm)
    :raises VolatileObjectException: when the object is marked volatile
    :raises LinkedObjectException: when other objects still link to this one and are not abandoned
    :raises RaceConditionException: after 5 failed transaction attempts
    """
    if self.volatile is True:
        raise VolatileObjectException()
    tries = 0
    successful = False
    last_assert = None
    # Retry loop: the transaction below may fail its assertions when another
    # process modifies related keys concurrently
    while successful is False:
        tries += 1
        if tries > 5:
            DataObject._logger.error('Raising RaceConditionException. Last AssertException: {0}'.format(last_assert))
            raise RaceConditionException()
        transaction = self._persistent.begin_transaction()
        # Check foreign relations: objects pointing to this object must either
        # be unlinked (abandon) or block the delete
        relations = RelationMapper.load_foreign_relations(self.__class__)
        if relations is not None:
            for key, info in relations.iteritems():
                items = getattr(self, key)
                if info['list'] is True:
                    if len(items) > 0:
                        if abandon is not None and (key in abandon or '_all' in abandon):
                            for item in items.itersafe():
                                setattr(item, info['key'], None)
                                try:
                                    item.save()
                                except ObjectNotFoundException:
                                    # Linked object disappeared meanwhile; nothing left to unlink
                                    pass
                        else:
                            multi = 'are {0} items'.format(len(items)) if len(items) > 1 else 'is 1 item'
                            raise LinkedObjectException('There {0} left in self.{1}'.format(multi, key))
                elif items is not None:
                    # No list (so a 1-to-1 relation), so there should be an object, or None
                    item = items  # More clear naming
                    if abandon is not None and (key in abandon or '_all' in abandon):
                        setattr(item, info['key'], None)
                        try:
                            item.save()
                        except ObjectNotFoundException:
                            # Linked object disappeared meanwhile; nothing left to unlink
                            pass
                    else:
                        raise LinkedObjectException('There is still an item linked in self.{0}'.format(key))
        # Delete the object out of the persistent store
        try:
            self._persistent.delete(self._key, transaction=transaction)
        except KeyNotFoundException:
            pass
        # First, update reverse index: remove this object's entries from the
        # reverse index of every object it points to
        base_reverse_key = 'ovs_reverseindex_{0}_{1}|{2}|{3}'
        for relation in self._relations:
            key = relation.name
            original_guid = self._original[key]['guid']
            if original_guid is not None:
                if relation.foreign_type is None:
                    classname = self.__class__.__name__.lower()
                else:
                    classname = relation.foreign_type.__name__.lower()
                reverse_key = base_reverse_key.format(classname, original_guid, relation.foreign_key, self.guid)
                self._persistent.delete(reverse_key, transaction=transaction)
        # Second, invalidate property lists that may contain this object
        list_keys = []
        cache_key = '{0}_{1}|'.format(DataList.CACHELINK, self._classname)
        for key in list(self._persistent.prefix(cache_key)):
            list_key, _ = key.replace(cache_key, '').split('|')
            if list_key not in list_keys:
                list_keys.append(list_key)
                self._volatile.delete(list_key)
            self._persistent.delete(key, must_exist=False, transaction=transaction)
        if _hook is not None and hasattr(_hook, '__call__'):
            _hook()
        try:
            self._persistent.apply_transaction(transaction)
            successful = True
        except KeyNotFoundException as ex:
            # The object itself was already gone: treat as a successful delete
            if ex.message != self._key:
                raise
            successful = True
        except AssertException as ex:
            # Concurrent modification detected: remember and retry
            last_assert = ex
    # Delete the object and its properties out of the volatile store
    self.invalidate_dynamics()
    self._volatile.delete(self._key)
def test_objectproperties(self):
    """
    Validates the correctness of all hybrid objects:
    * They should contain all required properties
    * Properties should have the correct type
    * All dynamic properties should be implemented
    """
    # Some stuff here to dynamically test all hybrid properties
    hybrid_structure = HybridRunner.get_hybrids()
    print ''
    print 'Validating hybrids...'
    for class_descriptor in hybrid_structure.values():
        cls = Descriptor().load(class_descriptor).get_object()
        print '* {0}'.format(cls.__name__)
        relation_info = RelationMapper.load_foreign_relations(cls)
        remote_properties_n = []  # One-to-many foreign relations
        remote_properties_1 = []  # One-to-one foreign relations
        if relation_info is not None:
            for key, info in relation_info.iteritems():
                if info['list'] is True:
                    remote_properties_n.append(key)
                else:
                    remote_properties_1.append(key)
        # Make sure certain attributes are correctly set
        self.assertIsInstance(cls._properties, list, '_properties required: {0}'.format(cls.__name__))
        self.assertIsInstance(cls._relations, list, '_relations required: {0}'.format(cls.__name__))
        self.assertIsInstance(cls._dynamics, list, '_dynamics required: {0}'.format(cls.__name__))
        # Check types
        allowed_types = [int, float, long, str, bool, list, dict]
        for prop in cls._properties:
            is_allowed_type = prop.property_type in allowed_types \
                or isinstance(prop.property_type, list)
            self.assertTrue(is_allowed_type,
                            '_properties types in {0} should be one of {1}'.format(
                                cls.__name__, str(allowed_types)
                            ))
        for dynamic in cls._dynamics:
            is_allowed_type = dynamic.return_type in allowed_types \
                or isinstance(dynamic.return_type, list)
            self.assertTrue(is_allowed_type,
                            '_dynamics types in {0} should be one of {1}'.format(
                                cls.__name__, str(allowed_types)
                            ))
        instance = cls()
        for prop in cls._properties:
            self.assertEqual(getattr(instance, prop.name), prop.default,
                             'Default property set correctly')
        # Make sure the type can be instantiated
        self.assertIsNotNone(instance.guid)
        # Collect every Python property exposed on the class
        properties = []
        for item in dir(instance):
            if hasattr(cls, item) and isinstance(getattr(cls, item), property):
                properties.append(item)
        # All dynamic properties should be implemented as Python properties
        missing_props = []
        for dynamic in instance._dynamics:
            if dynamic.name not in properties:
                missing_props.append(dynamic.name)
        self.assertEqual(len(missing_props), 0,
                         'Missing dynamic properties in {0}: {1}'.format(cls.__name__, missing_props))
        # All properties should be either in the blueprint, relations or dynamics
        missing_metadata = []
        for found_prop in properties:
            found = found_prop in [prop.name for prop in cls._properties] \
                or found_prop in [relation.name for relation in cls._relations] \
                or found_prop in ['{0}_guid'.format(relation.name) for relation in cls._relations] \
                or found_prop in [dynamic.name for dynamic in cls._dynamics] \
                or found_prop in remote_properties_n \
                or found_prop in remote_properties_1 \
                or found_prop in ['{0}_guids'.format(key) for key in remote_properties_n] \
                or found_prop in ['{0}_guid'.format(key) for key in remote_properties_1] \
                or found_prop == 'guid'
            if not found:
                missing_metadata.append(found_prop)
        self.assertEqual(len(missing_metadata), 0,
                         'Missing metadata for properties in {0}: {1}'.format(cls.__name__, missing_metadata))
        instance.delete()
def delete(self, abandon=None, _hook=None):
    """
    Delete the given object. It also invalidates certain lists
    :param abandon: Indicates whether(which) linked objects can be unlinked. Use with caution
                    ('_all' unlinks every foreign relation)
    :param _hook: NOTE(review): accepted but never called in this variant — TODO confirm
    :raises VolatileObjectException: when the object is marked volatile
    :raises LinkedObjectException: when other objects still link to this one and are not abandoned
    """
    if self.volatile is True:
        raise VolatileObjectException()
    # Check foreign relations: objects pointing to this object must either
    # be unlinked (abandon) or block the delete
    relations = RelationMapper.load_foreign_relations(self.__class__)
    if relations is not None:
        for key, info in relations.iteritems():
            items = getattr(self, key)
            if info['list'] is True:
                if len(items) > 0:
                    if abandon is not None and (key in abandon or '_all' in abandon):
                        for item in items.itersafe():
                            setattr(item, info['key'], None)
                            try:
                                item.save()
                            except ObjectNotFoundException:
                                # Linked object disappeared meanwhile; nothing left to unlink
                                pass
                    else:
                        multi = 'are {0} items'.format(len(items)) if len(items) > 1 else 'is 1 item'
                        raise LinkedObjectException('There {0} left in self.{1}'.format(multi, key))
            elif items is not None:
                # No list (so a 1-to-1 relation), so there should be an object, or None
                item = items  # More clear naming
                if abandon is not None and (key in abandon or '_all' in abandon):
                    setattr(item, info['key'], None)
                    try:
                        item.save()
                    except ObjectNotFoundException:
                        # Linked object disappeared meanwhile; nothing left to unlink
                        pass
                else:
                    raise LinkedObjectException('There is still an item linked in self.{0}'.format(key))
    # Delete the object out of the persistent store
    try:
        self._persistent.delete(self._key)
    except KeyNotFoundException:
        pass
    # First, update reverse index: remove this object from the volatile reverse
    # index of every object it points to, then drop its own reverse index
    try:
        self._mutex_reverseindex.acquire(60)
        for relation in self._relations:
            key = relation.name
            original_guid = self._original[key]['guid']
            if original_guid is not None:
                if relation.foreign_type is None:
                    classname = self.__class__.__name__.lower()
                else:
                    classname = relation.foreign_type.__name__.lower()
                reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, original_guid)
                reverse_index = self._volatile.get(reverse_key)
                if reverse_index is not None:
                    if relation.foreign_key in reverse_index:
                        entries = reverse_index[relation.foreign_key]
                        if self.guid in entries:
                            entries.remove(self.guid)
                            reverse_index[relation.foreign_key] = entries
                            self._volatile.set(reverse_key, reverse_index)
        self._volatile.delete('ovs_reverseindex_{0}_{1}'.format(self._classname, self.guid))
    finally:
        self._mutex_reverseindex.release()
    # Second, invalidate property lists that may contain this object
    try:
        self._mutex_listcache.acquire(60)
        cache_key = '{0}_{1}'.format(DataList.cachelink, self._classname)
        cache_list = Toolbox.try_get(cache_key, {})
        change = False
        for list_key in cache_list.keys():
            fields = cache_list[list_key]
            # A delete potentially affects every list over this class
            if '__all' in fields:
                change = True
                self._volatile.delete(list_key)
                del cache_list[list_key]
        if change is True:
            self._volatile.set(cache_key, cache_list)
            self._persistent.set(cache_key, cache_list)
    finally:
        self._mutex_listcache.release()
    # Delete the object and its properties out of the volatile store
    self.invalidate_dynamics()
    self._volatile.delete(self._key)
def test_objectproperties(self): """ Validates the correctness of all hybrid objects: * They should contain all required properties * Properties should have the correct type * All dynamic properties should be implemented """ # Some stuff here to dynamically test all hybrid properties hybrid_structure = HybridRunner.get_hybrids() print '' print 'Validating hybrids...' for class_descriptor in hybrid_structure.values(): cls = Descriptor().load(class_descriptor).get_object() print '* {0}'.format(cls.__name__) relation_info = RelationMapper.load_foreign_relations(cls) remote_properties_n = [] remote_properties_1 = [] if relation_info is not None: for key, info in relation_info.iteritems(): if info['list'] is True: remote_properties_n.append(key) else: remote_properties_1.append(key) # Make sure certain attributes are correctly set self.assertIsInstance( cls._properties, list, '_properties required: {0}'.format(cls.__name__)) self.assertIsInstance( cls._relations, list, '_relations required: {0}'.format(cls.__name__)) self.assertIsInstance( cls._dynamics, list, '_dynamics required: {0}'.format(cls.__name__)) # Check types allowed_types = [int, float, str, bool, list, dict] for prop in cls._properties: is_allowed_type = prop.property_type in allowed_types \ or isinstance(prop.property_type, list) self.assertTrue( is_allowed_type, '_properties types in {0} should be one of {1}'.format( cls.__name__, str(allowed_types))) for dynamic in cls._dynamics: is_allowed_type = dynamic.return_type in allowed_types \ or isinstance(dynamic.return_type, list) self.assertTrue( is_allowed_type, '_dynamics types in {0} should be one of {1}'.format( cls.__name__, str(allowed_types))) instance = cls() for prop in cls._properties: self.assertEqual(getattr(instance, prop.name), prop.default, 'Default property set correctly') # Make sure the type can be instantiated self.assertIsNotNone(instance.guid) properties = [] for item in dir(instance): if hasattr(cls, item) and isinstance(getattr(cls, 
item), property): properties.append(item) # All expiries should be implemented missing_props = [] for dynamic in instance._dynamics: if dynamic.name not in properties: missing_props.append(dynamic.name) else: # ... and should work _ = getattr(instance, dynamic.name) self.assertEqual( len(missing_props), 0, 'Missing dynamic properties in {0}: {1}'.format( cls.__name__, missing_props)) # An all properties should be either in the blueprint, relations or expiry missing_metadata = [] for found_prop in properties: found = found_prop in [prop.name for prop in cls._properties] \ or found_prop in [relation.name for relation in cls._relations] \ or found_prop in ['{0}_guid'.format(relation.name) for relation in cls._relations] \ or found_prop in [dynamic.name for dynamic in cls._dynamics] \ or found_prop in remote_properties_n \ or found_prop in remote_properties_1 \ or found_prop in ['{0}_guids'.format(key) for key in remote_properties_n] \ or found_prop in ['{0}_guid'.format(key) for key in remote_properties_1] \ or found_prop == 'guid' if not found: missing_metadata.append(found_prop) self.assertEqual( len(missing_metadata), 0, 'Missing metadata for properties in {0}: {1}'.format( cls.__name__, missing_metadata)) instance.delete()
def __init__(self, hybrid, contents=None, depth=None, *args, **kwargs):
    """
    Initializes the serializer, mapping field types
    :param hybrid: Hybrid object to serialize
    :type hybrid: any (ovs.dal.hybrids.X.X)
    :param contents: Contents to serialize. Without contents, only the GUID is serialized
    When contents is given, all non-dynamic properties are serialized
    Further options are:
    - _dynamics: Include all dynamic properties
    - _relations: Include foreign keys and lists of primary keys of linked objects
    - _relations_contents: Apply the contents to the relations. The relation contents can be a bool or a new contents item
      - If the relations_contents=re-use: the current contents are also applied to the relation object
      - If the relations_contents=contents list: That item is subjected to the same rules as other contents
    - _relation_contents_RELATION_NAME: Apply the contents to the given relation. Same rules as _relations_contents apply here
    - _relations_depth: Depth of relational serialization. Defaults to 1 when relation contents were specified.
      Specifying a form of _relations_contents changes the depth to 1 (if depth was 0) as the relation is to be serialized
      Specifying it 2 with _relations_contents given will serialize the relations of the fetched relation.
      This causes a chain of serializations
    - dynamic_property_1,dynamic_property_2 (results in static properties plus 2 dynamic properties)
    Properties can also be excluded by prefixing the field with '-':
    - contents=_dynamic,-dynamic_property_2,_relations
      (static properties, all dynamic properties except for dynamic_property_2 plus all relations)
    Relation serialization can be done by asking for it:
    - contents=_relations,_relations_contents=re-use
    All relational serialization can only be used to get data. This data will not be settable when deserializing
    :type contents: list or none
    :param depth: Current depth of serializing, used to serialize relations
    :type depth: int
    Kwarg parameters:
    :param allow_passwords: Allow the attr 'password' to be serialized
    :type allow_passwords: bool
    Parent parameters:
    :param instance: Instance of the object to use for updating
    :type instance: any
    :param data: Initialization data (Will be applied to the instance if an instance is given)
    :type data: list[dict] or dict
    :param many: Indicate that the given instance is to be iterated for serialization
    :type many: bool
    """
    if not isinstance(contents, ContentOptions):
        contents = ContentOptions(contents)
    allow_passwords = kwargs.pop('allow_passwords', False)
    super(FullSerializer, self).__init__(*args, **kwargs)
    self.hybrid = hybrid
    # Static properties: password-like fields are hidden unless explicitly allowed
    for prop in self.hybrid._properties:
        if 'password' not in prop.name or allow_passwords:
            self.fields[prop.name] = FullSerializer._map_type_to_field(prop.property_type)
    # Dynamic properties: included when requested and not explicitly excluded with '-name'
    for dynamic in self.hybrid._dynamics:
        if contents.has_content is False or (('_dynamics' in contents or dynamic.name in contents)
                                             and '-{0}'.format(dynamic.name) not in contents):
            self.fields[dynamic.name] = serializers.Field()
    # Own relations: expose the (writable) foreign key guid
    for relation in self.hybrid._relations:
        if contents.has_content is False or (('_relations' in contents or relation.name in contents)
                                             and '-{0}'.format(relation.name) not in contents):
            self.fields['{0}_guid'.format(relation.name)] = serializers.CharField(required=False)
    # Foreign relations: the to-many side of things, items pointing towards this object (read-only)
    foreign_relations = RelationMapper.load_foreign_relations(hybrid)
    if foreign_relations is not None:
        for key, info in foreign_relations.iteritems():
            if contents.has_content is False or (('_relations' in contents or key in contents)
                                                 and '-{0}'.format(key) not in contents):
                if info['list'] is True:
                    self.fields['%s_guids' % key] = serializers.Field()
                else:
                    self.fields['%s_guid' % key] = serializers.Field()
    # Check if a relation needs to be serialized as a nested object
    # (re-uses the foreign_relations fetched above; the original code queried the RelationMapper twice)
    if contents.has_content is False or (foreign_relations is None and len(hybrid._relations) == 0) or depth == 0:
        return
    relation_contents = contents.get_option('_relations_contents')
    relation_contents_options = copy.deepcopy(contents) if relation_contents == 're-use' else ContentOptions(relation_contents)
    # 'foreign' maps relation name -> info dict, 'own' is a list of relation objects: both shapes are handled below
    relations_data = {'foreign': foreign_relations or {},
                      'own': hybrid._relations}
    for relation_type, relations in relations_data.iteritems():
        for relation in relations:
            if relation_type == 'own':
                relation_key = relation.name
                # A self-referencing relation carries foreign_type None: serialize with the own hybrid in that case
                relation_hybrid = relation.foreign_type if relation.foreign_type is not None else hybrid
            else:
                relation_key = relation
                relation_hybrid = Descriptor().load(relations[relation]['class']).get_object()
            # Possible extra content supplied for this specific relation
            relation_content = contents.get_option('_relation_contents_{0}'.format(relation_key))
            if relation_content is None and relation_contents == 're-use':
                relation_content_options = relation_contents_options
            else:
                relation_content_options = ContentOptions(relation_content)
            # Use the depth given by the contents when this is the first item to serialize
            if depth is None:
                relation_depth = contents.get_option('_relations_depth',
                                                     1 if relation_content_options.has_content else 0)
            else:
                relation_depth = depth
            if relation_depth is None:  # Can be None when no value is given to _relations_depth
                relation_depth = 0
            if relation_depth == 0:
                continue
            # @Todo prevent the same one-to-one relations from being serialized multiple times? Not sure if helpful though
            self.fields[relation_key] = FullSerializer(relation_hybrid,
                                                       contents=relation_content_options,
                                                       depth=relation_depth - 1)