Example #1
 def _backend_property(self, function, dynamic):
     """
     Handles the internal caching of dynamic properties
     """
     caller_name = dynamic.name
     cache_key = '{0}_{1}'.format(self._key, caller_name)
     mutex = volatile_mutex(cache_key)
     try:
         cached_data = self._volatile.get(cache_key)
         if cached_data is None:
             if dynamic.locked:
                 mutex.acquire()
                 cached_data = self._volatile.get(cache_key)
             if cached_data is None:
                 function_info = inspect.getargspec(function)
                 if 'dynamic' in function_info.args:
                     dynamic_data = function(dynamic=dynamic)  # Load data from backend
                 else:
                     dynamic_data = function()
                 correct, allowed_types, given_type = Toolbox.check_type(dynamic_data, dynamic.return_type)
                 if not correct:
                     raise TypeError('Dynamic property {0} allows types {1}. {2} given'.format(
                         caller_name, str(allowed_types), given_type
                     ))
                 cached_data = {'data': dynamic_data}
                 if dynamic.timeout > 0:
                     self._volatile.set(cache_key, cached_data, dynamic.timeout)
         return Toolbox.convert_unicode_to_string(cached_data['data'])
     finally:
         mutex.release()
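For context, this helper is typically reached through a generated getter on a hybrid object. Below is a minimal sketch of how a dynamic property might be declared and wired up; the Dynamic constructor and the backend call are assumptions inferred purely from the attributes used above (name, return_type, timeout, locked):

 class TestDisk(DataObject):
     # Hypothetical declaration; the real Dynamic signature may differ
     _dynamics = [Dynamic(name='used_size', return_type=int, timeout=60, locked=True)]

     def _used_size(self, dynamic):
         return fetch_size_from_backend(self.guid)  # illustrative backend call

 # Attribute access then routes through the caching helper:
 # disk.used_size -> disk._backend_property(disk._used_size, dynamic)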
Example #2
    def verify_required_params(required_params, actual_params):
        """
        Verify whether the actual parameters match the required parameters
        """
        error_messages = []
        for required_key, key_info in required_params.iteritems():
            expected_type = key_info[0]
            expected_value = key_info[1]
            optional = len(key_info) == 3 and key_info[2] is False

            if required_key not in actual_params:
                error_messages.append('Missing required param "{0}"'.format(required_key))
                continue

            actual_value = actual_params[required_key]
            if HelperToolbox.check_type(actual_value, expected_type)[0] is False:
                error_messages.append('Required param "{0}" is of type "{1}" but we expected type "{2}"'.format(required_key, type(actual_value), expected_type))
                continue

            if expected_value is None or (optional is True and actual_value in ('', None)):
                continue

            if expected_type == list:
                if type(expected_value) == Toolbox.compiled_regex_type:  # List of strings which need to match regex
                    for item in actual_value:
                        if not re.match(expected_value, item):
                            error_messages.append('Required param "{0}" has an item "{1}" which does not match regex "{2}"'.format(required_key, item, expected_value.pattern))
            else:
                if HelperToolbox.check_type(expected_value, list)[0] is True and actual_value not in expected_value:
                    error_messages.append('Required param "{0}" with value "{1}" should be 1 of the following: {2}'.format(required_key, actual_value, expected_value))
                elif HelperToolbox.check_type(expected_value, Toolbox.compiled_regex_type)[0] is True and not re.match(expected_value, actual_value):
                    error_messages.append('Required param "{0}" with value "{1}" does not match regex "{2}"'.format(required_key, actual_value, expected_value.pattern))
        if error_messages:
            raise RuntimeError('\n' + '\n'.join(error_messages))
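Each entry in required_params maps a key to a (type, expected_value[, optional]) tuple, where expected_value may be None (type check only), a list of allowed values, or a compiled regex. Note that this variant still reports an absent key even when it is marked optional; the variants in Examples #4 and #7 skip absent optional keys entirely. A usage sketch under those assumptions:

    import re

    spec = {'name': (str, re.compile(r'^[a-z0-9_]+$')),  # string that must match a regex
            'size': (int, None),                         # any integer
            'mode': (str, ['read', 'write'], False)}     # optional; if present, one of the list
    Toolbox.verify_required_params(spec, {'name': 'disk_1', 'size': 100, 'mode': 'read'})
    # All violations are collected and raised together as one RuntimeError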
Example #3
 def load_foreign_relations(object_type):
     """
     This method will return a mapping of all relations towards a certain hybrid object type.
     The resulting mapping will be stored in volatile storage so it can be fetched faster
     """
     relation_key = 'ovs_relations_{0}'.format(object_type.__name__.lower())
     volatile = VolatileFactory.get_client()
     relation_info = volatile.get(relation_key)
     if relation_info is None:
         Toolbox.log_cache_hit('relations', False)
         relation_info = {}
         hybrid_structure = HybridRunner.get_hybrids()
         for class_descriptor in hybrid_structure.values():  # Extended objects
             cls = Descriptor().load(class_descriptor).get_object()
             for relation in cls._relations:
                 if relation.foreign_type is None:
                     remote_class = cls
                 else:
                     identifier = Descriptor(relation.foreign_type).descriptor['identifier']
                     if identifier in hybrid_structure and identifier != hybrid_structure[identifier]['identifier']:
                         remote_class = Descriptor().load(hybrid_structure[identifier]).get_object()
                     else:
                         remote_class = relation.foreign_type
                 itemname = remote_class.__name__
                 if itemname == object_type.__name__:
                     relation_info[relation.foreign_key] = {'class': Descriptor(cls).descriptor,
                                                            'key': relation.name,
                                                            'list': not relation.onetoone}
         volatile.set(relation_key, relation_info)
     else:
         Toolbox.log_cache_hit('relations', True)
     return relation_info
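For illustration, assume hybrids vMachine and vDisk where vDisk declares a one-to-many relation named 'vmachine' with foreign key 'vdisks' (the naming follows the comments in Example #8). The returned mapping would then look roughly like:

 relations = RelationMapper.load_foreign_relations(vMachine)
 # relations == {'vdisks': {'class': Descriptor(vDisk).descriptor,  # class on the remote side
 #                          'key': 'vmachine',                      # backlink attribute on vDisk
 #                          'list': True}}                          # not one-to-one, so a list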
Example #4
    def verify_required_params(required_params, actual_params, exact_match=False):
        """
        Verify whether the actual parameters match the required parameters
        :param required_params: Required parameters which actual parameters have to meet
        :param actual_params: Actual parameters to check for validity
        :param exact_match: Keys of both dictionaries must be identical
        :return: None
        """
        error_messages = []
        if not isinstance(required_params, dict) or not isinstance(actual_params, dict):
            raise RuntimeError('Required and actual parameters must be of type dictionary')

        if exact_match is True:
            for key in set(actual_params.keys()).difference(required_params.keys()):
                error_messages.append('Missing key "{0}" in required_params'.format(key))

        for required_key, key_info in required_params.iteritems():
            expected_type = key_info[0]
            expected_value = key_info[1]
            optional = len(key_info) == 3 and key_info[2] is False

            if optional is True and (required_key not in actual_params or actual_params[required_key] in ('', None)):
                continue

            if required_key not in actual_params:
                error_messages.append('Missing required param "{0}" in actual parameters'.format(required_key))
                continue

            actual_value = actual_params[required_key]
            if HelperToolbox.check_type(actual_value, expected_type)[0] is False:
                error_messages.append('Required param "{0}" is of type "{1}" but we expected type "{2}"'.format(required_key, type(actual_value), expected_type))
                continue

            if expected_value is None:
                continue

            if expected_type == list:
                if type(expected_value) == Toolbox.compiled_regex_type:  # List of strings which need to match regex
                    for item in actual_value:
                        if not re.match(expected_value, item):
                            error_messages.append('Required param "{0}" has an item "{1}" which does not match regex "{2}"'.format(required_key, item, expected_value.pattern))
            elif expected_type == dict:
                Toolbox.verify_required_params(expected_value, actual_params[required_key])
            elif expected_type == int:
                if isinstance(expected_value, list) and actual_value not in expected_value:
                    error_messages.append('Required param "{0}" with value "{1}" should be 1 of the following: {2}'.format(required_key, actual_value, expected_value))
                if isinstance(expected_value, dict):
                    minimum = expected_value.get('min', sys.maxint * -1)
                    maximum = expected_value.get('max', sys.maxint)
                    if not minimum <= actual_value <= maximum:
                        error_messages.append('Required param "{0}" with value "{1}" should be in range: {2} - {3}'.format(required_key, actual_value, minimum, maximum))
            else:
                if HelperToolbox.check_type(expected_value, list)[0] is True and actual_value not in expected_value:
                    error_messages.append('Required param "{0}" with value "{1}" should be 1 of the following: {2}'.format(required_key, actual_value, expected_value))
                elif HelperToolbox.check_type(expected_value, Toolbox.compiled_regex_type)[0] is True and not re.match(expected_value, actual_value):
                    error_messages.append('Required param "{0}" with value "{1}" does not match regex "{2}"'.format(required_key, actual_value, expected_value.pattern))
        if error_messages:
            raise RuntimeError('\n' + '\n'.join(error_messages))
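This extended variant additionally validates nested dictionaries (by recursing with the nested spec) and integer ranges through a {'min': ..., 'max': ...} dict, with bounds defaulting to plus/minus sys.maxint. A sketch:

    spec = {'port': (int, {'min': 1, 'max': 65535}),     # integer range check
            'config': (dict, {'retries': (int, None)})}  # nested spec, verified recursively
    Toolbox.verify_required_params(spec,
                                   {'port': 8080, 'config': {'retries': 3}},
                                   exact_match=True)  # extra keys in actual_params would be reported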
Example #5
 def _set_property(self, prop, value):
     """
     Setter for a simple property that will validate the type
     """
     self.dirty = True
     if value is None:
         self._data[prop.name] = value
     else:
         correct, allowed_types, given_type = Toolbox.check_type(value, prop.property_type)
         if correct:
             self._data[prop.name] = value
         else:
             raise TypeError('Property {0} allows types {1}. {2} given'.format(
                 prop.name, str(allowed_types), given_type
             ))
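Toolbox.check_type is used throughout these examples; as the unpacking above shows, it returns a (correct, allowed_types, given_type) tuple, which is why other callers index [0]. A minimal illustration of that contract (the exact contents of allowed_types are an assumption):

 correct, allowed_types, given_type = Toolbox.check_type('foo', int)
 if not correct:
     # given_type would be str here; allowed_types lists what the property accepts
     raise TypeError('Allowed types {0}, {1} given'.format(allowed_types, given_type))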
Example #6
    def test_lotsofobjects(self):
        """
        A test creating, linking and querying a lot of objects
        """
        print ''
        print 'cleaning up'
        self._clean_all()
        print 'start test'
        tstart = time.time()
        if getattr(LotsOfObjects, 'amount_of_machines', None) is None:
            LotsOfObjects.amount_of_machines = 50
        if getattr(LotsOfObjects, 'amount_of_disks', None) is None:
            LotsOfObjects.amount_of_disks = 5
        load_data = True
        mguids = []
        if load_data:
            print '\nstart loading data'
            start = time.time()
            runtimes = []
            for i in xrange(0, int(LotsOfObjects.amount_of_machines)):
                mstart = time.time()
                machine = TestMachine()
                machine.name = 'machine_{0}'.format(i)
                machine.save()
                mguids.append(machine.guid)
                for ii in xrange(0, int(LotsOfObjects.amount_of_disks)):
                    disk = TestDisk()
                    disk.name = 'disk_{0}_{1}'.format(i, ii)
                    disk.size = ii * 100
                    disk.machine = machine
                    disk.save()
                avgitemspersec = ((i + 1) * LotsOfObjects.amount_of_disks) / (time.time() - start)
                itemspersec = LotsOfObjects.amount_of_disks / (time.time() - mstart)
                runtimes.append(itemspersec)
                LotsOfObjects._print_progress('* machine {0}/{1} (run: {2} dps, avg: {3} dps)'.format(i + 1, int(LotsOfObjects.amount_of_machines), round(itemspersec, 2), round(avgitemspersec, 2)))
            runtimes.sort()
            print '\nloading done ({0}s). min: {1} dps, max: {2} dps'.format(round(time.time() - tstart, 2), round(runtimes[1], 2), round(runtimes[-2], 2))

        test_queries = True
        if test_queries:
            print '\nstart queries'
            start = time.time()
            runtimes = []
            for i in xrange(0, int(LotsOfObjects.amount_of_machines)):
                mstart = time.time()
                machine = TestMachine(mguids[i])
                assert len(machine.disks) == LotsOfObjects.amount_of_disks, 'Not all disks were retrieved ({0})'.format(len(machine.disks))
                avgitemspersec = ((i + 1) * LotsOfObjects.amount_of_disks) / (time.time() - start)
                itemspersec = LotsOfObjects.amount_of_disks / (time.time() - mstart)
                runtimes.append(itemspersec)
                LotsOfObjects._print_progress('* machine {0}/{1} (run: {2} dps, avg: {3} dps)'.format(i + 1, int(LotsOfObjects.amount_of_machines), round(itemspersec, 2), round(avgitemspersec, 2)))
            runtimes.sort()
            print '\ncompleted ({0}s). min: {1} dps, max: {2} dps'.format(round(time.time() - tstart, 2), round(runtimes[1], 2), round(runtimes[-2], 2))

            print '\nstart full query on disk property'
            start = time.time()
            dlist = DataList(TestDisk, {'type': DataList.where_operator.AND,
                                        'items': [('size', DataList.operator.GT, 100),
                                                  ('size', DataList.operator.LT, (LotsOfObjects.amount_of_disks - 1) * 100)]})
            amount = len(dlist)
            assert amount == (LotsOfObjects.amount_of_disks - 3) * LotsOfObjects.amount_of_machines, 'Incorrect amount of disks. Found {0} instead of {1}'.format(amount, int((LotsOfObjects.amount_of_disks - 3) * LotsOfObjects.amount_of_machines))
            seconds_passed = (time.time() - start)
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(round(time.time() - tstart, 2), round(seconds_passed, 2), round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2))

            print '\nloading disks (all)'
            start = time.time()
            for i in xrange(0, int(LotsOfObjects.amount_of_machines)):
                machine = TestMachine(mguids[i])
                _ = [_ for _ in machine.disks]
            seconds_passed = (time.time() - start)
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(round(time.time() - tstart, 2), round(seconds_passed, 2), round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2))

            print '\nstart full query on disk property (using cached objects)'
            dlist._volatile.delete(dlist._key)
            start = time.time()
            dlist = DataList(TestDisk, {'type': DataList.where_operator.AND,
                                        'items': [('size', DataList.operator.GT, 100),
                                                  ('size', DataList.operator.LT, (LotsOfObjects.amount_of_disks - 1) * 100)]})
            amount = len(dlist)
            assert amount == (LotsOfObjects.amount_of_disks - 3) * LotsOfObjects.amount_of_machines, 'Incorrect amount of disks. Found {0} instead of {1}'.format(amount, int((LotsOfObjects.amount_of_disks - 3) * LotsOfObjects.amount_of_machines))
            seconds_passed = (time.time() - start)
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(round(time.time() - tstart, 2), round(seconds_passed, 2), round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2))

            print '\nstart property sort'
            dlist = DataList(TestDisk, {'type': DataList.where_operator.AND,
                                        'items': []})
            start = time.time()
            dlist.sort(key=lambda a: Toolbox.extract_key(a, 'size'))
            seconds_passed = (time.time() - start)
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(round(time.time() - tstart, 2), round(seconds_passed, 2), round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2))

            print '\nstart dynamic sort'
            dlist._volatile.delete(dlist._key)
            dlist = DataList(TestDisk, {'type': DataList.where_operator.AND,
                                        'items': []})
            start = time.time()
            dlist.sort(key=lambda a: Toolbox.extract_key(a, 'predictable'))
            seconds_passed = (time.time() - start)
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(round(time.time() - tstart, 2), round(seconds_passed, 2), round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2))

        clean_data = True
        if clean_data:
            print '\ncleaning up'
            start = time.time()
            runtimes = []
            for i in xrange(0, int(LotsOfObjects.amount_of_machines)):
                mstart = time.time()
                machine = TestMachine(mguids[i])
                for disk in machine.disks:
                    disk.delete()
                machine.delete()
                avgitemspersec = ((i + 1) * LotsOfObjects.amount_of_disks) / (time.time() - start)
                itemspersec = LotsOfObjects.amount_of_disks / (time.time() - mstart)
                runtimes.append(itemspersec)
                LotsOfObjects._print_progress('* machine {0}/{1} (run: {2} dps, avg: {3} dps)'.format(i + 1, int(LotsOfObjects.amount_of_machines), round(itemspersec, 2), round(avgitemspersec, 2)))
            runtimes.sort()
            print '\ncompleted ({0}s). min: {1} dps, max: {2} dps'.format(round(time.time() - tstart, 2), round(runtimes[1], 2), round(runtimes[-2], 2))
Example #7
    def verify_required_params(required_params, actual_params):
        """
        Verify whether the actual parameters match the required parameters
        """
        error_messages = []
        for required_key, key_info in required_params.iteritems():
            expected_type = key_info[0]
            expected_value = key_info[1]
            optional = len(key_info) == 3 and key_info[2] is False

            if optional is True and (required_key not in actual_params or actual_params[required_key] in ('', None)):
                continue

            if required_key not in actual_params:
                error_messages.append('Missing required param "{0}"'.format(required_key))
                continue

            actual_value = actual_params[required_key]
            if HelperToolbox.check_type(actual_value, expected_type)[0] is False:
                error_messages.append(
                    'Required param "{0}" is of type "{1}" but we expected type "{2}"'.format(
                        required_key, type(actual_value), expected_type
                    )
                )
                continue

            if expected_value is None:
                continue

            if expected_type == list:
                if type(expected_value) == Toolbox.compiled_regex_type:  # List of strings which need to match regex
                    for item in actual_value:
                        if not re.match(expected_value, item):
                            error_messages.append(
                                'Required param "{0}" has an item "{1}" which does not match regex "{2}"'.format(
                                    required_key, item, expected_value.pattern
                                )
                            )
            elif expected_type == dict:
                Toolbox.verify_required_params(expected_value, actual_params[required_key])
            elif expected_type == int:
                if isinstance(expected_value, list) and actual_value not in expected_value:
                    error_messages.append(
                        'Required param "{0}" with value "{1}" should be 1 of the following: {2}'.format(
                            required_key, actual_value, expected_value
                        )
                    )
                if isinstance(expected_value, dict):
                    minimum = expected_value.get("min", sys.maxint * -1)
                    maximum = expected_value.get("max", sys.maxint)
                    if not minimum <= actual_value <= maximum:
                        error_messages.append(
                            'Required param "{0}" with value "{1}" should be in range: {2} - {3}'.format(
                                required_key, actual_value, minimum, maximum
                            )
                        )
            else:
                if HelperToolbox.check_type(expected_value, list)[0] is True and actual_value not in expected_value:
                    error_messages.append(
                        'Required param "{0}" with value "{1}" should be 1 of the following: {2}'.format(
                            required_key, actual_value, expected_value
                        )
                    )
                elif HelperToolbox.check_type(expected_value, Toolbox.compiled_regex_type)[0] is True and not re.match(
                    expected_value, actual_value
                ):
                    error_messages.append(
                        'Required param "{0}" with value "{1}" does not match regex "{2}"'.format(
                            required_key, actual_value, expected_value.pattern
                        )
                    )
        if error_messages:
            raise RuntimeError("\n" + "\n".join(error_messages))
Example #8
    def get_relation_set(remote_class, remote_key, own_class, own_key, own_guid):
        """
        This method will get a DataList for a relation.
        On a cache miss, the relation DataList will be rebuild and due to the nature of the full table scan, it will
        update all relations in the mean time.
        """

        # Example:
        # * remote_class = vDisk
        # * remote_key = vmachine
        # * own_class = vMachine
        # * own_key = vdisks
        # Called to load the vMachine.vdisks list (resulting in a possible scan of vDisk objects)
        # * own_guid = this vMachine object's guid

        volatile = VolatileFactory.get_client()
        own_name = own_class.__name__.lower()
        datalist = DataList({}, '{0}_{1}_{2}'.format(own_name, own_guid, remote_key), load=False)
        reverse_key = 'ovs_reverseindex_{0}_{1}'.format(own_name, own_guid)

        # Check whether the requested information is available in cache
        reverse_index = volatile.get(reverse_key)
        if reverse_index is not None and own_key in reverse_index:
            Toolbox.log_cache_hit('datalist', True)
            datalist.data = reverse_index[own_key]
            datalist.from_cache = True
            return datalist

        Toolbox.log_cache_hit('datalist', False)
        mutex = VolatileMutex('reverseindex')
        remote_name = remote_class.__name__.lower()
        blueprint_object = remote_class()  # vDisk object
        foreign_guids = {}

        remote_namespace = blueprint_object._namespace
        remote_keys = DataList.get_pks(remote_namespace, remote_name)
        handled_flows = []
        for guid in remote_keys:
            try:
                instance = remote_class(guid)
                for relation in blueprint_object._relations:  # E.g. vmachine or vpool relation
                    if relation.foreign_type is None:
                        classname = remote_name
                        foreign_namespace = blueprint_object._namespace
                    else:
                        classname = relation.foreign_type.__name__.lower()
                        foreign_namespace = relation.foreign_type()._namespace
                    if classname not in foreign_guids:
                        foreign_guids[classname] = DataList.get_pks(foreign_namespace, classname)
                    flow = '{0}_{1}'.format(classname, relation.foreign_key)
                    if flow not in handled_flows:
                        handled_flows.append(flow)
                        try:
                            mutex.acquire(60)
                            for foreign_guid in foreign_guids[classname]:
                                reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, foreign_guid)
                                reverse_index = volatile.get(reverse_key)
                                if reverse_index is None:
                                    reverse_index = {}
                                if relation.foreign_key not in reverse_index:
                                    reverse_index[relation.foreign_key] = []
                                    volatile.set(reverse_key, reverse_index)
                        finally:
                            mutex.release()
                    key = getattr(instance, '{0}_guid'.format(relation.name))
                    if key is not None:
                        try:
                            mutex.acquire(60)
                            reverse_index = volatile.get('ovs_reverseindex_{0}_{1}'.format(classname, key))
                            if reverse_index is None:
                                reverse_index = {}
                            if relation.foreign_key not in reverse_index:
                                reverse_index[relation.foreign_key] = []
                            if guid not in reverse_index[relation.foreign_key]:
                                reverse_index[relation.foreign_key].append(guid)
                                volatile.set('ovs_reverseindex_{0}_{1}'.format(classname, key), reverse_index)
                        finally:
                            mutex.release()
            except ObjectNotFoundException:
                pass

        try:
            mutex.acquire(60)
            reverse_key = 'ovs_reverseindex_{0}_{1}'.format(own_name, own_guid)
            reverse_index = volatile.get(reverse_key)
            if reverse_index is None:
                reverse_index = {}
            if own_key not in reverse_index:
                reverse_index[own_key] = []
                volatile.set(reverse_key, reverse_index)
            datalist.data = reverse_index[own_key]
            datalist.from_cache = False
        finally:
            mutex.release()
        return datalist
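Following the vDisk/vMachine mapping spelled out in the comments above, a vMachine.vdisks getter would invoke this method in roughly this shape (the property wiring is an assumption, as is the method living on RelationMapper alongside load_foreign_relations):

    # Inside a hypothetical vMachine.vdisks property:
    vdisks = RelationMapper.get_relation_set(remote_class=vDisk,
                                             remote_key='vmachine',
                                             own_class=vMachine,
                                             own_key='vdisks',
                                             own_guid=self.guid)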
Example #9
    def _load(self):
        """
        Tries to load the result for the given key from the volatile cache, or executes the query
        if not yet available. Afterwards (if a key is given), the result will be (re)cached
        """
        self.data = self._volatile.get(self._key) if self._key is not None else None
        if self.data is None:
            # The query should be a dictionary:
            #     {'object': Disk,  # Object on which the query should be executed
            #      'data'  : DataList.select.XYZ,  # The requested result
            #      'query' : <query>}  # The actual query
            # Where <query> is a query(group) dictionary:
            #     {'type' : DataList.where_operator.ABC,  # Whether the items should be AND/OR
            #      'items': <items>}  # The items in the group
            # Where the <items> is any combination of one or more <filter> or <query>
            # A <filter> tuple example:
            #     (<field>, DataList.operator.GHI, <value>)  # For example EQUALS
            # The field is any property you would also find on the given object. In case of
            # properties, you can dot as far as you like. This means you can combine AND and OR
            # in any possible combination

            Toolbox.log_cache_hit('datalist', False)
            hybrid_structure = HybridRunner.get_hybrids()

            items = self._query['query']['items']
            query_type = self._query['query']['type']
            query_data = self._query['data']
            query_object = self._query['object']
            query_object_id = Descriptor(query_object).descriptor['identifier']
            if query_object_id in hybrid_structure and query_object_id != hybrid_structure[query_object_id]['identifier']:
                query_object = Descriptor().load(hybrid_structure[query_object_id]).get_object()

            invalidations = {query_object.__name__.lower(): ['__all']}
            DataList._build_invalidations(invalidations, query_object, items)

            for class_name in invalidations:
                key = '{0}_{1}'.format(DataList.cachelink, class_name)
                mutex = VolatileMutex('listcache_{0}'.format(class_name))
                try:
                    mutex.acquire(60)
                    cache_list = Toolbox.try_get(key, {})
                    current_fields = cache_list.get(self._key, [])
                    current_fields = list(set(current_fields + ['__all'] + invalidations[class_name]))
                    cache_list[self._key] = current_fields
                    self._volatile.set(key, cache_list)
                    self._persistent.set(key, cache_list)
                finally:
                    mutex.release()

            self.from_cache = False
            namespace = query_object()._namespace
            name = query_object.__name__.lower()
            guids = DataList.get_pks(namespace, name)

            if query_data == DataList.select.COUNT:
                self.data = 0
            else:
                self.data = []

            for guid in guids:
                try:
                    instance = query_object(guid)
                    if query_type == DataList.where_operator.AND:
                        include = self._exec_and(instance, items)
                    elif query_type == DataList.where_operator.OR:
                        include = self._exec_or(instance, items)
                    else:
                        raise NotImplementedError('The given operator is not yet implemented.')
                    if include:
                        if query_data == DataList.select.COUNT:
                            self.data += 1
                        elif query_data == DataList.select.GUIDS:
                            self.data.append(guid)
                        else:
                            raise NotImplementedError('The given selector type is not implemented')
                except ObjectNotFoundException:
                    pass

            if self._post_query_hook is not None:
                self._post_query_hook(self)

            if self._key is not None and len(guids) > 0 and self._can_cache:
                invalidated = False
                for class_name in invalidations:
                    key = '{0}_{1}'.format(DataList.cachelink, class_name)
                    cache_list = Toolbox.try_get(key, {})
                    if self._key not in cache_list:
                        invalidated = True
                # If the key under which the list should be saved was already invalidated since the invalidations
                # were saved, the returned list is most likely outdated. This is OK for this result, but the list
                # won't get cached
                if invalidated is False:
                    self._volatile.set(self._key, self.data, 300 + randint(0, 300))  # Cache between 5 and 10 minutes
        else:
            Toolbox.log_cache_hit('datalist', True)
            self.from_cache = True
        return self
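Making the comment block above concrete, a query following the described format might look as below; the field names borrow from the TestDisk hybrid in Example #6, and the dotted 'machine.name' path exercises the "dot as far as you like" rule:

    query = {'object': TestDisk,             # object on which the query is executed
             'data': DataList.select.GUIDS,  # request the matching guids
             'query': {'type': DataList.where_operator.AND,
                       'items': [('size', DataList.operator.GT, 100),
                                 ('machine.name', DataList.operator.EQUALS, 'machine_1')]}}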
Example #10
        def new_function(*args, **kwargs):
            """
            Wrapped function
            """
            request = _find_request(args)

            # 1. Pre-loading request data
            sort = request.QUERY_PARAMS.get('sort')
            if sort is None and default_sort is not None:
                sort = default_sort
            sort = None if sort is None else [s for s in reversed(sort.split(','))]
            page = request.QUERY_PARAMS.get('page')
            page = int(page) if page is not None and page.isdigit() else None
            page_size = request.QUERY_PARAMS.get('page_size')
            page_size = int(page_size) if page_size is not None and page_size.isdigit() else None
            page_size = page_size if page_size in [10, 25, 50, 100] else 10
            contents = request.QUERY_PARAMS.get('contents')
            contents = None if contents is None else contents.split(',')

            # 2. Construct hints for decorated function (so it can provide full objects if required)
            if 'hints' not in kwargs:
                kwargs['hints'] = {}
            kwargs['hints']['full'] = sort is not None or contents is not None

            # 3. Fetch data
            data_list = f(*args, **kwargs)
            guid_list = isinstance(data_list, list) and len(data_list) > 0 and isinstance(data_list[0], basestring)

            # 4. Sorting
            if sort is not None:
                if guid_list is True:
                    data_list = [object_type(guid) for guid in data_list]
                    guid_list = False  # The list is converted to objects
                for sort_item in sort:
                    desc = sort_item[0] == '-'
                    field = sort_item[1 if desc else 0:]
                    data_list.sort(key=lambda e: DalToolbox.extract_key(e, field), reverse=desc)

            # 5. Paging
            total_items = len(data_list)
            page_metadata = {'total_items': total_items,
                             'current_page': 1,
                             'max_page': 1,
                             'page_size': page_size,
                             'start_number': min(1, total_items),
                             'end_number': total_items}
            if page is not None:
                max_page = int(math.ceil(total_items / (page_size * 1.0)))
                if page > max_page:
                    page = max_page
                if page == 0:
                    start_number = -1
                    end_number = 0
                else:
                    start_number = (page - 1) * page_size  # Index - e.g. 0 for page 1, 10 for page 2
                    end_number = start_number + page_size  # Index - e.g. 10 for page 1, 20 for page 2
                data_list = data_list[start_number: end_number]
                page_metadata = dict(page_metadata.items() + {'current_page': max(1, page),
                                                              'max_page': max(1, max_page),
                                                              'start_number': start_number + 1,
                                                              'end_number': min(total_items, end_number)}.items())
            else:
                page_metadata['page_size'] = total_items

            # 6. Serializing
            if contents is not None:
                if guid_list is True:
                    data_list = [object_type(guid) for guid in data_list]
                data = FullSerializer(object_type, contents=contents, instance=data_list, many=True).data
            else:
                if guid_list is False:
                    data_list = [item.guid for item in data_list]
                data = data_list

            result = {'data': data,
                      '_paging': page_metadata,
                      '_contents': contents,
                      '_sorting': [s for s in reversed(sort)] if sort else sort}

            # 7. Building response
            return Response(result, status=status.HTTP_200_OK)
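To make the paging arithmetic concrete: with 45 items and the default page_size of 10, requesting page 2 selects slice indices 10 through 19, and the metadata reports items 11-20 of 45.

        import math

        total_items, page_size, page = 45, 10, 2
        max_page = int(math.ceil(total_items / (page_size * 1.0)))  # 5
        start_number = (page - 1) * page_size                       # 10, index of the first item
        end_number = start_number + page_size                       # 20, exclusive slice end
        # Reported range: start_number + 1 = 11 through min(total_items, end_number) = 20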
Example #11
    def delete(self, abandon=None, _hook=None):
        """
        Delete the given object. It also invalidates certain lists
        """
        if self.volatile is True:
            raise VolatileObjectException()

        # Check foreign relations
        relations = RelationMapper.load_foreign_relations(self.__class__)
        if relations is not None:
            for key, info in relations.iteritems():
                items = getattr(self, key)
                if info['list'] is True:
                    if len(items) > 0:
                        if abandon is not None and (key in abandon or '_all' in abandon):
                            for item in items.itersafe():
                                setattr(item, info['key'], None)
                                try:
                                    item.save()
                                except ObjectNotFoundException:
                                    pass
                        else:
                            multi = 'are {0} items'.format(len(items)) if len(items) > 1 else 'is 1 item'
                            raise LinkedObjectException('There {0} left in self.{1}'.format(multi, key))
                elif items is not None:
                    # No list (so a 1-to-1 relation), so there should be an object, or None
                    item = items  # More clear naming
                    if abandon is not None and (key in abandon or '_all' in abandon):
                        setattr(item, info['key'], None)
                        try:
                            item.save()
                        except ObjectNotFoundException:
                            pass
                    else:
                        raise LinkedObjectException('There is still an item linked in self.{0}'.format(key))

        # Delete the object out of the persistent store
        try:
            self._persistent.delete(self._key)
        except KeyNotFoundException:
            pass

        # First, update reverse index
        try:
            self._mutex_reverseindex.acquire(60)
            for relation in self._relations:
                key = relation.name
                original_guid = self._original[key]['guid']
                if original_guid is not None:
                    if relation.foreign_type is None:
                        classname = self.__class__.__name__.lower()
                    else:
                        classname = relation.foreign_type.__name__.lower()
                    reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, original_guid)
                    reverse_index = self._volatile.get(reverse_key)
                    if reverse_index is not None:
                        if relation.foreign_key in reverse_index:
                            entries = reverse_index[relation.foreign_key]
                            if self.guid in entries:
                                entries.remove(self.guid)
                                reverse_index[relation.foreign_key] = entries
                                self._volatile.set(reverse_key, reverse_index)
            self._volatile.delete('ovs_reverseindex_{0}_{1}'.format(self._classname, self.guid))
        finally:
            self._mutex_reverseindex.release()

        # Second, invalidate property lists
        try:
            self._mutex_listcache.acquire(60)
            cache_key = '{0}_{1}'.format(DataList.cachelink, self._classname)
            cache_list = Toolbox.try_get(cache_key, {})
            change = False
            for list_key in cache_list.keys():
                fields = cache_list[list_key]
                if '__all' in fields:
                    change = True
                    self._volatile.delete(list_key)
                    del cache_list[list_key]
            if change is True:
                self._volatile.set(cache_key, cache_list)
                self._persistent.set(cache_key, cache_list)
        finally:
            self._mutex_listcache.release()

        # Delete the object and its properties out of the volatile store
        self.invalidate_dynamics()
        self._volatile.delete(self._key)
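A usage sketch of the abandon parameter, reusing the vMachine/vDisk naming from the other examples: without it, delete refuses while related objects are still linked.

    machine = vMachine(machine_guid)
    # machine.delete() would raise LinkedObjectException while vdisks are still linked;
    # abandoning the relation first detaches each disk (vdisk.vmachine = None):
    machine.delete(abandon=['vdisks'])  # abandon=['_all'] would detach every relation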
Example #12
    def save(self, recursive=False, skip=None, _hook=None):
        """
        Save the object to the persistent backend and clear cache, making use
        of the specified conflict resolve settings.
        It will also invalidate certain caches if required. For example lists pointing towards this
        object
        """
        if self.volatile is True:
            raise VolatileObjectException()

        tries = 0
        successful = False
        while successful is False:
            invalid_fields = []
            for prop in self._properties:
                if prop.mandatory is True and self._data[prop.name] is None:
                    invalid_fields.append(prop.name)
            for relation in self._relations:
                if relation.mandatory is True and self._data[relation.name]['guid'] is None:
                    invalid_fields.append(relation.name)
            if len(invalid_fields) > 0:
                raise MissingMandatoryFieldsException('Missing fields on {0}: {1}'.format(self._classname, ', '.join(invalid_fields)))

            if recursive:
                # Save objects that point to us (e.g. disk.vmachine - if this is disk)
                for relation in self._relations:
                    if relation.name != skip:  # disks will be skipped
                        item = getattr(self, relation.name)
                        if item is not None:
                            item.save(recursive=True, skip=relation.foreign_key)

                # Save object we point at (e.g. machine.vdisks - if this is machine)
                relations = RelationMapper.load_foreign_relations(self.__class__)
                if relations is not None:
                    for key, info in relations.iteritems():
                        if key != skip:  # machine will be skipped
                            if info['list'] is True:
                                for item in getattr(self, key).iterloaded():
                                    item.save(recursive=True, skip=info['key'])
                            else:
                                item = getattr(self, key)
                                if item is not None:
                                    item.save(recursive=True, skip=info['key'])

            for relation in self._relations:
                if self._data[relation.name]['guid'] is not None:
                    if relation.foreign_type is None:
                        cls = self.__class__
                    else:
                        cls = relation.foreign_type
                    _ = cls(self._data[relation.name]['guid'])

            try:
                data = self._persistent.get(self._key)
            except KeyNotFoundException:
                if self._new:
                    data = {'_version': 0}
                else:
                    raise ObjectNotFoundException('{0} with guid \'{1}\' was deleted'.format(
                        self.__class__.__name__, self._guid
                    ))
            changed_fields = []
            data_conflicts = []
            for attribute in self._data.keys():
                if attribute == '_version':
                    continue
                if self._data[attribute] != self._original[attribute]:
                    # We changed this value
                    changed_fields.append(attribute)
                    if attribute in data and self._original[attribute] != data[attribute]:
                        # Some other process also wrote to the database
                        if self._datastore_wins is None:
                            # In case we didn't set a policy, we raise the conflicts
                            data_conflicts.append(attribute)
                        elif self._datastore_wins is False:
                            # If the datastore should not win, we just overwrite the data
                            data[attribute] = self._data[attribute]
                        # If the datastore should win, we discard/ignore our change
                    else:
                        # Normal scenario, saving data
                        data[attribute] = self._data[attribute]
                elif attribute not in data:
                    data[attribute] = self._data[attribute]
            for attribute in data.keys():
                if attribute == '_version':
                    continue
                if attribute not in self._data:
                    del data[attribute]
            if data_conflicts:
                raise ConcurrencyException('Got field conflicts while saving {0}. Conflicts: {1}'.format(
                    self._classname, ', '.join(data_conflicts)
                ))

            # Refresh internal data structure
            self._data = copy.deepcopy(data)

            caching_keys = []
            try:
                # First, update reverse index
                try:
                    self._mutex_reverseindex.acquire(60)
                    for relation in self._relations:
                        key = relation.name
                        original_guid = self._original[key]['guid']
                        new_guid = self._data[key]['guid']
                        if original_guid != new_guid:
                            if relation.foreign_type is None:
                                classname = self.__class__.__name__.lower()
                            else:
                                classname = relation.foreign_type.__name__.lower()
                            if original_guid is not None:
                                reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, original_guid)
                                reverse_index = self._volatile.get(reverse_key)
                                if reverse_index is not None:
                                    if relation.foreign_key in reverse_index:
                                        entries = reverse_index[relation.foreign_key]
                                        if self.guid in entries:
                                            entries.remove(self.guid)
                                            reverse_index[relation.foreign_key] = entries
                                            caching_keys.append(reverse_key)
                                            self._volatile.set(reverse_key, reverse_index)
                            if new_guid is not None:
                                reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, new_guid)
                                reverse_index = self._volatile.get(reverse_key)
                                if reverse_index is not None:
                                    if relation.foreign_key in reverse_index:
                                        entries = reverse_index[relation.foreign_key]
                                        if self.guid not in entries:
                                            entries.append(self.guid)
                                            reverse_index[relation.foreign_key] = entries
                                            caching_keys.append(reverse_key)
                                            self._volatile.set(reverse_key, reverse_index)
                                    else:
                                        reverse_index[relation.foreign_key] = [self.guid]
                                        caching_keys.append(reverse_key)
                                        self._volatile.set(reverse_key, reverse_index)
                                else:
                                    reverse_index = {relation.foreign_key: [self.guid]}
                                    caching_keys.append(reverse_key)
                                    self._volatile.set(reverse_key, reverse_index)
                    if self._new is True:
                        reverse_key = 'ovs_reverseindex_{0}_{1}'.format(self._classname, self.guid)
                        reverse_index = self._volatile.get(reverse_key)
                        if reverse_index is None:
                            reverse_index = {}
                            relations = RelationMapper.load_foreign_relations(self.__class__)
                            if relations is not None:
                                for key, _ in relations.iteritems():
                                    reverse_index[key] = []
                            caching_keys.append(reverse_key)
                            self._volatile.set(reverse_key, reverse_index)
                finally:
                    self._mutex_reverseindex.release()

                # Second, invalidate property lists
                try:
                    self._mutex_listcache.acquire(60)
                    cache_key = '{0}_{1}'.format(DataList.cachelink, self._classname)
                    cache_list = Toolbox.try_get(cache_key, {})
                    change = False
                    for list_key in cache_list.keys():
                        fields = cache_list[list_key]
                        if ('__all' in fields and self._new) or list(set(fields) & set(changed_fields)):
                            change = True
                            self._volatile.delete(list_key)
                            del cache_list[list_key]
                    if change is True:
                        self._volatile.set(cache_key, cache_list)
                        self._persistent.set(cache_key, cache_list)
                finally:
                    self._mutex_listcache.release()

                if _hook is not None and hasattr(_hook, '__call__'):
                    _hook()

                # Save the data
                try:
                    self._mutex_version.acquire(30)
                    this_version = self._data['_version']
                    try:
                        store_version = self._persistent.get(self._key)['_version']
                    except KeyNotFoundException:
                        store_version = 0
                    if this_version == store_version:
                        self._data['_version'] = this_version + 1
                        self._persistent.set(self._key, self._data)
                        self._volatile.delete(self._key)
                        successful = True
                    else:
                        tries += 1
                finally:
                    self._mutex_version.release()
                if tries > 5:
                    raise SaveRaceConditionException()
            except:
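                # Roll back the reverse-index cache entries written above, so a failed save leaves no stale index data behind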
                for key in caching_keys:
                    self._volatile.delete(key)
                raise

        self.invalidate_dynamics()
        self._original = copy.deepcopy(self._data)

        self.dirty = False
        self._new = False
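
The version check near the end of this save path is a classic optimistic-concurrency loop: the object is only written when the in-memory `_version` still matches the stored one, and the caller retries otherwise. A minimal sketch of that pattern, assuming a hypothetical `store` with dict-style `get(key, default)`/`set(key, value)` and a `lock` context manager standing in for `self._mutex_version` (all names invented for illustration):

    def optimistic_save(store, lock, key, data, max_tries=5):
        # 'store' and 'lock' are assumptions for this sketch, not the real clients used above
        tries = 0
        while tries <= max_tries:
            with lock:
                stored = store.get(key, {'_version': 0})
                if data['_version'] == stored['_version']:
                    data['_version'] += 1  # Bump the version and write while still holding the lock
                    store.set(key, data)
                    return True
            tries += 1  # Another writer won the race; the real code re-reads and merges before retrying
        raise RuntimeError('Save failed after {0} tries'.format(tries))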
Example No. 17
    def _load(self):
        """
        Tries to load the result for the given key from the volatile cache, or executes the query
        if not yet available. Afterwards (if a key is given), the result will be (re)cached
        """
        self.data = self._volatile.get(self._key) if self._key is not None else None
        if self.data is None:
            # The query should be a dictionary:
            #     {'object': Disk,  # Object on which the query should be executed
            #      'data'  : DataList.select.XYZ,  # The requested result
            #      'query' : <query>}  # The actual query
            # Where <query> is a query(group) dictionary:
            #     {'type' : DataList.where_operator.ABC,  # Whether the items should be AND/OR
            #      'items': <items>}  # The items in the group
            # Where the <items> is any combination of one or more <filter> or <query>
            # A <filter> tuple example:
            #     (<field>, DataList.operator.GHI, <value>)  # For example EQUALS
            # The <field> is any property you would also find on the given object; for related
            # objects you can use dotted paths as deep as you like. Since a <query> group can
            # itself contain nested <query> groups, AND and OR can be combined in any way

            Toolbox.log_cache_hit('datalist', False)
            hybrid_structure = HybridRunner.get_hybrids()

            items = self._query['query']['items']
            query_type = self._query['query']['type']
            query_data = self._query['data']
            query_object = self._query['object']
            query_object_id = Descriptor(query_object).descriptor['identifier']
            if query_object_id in hybrid_structure and query_object_id != hybrid_structure[query_object_id]['identifier']:
                query_object = Descriptor().load(hybrid_structure[query_object_id]).get_object()

            invalidations = {query_object.__name__.lower(): ['__all']}
            DataList._build_invalidations(invalidations, query_object, items)

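            # Register this list key under every class it depends on, so that saving such an object can invalidate the cached list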
            for class_name in invalidations:
                key = '{0}_{1}'.format(DataList.cachelink, class_name)
                mutex = VolatileMutex('listcache_{0}'.format(class_name))
                try:
                    mutex.acquire(60)
                    cache_list = Toolbox.try_get(key, {})
                    current_fields = cache_list.get(self._key, [])
                    current_fields = list(set(current_fields + ['__all'] + invalidations[class_name]))
                    cache_list[self._key] = current_fields
                    self._volatile.set(key, cache_list)
                    self._persistent.set(key, cache_list)
                finally:
                    mutex.release()

            self.from_cache = False
            namespace = query_object()._namespace
            name = query_object.__name__.lower()
            guids = DataList.get_pks(namespace, name)

            if query_data == DataList.select.COUNT:
                self.data = 0
            else:
                self.data = []

            for guid in guids:
                try:
                    instance = query_object(guid)
                    if query_type == DataList.where_operator.AND:
                        include = self._exec_and(instance, items)
                    elif query_type == DataList.where_operator.OR:
                        include = self._exec_or(instance, items)
                    else:
                        raise NotImplementedError('The given operator is not yet implemented.')
                    if include:
                        if query_data == DataList.select.COUNT:
                            self.data += 1
                        elif query_data == DataList.select.GUIDS:
                            self.data.append(guid)
                        else:
                            raise NotImplementedError('The given selector type is not implemented')
                except ObjectNotFoundException:
                    pass

            if 'post_query' in DataList.test_hooks:
                DataList.test_hooks['post_query'](self)

            if self._key is not None and len(guids) > 0 and self._can_cache:
                invalidated = False
                for class_name in invalidations:
                    key = '{0}_{1}'.format(DataList.cachelink, class_name)
                    cache_list = Toolbox.try_get(key, {})
                    if self._key not in cache_list:
                        invalidated = True
                # If the key under which the list should be saved was already invalidated since the invalidations
                # were saved, the returned list is most likely outdated. This is OK for this result, but the list
                # won't get cached
                if invalidated is False:
                    self._volatile.set(self._key, self.data, 300 + randint(0, 300))  # Cache between 5 and 10 minutes
        else:
            Toolbox.log_cache_hit('datalist', True)
            self.from_cache = True
        return self
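
To make the comment block in `_load` concrete, a query that combines AND and OR through nesting might look like the sketch below. It uses the `TestDisk` model and the `GT` operator that appear elsewhere in these examples; `DataList.operator.EQUALS` and the `select.GUIDS` result type are assumed to exist alongside them:

    query = {'object': TestDisk,                       # Model to scan
             'data': DataList.select.GUIDS,            # Return the matching guids
             'query': {'type': DataList.where_operator.AND,
                       'items': [('size', DataList.operator.GT, 100),
                                 {'type': DataList.where_operator.OR,  # Nested group: OR inside AND
                                  'items': [('name', DataList.operator.EQUALS, 'disk_0_1'),
                                            ('machine.name', DataList.operator.EQUALS, 'machine_0')]}]}}

The dotted 'machine.name' filter follows the relation from the disk to its machine, as the comment block describes.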
Example No. 18
    def __init__(self, guid=None, data=None, datastore_wins=False, volatile=False, _hook=None):
        """
        Loads an object with a given guid. If no guid is given, a new object
        is generated with a new guid.
        * guid: The guid indicating which object should be loaded
        * datastoreWins: Optional boolean indicating save conflict resolve management.
        ** True: when saving, external modified fields will not be saved
        ** False: when saving, all changed data will be saved, regardless of external updates
        ** None: in case changed field were also changed externally, an error will be raised
        """

        # Initialize super class
        super(DataObject, self).__init__()

        # Initialize internal fields
        self._frozen = False
        self._datastore_wins = datastore_wins
        self._guid = None             # Guid identifier of the object
        self._original = {}           # Original data copy
        self._metadata = {}           # Some metadata, mainly used for unit testing
        self._data = {}               # Internal data storage
        self._objects = {}            # Internal objects storage

        # Initialize public fields
        self.dirty = False
        self.volatile = volatile

        # Worker fields/objects
        self._classname = self.__class__.__name__.lower()
        self._namespace = 'ovs_data'   # Namespace of the object
        self._mutex_listcache = VolatileMutex('listcache_{0}'.format(self._classname))
        self._mutex_reverseindex = VolatileMutex('reverseindex')

        # Rebuild _relation types
        hybrid_structure = HybridRunner.get_hybrids()
        for relation in self._relations:
            if relation.foreign_type is not None:
                identifier = Descriptor(relation.foreign_type).descriptor['identifier']
                if identifier in hybrid_structure and identifier != hybrid_structure[identifier]['identifier']:
                    relation.foreign_type = Descriptor().load(hybrid_structure[identifier]).get_object()

        # Init guid
        self._new = False
        if guid is None:
            self._guid = str(uuid.uuid4())
            self._new = True
        else:
            guid = str(guid).lower()
            if re.match('^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$', guid) is not None:
                self._guid = str(guid)
            else:
                raise ValueError('The given guid is invalid: {0}'.format(guid))

        # Build base keys
        self._key = '{0}_{1}_{2}'.format(self._namespace, self._classname, self._guid)

        # Worker mutexes
        self._mutex_version = VolatileMutex('ovs_dataversion_{0}_{1}'.format(self._classname, self._guid))

        # Load data from cache or persistent backend where appropriate
        self._volatile = VolatileFactory.get_client()
        self._persistent = PersistentFactory.get_client()
        self._metadata['cache'] = None
        if self._new:
            self._data = {}
        else:
            self._data = self._volatile.get(self._key)
            if self._data is None:
                Toolbox.log_cache_hit('object_load', False)
                self._metadata['cache'] = False
                try:
                    self._data = self._persistent.get(self._key)
                except KeyNotFoundException:
                    raise ObjectNotFoundException('{0} with guid \'{1}\' could not be found'.format(
                        self.__class__.__name__, self._guid
                    ))
            else:
                Toolbox.log_cache_hit('object_load', True)
                self._metadata['cache'] = True

        # Set default values on new fields
        for prop in self._properties:
            if prop.name not in self._data:
                self._data[prop.name] = prop.default
            self._add_property(prop)

        # Load relations
        for relation in self._relations:
            if relation.name not in self._data:
                if relation.foreign_type is None:
                    cls = self.__class__
                else:
                    cls = relation.foreign_type
                self._data[relation.name] = Descriptor(cls).descriptor
            self._add_relation_property(relation)

        # Add wrapped properties
        for dynamic in self._dynamics:
            self._add_dynamic_property(dynamic)

        # Load foreign keys
        relations = RelationMapper.load_foreign_relations(self.__class__)
        if relations is not None:
            for key, info in relations.iteritems():
                self._objects[key] = {'info': info,
                                      'data': None}
                self._add_list_property(key, info['list'])

        # Store original data
        self._original = copy.deepcopy(self._data)

        if _hook is not None and hasattr(_hook, '__call__'):
            _hook()

        if not self._new:
            # Re-cache the object, if required
            if self._metadata['cache'] is False:
                # The data wasn't loaded from the cache, so caching is required now
                try:
                    self._mutex_version.acquire(30)
                    this_version = self._data['_version']
                    store_version = self._persistent.get(self._key)['_version']
                    if this_version == store_version:
                        self._volatile.set(self._key, self._data)
                except KeyNotFoundException:
                    raise ObjectNotFoundException('{0} with guid \'{1}\' could not be found'.format(
                        self.__class__.__name__, self._guid
                    ))
                finally:
                    self._mutex_version.release()

        # Freeze property creation
        self._frozen = True

        # Optionally, initialize some fields
        if data is not None:
            for prop in self._properties:
                if prop.name in data:
                    setattr(self, prop.name, data[prop.name])
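
Typical usage of this constructor, shown with the `TestMachine` model from the test example further down: omitting the guid creates a new object with a freshly generated guid, while passing one loads the existing object from cache or the persistent store.

    machine = TestMachine()                    # No guid: a new object with a freshly generated guid
    machine.name = 'machine_0'
    machine.save()

    same_machine = TestMachine(machine.guid)   # Existing guid: loaded from volatile cache or persistent store
    prefilled = TestMachine(data={'name': 'machine_1'})  # New object with initial property values applied

    TestMachine('not-a-guid')                  # Invalid guid format: raises ValueError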
Example No. 19
    def verify_required_params(required_params, actual_params):
        """
        Verify whether the actual parameters match the required parameters
        :param required_params: Required parameters which actual parameters have to meet
        :param actual_params: Actual parameters to check for validity
        :return: None
        """
        error_messages = []
        for required_key, key_info in required_params.iteritems():
            expected_type = key_info[0]
            expected_value = key_info[1]
            optional = len(key_info) == 3 and key_info[2] is False

            if optional is True and (required_key not in actual_params or
                                     actual_params[required_key] in ('', None)):
                continue

            if required_key not in actual_params:
                error_messages.append('Missing required param "{0}"'.format(required_key))
                continue

            actual_value = actual_params[required_key]
            if HelperToolbox.check_type(actual_value, expected_type)[0] is False:
                error_messages.append('Required param "{0}" is of type "{1}" but we expected type "{2}"'.format(required_key, type(actual_value), expected_type))
                continue

            if expected_value is None:
                continue

            if expected_type == list:
                if type(expected_value) == Toolbox.compiled_regex_type:  # List of strings which need to match regex
                    for item in actual_value:
                        if not re.match(expected_value, item):
                            error_messages.append('Required param "{0}" has an item "{1}" which does not match regex "{2}"'.format(required_key, item, expected_value.pattern))
            elif expected_type == dict:
                Toolbox.verify_required_params(expected_value, actual_params[required_key])
            elif expected_type == int:
                if isinstance(expected_value, list) and actual_value not in expected_value:
                    error_messages.append('Required param "{0}" with value "{1}" should be 1 of the following: {2}'.format(required_key, actual_value, expected_value))
                if isinstance(expected_value, dict):
                    minimum = expected_value.get('min', sys.maxint * -1)
                    maximum = expected_value.get('max', sys.maxint)
                    if not minimum <= actual_value <= maximum:
                        error_messages.append('Required param "{0}" with value "{1}" should be in range: {2} - {3}'.format(required_key, actual_value, minimum, maximum))
            else:
                if HelperToolbox.check_type(expected_value, list)[0] is True and actual_value not in expected_value:
                    error_messages.append('Required param "{0}" with value "{1}" should be 1 of the following: {2}'.format(required_key, actual_value, expected_value))
                elif HelperToolbox.check_type(expected_value, Toolbox.compiled_regex_type)[0] is True and not re.match(expected_value, actual_value):
                    error_messages.append('Required param "{0}" with value "{1}" does not match regex "{2}"'.format(required_key, actual_value, expected_value.pattern))
        if error_messages:
            raise RuntimeError('\n' + '\n'.join(error_messages))
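
A usage sketch for the specification format this method expects: each entry maps a key to `(expected_type, expected_value[, mandatory])`, where `expected_value` may be None (type check only), a list of allowed values, a compiled regex, a min/max dict for ints, or a nested spec for dicts. The parameter names below are invented for illustration.

    import re
    Toolbox.verify_required_params(
        required_params={'port': (int, {'min': 1, 'max': 65535}),
                         'protocol': (str, ['tcp', 'udp']),
                         'name': (str, re.compile('^[a-z]+$')),
                         'config': (dict, {'timeout': (int, None)}),   # Nested spec, verified recursively
                         'comment': (str, None, False)},               # Third element False marks the param optional
        actual_params={'port': 8080,
                       'protocol': 'tcp',
                       'name': 'myservice',
                       'config': {'timeout': 30}})
    # Any violation (e.g. 'port': 70000) raises a RuntimeError listing all failures at once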
Example No. 20
    def test_lotsofobjects(self):
        """
        A test creating, linking and querying a lot of objects
        """
        print ''
        print 'cleaning up'
        self._clean_all()
        print 'start test'
        tstart = time.time()
        if getattr(LotsOfObjects, 'amount_of_machines', None) is None:
            LotsOfObjects.amount_of_machines = 50
        if getattr(LotsOfObjects, 'amount_of_disks', None) is None:
            LotsOfObjects.amount_of_disks = 5
        load_data = True
        mguids = []
        if load_data:
            print '\nstart loading data'
            start = time.time()
            runtimes = []
            for i in xrange(0, int(LotsOfObjects.amount_of_machines)):
                mstart = time.time()
                machine = TestMachine()
                machine.name = 'machine_{0}'.format(i)
                machine.save()
                mguids.append(machine.guid)
                for ii in xrange(0, int(LotsOfObjects.amount_of_disks)):
                    disk = TestDisk()
                    disk.name = 'disk_{0}_{1}'.format(i, ii)
                    disk.size = ii * 100
                    disk.machine = machine
                    disk.save()
                avgitemspersec = ((i + 1) * LotsOfObjects.amount_of_disks) / (time.time() - start)
                itemspersec = LotsOfObjects.amount_of_disks / (time.time() - mstart)
                runtimes.append(itemspersec)
                LotsOfObjects._print_progress('* machine {0}/{1} (run: {2} dps, avg: {3} dps)'.format(
                    i + 1, int(LotsOfObjects.amount_of_machines), round(itemspersec, 2), round(avgitemspersec, 2)
                ))
            runtimes.sort()
            # Note: runtimes[1] and runtimes[-2] are the second-smallest and second-largest samples,
            # so the reported 'min'/'max' ignore the single most extreme run on either end
            print '\nloading done ({0}s). min: {1} dps, max: {2} dps'.format(
                round(time.time() - tstart, 2), round(runtimes[1], 2), round(runtimes[-2], 2)
            )

        test_queries = True
        if test_queries:
            print '\nstart queries'
            start = time.time()
            runtimes = []
            for i in xrange(0, int(LotsOfObjects.amount_of_machines)):
                mstart = time.time()
                machine = TestMachine(mguids[i])
                assert len(machine.disks) == LotsOfObjects.amount_of_disks, \
                    'Not all disks were retrieved ({0})'.format(len(machine.disks))
                avgitemspersec = ((i + 1) * LotsOfObjects.amount_of_disks) / (time.time() - start)
                itemspersec = LotsOfObjects.amount_of_disks / (time.time() - mstart)
                runtimes.append(itemspersec)
                LotsOfObjects._print_progress('* machine {0}/{1} (run: {2} dps, avg: {3} dps)'.format(
                    i + 1, int(LotsOfObjects.amount_of_machines), round(itemspersec, 2), round(avgitemspersec, 2)
                ))
            runtimes.sort()
            print '\ncompleted ({0}s). min: {1} dps, max: {2} dps'.format(
                round(time.time() - tstart, 2), round(runtimes[1], 2), round(runtimes[-2], 2)
            )

            print '\nstart full query on disk property'
            start = time.time()
            dlist = DataList(TestDisk, {'type': DataList.where_operator.AND,
                                        'items': [('size', DataList.operator.GT, 100),
                                                  ('size', DataList.operator.LT, (LotsOfObjects.amount_of_disks - 1) * 100)]})
            amount = len(dlist)
            expected = int((LotsOfObjects.amount_of_disks - 3) * LotsOfObjects.amount_of_machines)
            assert amount == expected, 'Incorrect amount of disks. Found {0} instead of {1}'.format(amount, expected)
            seconds_passed = time.time() - start
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(
                round(time.time() - tstart, 2), round(seconds_passed, 2),
                round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2)
            )

            print '\nloading disks (all)'
            start = time.time()
            for i in xrange(0, int(LotsOfObjects.amount_of_machines)):
                machine = TestMachine(mguids[i])
                _ = [_ for _ in machine.disks]
            seconds_passed = time.time() - start
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(
                round(time.time() - tstart, 2), round(seconds_passed, 2),
                round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2)
            )

            print '\nstart full query on disk property (using cached objects)'
            dlist._volatile.delete(dlist._key)
            start = time.time()
            dlist = DataList(TestDisk, {'type': DataList.where_operator.AND,
                                        'items': [('size', DataList.operator.GT, 100),
                                                  ('size', DataList.operator.LT, (LotsOfObjects.amount_of_disks - 1) * 100)]})
            amount = len(dlist)
            expected = int((LotsOfObjects.amount_of_disks - 3) * LotsOfObjects.amount_of_machines)
            assert amount == expected, 'Incorrect amount of disks. Found {0} instead of {1}'.format(amount, expected)
            seconds_passed = time.time() - start
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(
                round(time.time() - tstart, 2), round(seconds_passed, 2),
                round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2)
            )

            print '\nstart property sort'
            dlist = DataList(TestDisk, {'type': DataList.where_operator.AND, 'items': []})
            start = time.time()
            dlist.sort(key=lambda a: Toolbox.extract_key(a, 'size'))
            seconds_passed = time.time() - start
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(
                round(time.time() - tstart, 2), round(seconds_passed, 2),
                round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2)
            )

            print '\nstart dynamic sort'
            dlist._volatile.delete(dlist._key)
            dlist = DataList(TestDisk, {'type': DataList.where_operator.AND, 'items': []})
            start = time.time()
            dlist.sort(key=lambda a: Toolbox.extract_key(a, 'predictable'))
            seconds_passed = time.time() - start
            print 'completed ({0}s) in {1} seconds (avg: {2} dps)'.format(
                round(time.time() - tstart, 2), round(seconds_passed, 2),
                round(LotsOfObjects.amount_of_machines * LotsOfObjects.amount_of_disks / seconds_passed, 2)
            )

        clean_data = True
        if clean_data:
            print '\ncleaning up'
            start = time.time()
            runtimes = []
            for i in xrange(0, int(LotsOfObjects.amount_of_machines)):
                mstart = time.time()
                machine = TestMachine(mguids[i])
                for disk in machine.disks:
                    disk.delete()
                machine.delete()
                avgitemspersec = ((i + 1) * LotsOfObjects.amount_of_disks) / (time.time() - start)
                itemspersec = LotsOfObjects.amount_of_disks / (time.time() - mstart)
                runtimes.append(itemspersec)
                LotsOfObjects._print_progress('* machine {0}/{1} (run: {2} dps, avg: {3} dps)'.format(
                    i + 1, int(LotsOfObjects.amount_of_machines), round(itemspersec, 2), round(avgitemspersec, 2)
                ))
            runtimes.sort()
            print '\ncompleted ({0}s). min: {1} dps, max: {2} dps'.format(
                round(time.time() - tstart, 2), round(runtimes[1], 2), round(runtimes[-2], 2)
            )
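
Since the test only falls back to its defaults (50 machines, 5 disks) when the class attributes are unset, the benchmark size can be tuned up front, e.g. from a test runner's setup code. A minimal sketch:

    # Set these before the test runs; the getattr() fallbacks above only apply when they are unset
    LotsOfObjects.amount_of_machines = 100
    LotsOfObjects.amount_of_disks = 10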
Example No. 21
    def get_relation_set(remote_class, remote_key, own_class, own_key, own_guid):
        """
        This method will get a DataList for a relation.
        On a cache miss, the relation DataList will be rebuild and due to the nature of the full table scan, it will
        update all relations in the mean time.
        """

        # Example:
        # * remote_class = vDisk
        # * remote_key = vmachine
        # * own_class = vMachine
        # * own_key = vdisks
        # Called to load the vMachine.vdisks list (resulting in a possible scan of vDisk objects)
        # * own_guid = this vMachine object's guid

        volatile = VolatileFactory.get_client()
        own_name = own_class.__name__.lower()
        datalist = DataList({}, '{0}_{1}_{2}'.format(own_name, own_guid, remote_key), load=False)
        reverse_key = 'ovs_reverseindex_{0}_{1}'.format(own_name, own_guid)

        # Check whether the requested information is available in cache
        reverse_index = volatile.get(reverse_key)
        if reverse_index is not None and own_key in reverse_index:
            Toolbox.log_cache_hit('datalist', True)
            datalist.data = reverse_index[own_key]
            datalist.from_cache = True
            return datalist

        Toolbox.log_cache_hit('datalist', False)
        mutex = VolatileMutex('reverseindex')
        remote_name = remote_class.__name__.lower()
        blueprint_object = remote_class()  # vDisk object
        foreign_guids = {}

        remote_namespace = blueprint_object._namespace
        for relation in blueprint_object._relations:  # E.g. vmachine or vpool relation
            if relation.foreign_type is None:
                classname = remote_name
                foreign_namespace = blueprint_object._namespace
            else:
                classname = relation.foreign_type.__name__.lower()
                foreign_namespace = relation.foreign_type()._namespace
            if classname not in foreign_guids:
                foreign_guids[classname] = DataList.get_pks(foreign_namespace, classname)
            try:
                mutex.acquire(60)
                for foreign_guid in foreign_guids[classname]:
                    reverse_key = 'ovs_reverseindex_{0}_{1}'.format(classname, foreign_guid)
                    reverse_index = volatile.get(reverse_key)
                    if reverse_index is None:
                        reverse_index = {}
                    if relation.foreign_key not in reverse_index:
                        reverse_index[relation.foreign_key] = []
                        volatile.set(reverse_key, reverse_index)
            finally:
                mutex.release()
        remote_keys = DataList.get_pks(remote_namespace, remote_name)
        for guid in remote_keys:
            try:
                instance = remote_class(guid)
                for relation in blueprint_object._relations:  # E.g. vmachine or vpool relation
                    if relation.foreign_type is None:
                        classname = remote_name
                    else:
                        classname = relation.foreign_type.__name__.lower()
                    key = getattr(instance, '{0}_guid'.format(relation.name))
                    if key is not None:
                        try:
                            mutex.acquire(60)
                            reverse_index = volatile.get('ovs_reverseindex_{0}_{1}'.format(classname, key))
                            if reverse_index is None:
                                reverse_index = {}
                            if relation.foreign_key not in reverse_index:
                                reverse_index[relation.foreign_key] = []
                            if guid not in reverse_index[relation.foreign_key]:
                                if instance.updated_on_datastore():
                                    raise ConcurrencyException()
                                reverse_index[relation.foreign_key].append(guid)
                                volatile.set('ovs_reverseindex_{0}_{1}'.format(classname, key), reverse_index)
                        finally:
                            mutex.release()
            except ObjectNotFoundException:
                pass
            except ConcurrencyException:
                pass

        try:
            mutex.acquire(60)
            reverse_key = 'ovs_reverseindex_{0}_{1}'.format(own_name, own_guid)
            reverse_index = volatile.get(reverse_key)
            if reverse_index is None:
                reverse_index = {}
            if own_key not in reverse_index:
                reverse_index[own_key] = []
                volatile.set(reverse_key, reverse_index)
            datalist.data = reverse_index[own_key]
            datalist.from_cache = False
        finally:
            mutex.release()
        return datalist
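
For the vMachine/vDisk example in the comments above, the cache entries this method reads and rebuilds would look roughly like the sketch below. The layout is reconstructed from the code itself, not from a documented API, and `vDisk`, `vMachine` and `machine_guid` are illustrative names:

    # Key:   'ovs_reverseindex_<classname>_<guid>' of the object being pointed to
    # Value: {<foreign_key>: [guids of the objects pointing to it]}
    # e.g. 'ovs_reverseindex_vmachine_<guid>' -> {'vdisks': ['<disk guid>', '<disk guid>']}

    # Resolving vMachine.vdisks for one machine, following the parameter mapping in the comments:
    disks = get_relation_set(remote_class=vDisk, remote_key='vmachine',
                             own_class=vMachine, own_key='vdisks',
                             own_guid=machine_guid)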