def add_cache(self, object_type, cache_impl_name, maxsize, **kwargs):
    """
    Add a new cache for the named object type and cache implementation.

    :param object_type: snake_case name of the Zendesk object type to cache
    :param cache_impl_name: name of the cache implementation (e.g. 'LRUCache', 'TTLCache')
    :param maxsize: maximum number of entries the new cache may hold
    :param kwargs: extra options forwarded to the cache implementation (e.g. ttl)
    :raises ZenpyException: if object_type is not a known Zendesk object type
    """
    known_types = ZendeskObjectMapping.class_mapping
    if object_type in known_types:
        cache_mapping[object_type] = ZenpyCache(cache_impl_name, maxsize, **kwargs)
    else:
        raise ZenpyException("No such object type: %s" % object_type)
def add_cache(self, object_type, cache_impl_name, maxsize, **kwargs):
    """
    Add a new cache for the named object type and cache implementation.

    :param object_type: snake_case name of the Zendesk object type to cache
    :param cache_impl_name: name of the cache implementation (e.g. 'LRUCache', 'TTLCache')
    :param maxsize: maximum number of entries the new cache may hold
    :param kwargs: extra options forwarded to the cache implementation (e.g. ttl)
    :raises ZenpyException: if object_type is not a known Zendesk object type
    """
    valid_types = self.users.object_manager.class_manager.class_mapping
    if object_type not in valid_types:
        raise ZenpyException("No such object type: %s" % object_type)
    self._get_cache_mapping()[object_type] = ZenpyCache(cache_impl_name, maxsize, **kwargs)
class ObjectManager(object):
    """
    The ObjectManager is responsible for maintaining various caches
    and also provides access to the ClassManager.
    """

    # Per-object-type caches. Frequently changing types (tickets, user fields,
    # sharing agreements) use a TTL cache so stale entries expire on their own;
    # the rest use plain LRU caches. NOTE(review): this dict is a class attribute,
    # so all ObjectManager instances share the same caches — presumably intentional;
    # confirm if multiple independent API sessions are ever used.
    cache_mapping = {
        'user': ZenpyCache('LRUCache', maxsize=10000),
        'organization': ZenpyCache('LRUCache', maxsize=10000),
        'group': ZenpyCache('LRUCache', maxsize=10000),
        'brand': ZenpyCache('LRUCache', maxsize=10000),
        'ticket': ZenpyCache('TTLCache', maxsize=10000, ttl=30),
        'comment': ZenpyCache('LRUCache', maxsize=10000),
        'request': ZenpyCache('LRUCache', maxsize=10000),
        'user_field': ZenpyCache('TTLCache', maxsize=10000, ttl=30),
        'organization_field': ZenpyCache('LRUCache', maxsize=10000),
        'ticket_field': ZenpyCache('LRUCache', maxsize=10000),
        'sharing_agreement': ZenpyCache('TTLCache', maxsize=10000, ttl=6000),
    }

    def __init__(self, api):
        self.class_manager = ClassManager(api)

    def object_from_json(self, object_type, object_json):
        """Deserialize object_json into an instance of object_type via the ClassManager."""
        return self.class_manager.object_from_json(object_type, object_json)

    def delete_from_cache(self, obj):
        """Remove obj — or every element of obj, if it is a list — from its cache."""
        if isinstance(obj, list):
            for o in obj:
                self._delete_from_cache(o)
        else:
            self._delete_from_cache(obj)

    def _delete_from_cache(self, obj):
        """Remove a single object from the cache for its type, if one exists."""
        object_type = to_snake_case(obj.__class__.__name__)
        cache = self.cache_mapping.get(object_type)
        if cache is None:
            return
        # Rename avoids shadowing the parameter with the popped value.
        removed = cache.pop(obj.id, None)
        if removed:
            log.debug("Cache RM: [%s %s]", object_type.capitalize(), removed.id)

    def query_cache(self, object_type, _id):
        """Return the cached object of object_type keyed by _id, or None on a miss."""
        if object_type not in self.cache_mapping:
            return None
        cache = self.cache_mapping[object_type]
        if _id in cache:
            log.debug("Cache HIT: [%s %s]", object_type.capitalize(), _id)
            return cache[_id]
        log.debug('Cache MISS: [%s %s]', object_type.capitalize(), _id)
        return None

    def update_caches(self, _json):
        """Populate the caches from an API response body (search or regular)."""
        if 'results' in _json:
            self._cache_search_results(_json)
        else:
            for object_type in self.cache_mapping:
                self._add_to_cache(object_type, _json)

    def _add_to_cache(self, object_type, object_json):
        """Cache any objects of object_type found in object_json (singular or plural key)."""
        cache = self.cache_mapping[object_type]
        multiple_key = object_type + 's'
        if object_type in object_json:
            obj = object_json[object_type]
            log.debug("Caching: [%s %s]", object_type.capitalize(), obj['id'])
            self._cache_item(cache, obj, object_type)
        elif multiple_key in object_json:
            objects = object_json[multiple_key]
            log.debug("Caching %s %s ", len(objects), multiple_key.capitalize())
            for obj in objects:
                self._cache_item(cache, obj, object_type)

    def _cache_search_results(self, _json):
        """Cache each search result under the cache for its result_type."""
        results = _json['results']
        log.debug("Caching %s search results", len(results))
        for result in results:
            object_type = result['result_type']
            # Guard against result types we have no cache for; previously this
            # raised KeyError and aborted processing of the whole response.
            cache = self.cache_mapping.get(object_type)
            if cache is None:
                log.debug("No cache configured for result_type: %s", object_type)
                continue
            self._cache_item(cache, result, object_type)

    def _cache_item(self, cache, item_json, item_type):
        """Deserialize item_json and store it in cache under its type-specific key."""
        key = self.get_key(item_type)
        cache[item_json[key]] = self.object_from_json(item_type, item_json)

    def get_key(self, item_type):
        """Return the JSON attribute used as the cache key for item_type.

        User and organization fields are keyed by 'key'; everything else by 'id'.
        """
        if item_type in ('user_field', 'organization_field'):
            return 'key'
        return 'id'