def resolve(stub):
    # Resolve all (clazz, key, schema) requests queued on the stub's
    # _rx_queue, filling stub._rx_cache with the loaded values.
    _instrument_stub(stub)
    cache_keys = {}  # cache id -> (clazz, key), to map cache results back
    schemata = {}    # cache id -> schema hint for Entity batch loading
    for clazz, key, schema in stub._rx_queue:
        # Skip anything already resolved onto this stub.
        if (clazz, key) in stub._rx_cache:
            continue
        cid = cache.object_key(clazz, key)
        cache_keys[cid] = (clazz, key)
        schemata[cid] = schema
    keys = list(cache_keys.keys())
    # Entity cache misses are collected per-schema for one batched query.
    queries = defaultdict(list)
    for cid, value in cache.get_many_complex(keys):
        clazz, key = cache_keys.get(cid)
        if value is None:
            # log.info("MISS [%s]: %s", clazz.__name__, key)
            if clazz == Entity:
                # Defer entity loading to the batched lookup below.
                queries[schemata.get(cid)].append(key)
            # Non-entity types (and any type with a registered loader)
            # are loaded one-by-one via LOADERS.
            loader = LOADERS.get(clazz)
            if loader is not None:
                value = loader(key)
        # NOTE: entity misses are stored as None here and overwritten by
        # the batched loop below when the entity is found.
        stub._rx_cache[(clazz, key)] = value
    for schema, ids in queries.items():
        for entity in entities_by_ids(ids, schemata=schema, cached=True):
            stub._rx_cache[(Entity, entity.get('id'))] = entity
def resolve(stub):
    # Fulfil every pending request on the stub's _rx_queue, populating
    # stub._rx_cache from the shared cache, per-type loaders, and a
    # batched entity lookup for cache misses.
    _instrument_stub(stub)
    pending = {}        # cache id -> (clazz, key)
    schema_hints = {}   # cache id -> schema for batched entity loading
    for clazz, key, schema in stub._rx_queue:
        if (clazz, key) in stub._rx_cache:
            continue
        cid = cache.object_key(clazz, key)
        pending[cid] = (clazz, key)
        schema_hints[cid] = schema
    # Entity misses grouped by schema, each group fetched in one query.
    entity_misses = defaultdict(list)
    for cid, value in cache.get_many_complex(list(pending.keys())):
        clazz, key = pending.get(cid)
        if value is None:
            log.info("MISS [%s]: %s", clazz.__name__, key)
            if clazz == Entity:
                entity_misses[schema_hints.get(cid)].append(key)
            loader = LOADERS.get(clazz)
            if loader is not None:
                value = loader(key)
        # Entity misses land here as None and are replaced below once
        # the batched lookup returns them.
        stub._rx_cache[(clazz, key)] = value
    for schema, entity_ids in entity_misses.items():
        loaded = entities_by_ids(entity_ids, schemata=schema, cached=True)
        for entity in loaded:
            stub._rx_cache[(Entity, entity.get('id'))] = entity
def get_collection_stats(collection_id):
    """Retrieve statistics on the content of a collection."""
    # Map each facet's cache key back to the facet name it stands for.
    facet_by_key = {_facet_key(collection_id, facet): facet
                    for facet in STATS_FACETS}
    # Default returned for facets with no cached result yet.
    default = {"values": [], "total": 0}
    results = cache.get_many_complex(facet_by_key.keys(), default)
    return {facet_by_key[cache_key]: value for cache_key, value in results}
def entities_by_ids(ids, schemata=None, cached=False,
                    includes=PROXY_INCLUDES, excludes=None):
    """Iterate over unpacked entities based on a search for the given
    entity IDs, yielding them in the order of *ids*.

    When ``cached`` is true (and the default source spec is in effect),
    entities are first looked up in the cache; only the remainder is
    fetched from the search index, and fresh hits are written back to
    the cache with a two-hour expiry.
    """
    ids = ensure_list(ids)
    if not len(ids):
        return
    # Caching is only valid for the default, full source spec — a custom
    # includes/excludes would poison the cache with partial documents.
    cached = cached and excludes is None and includes == PROXY_INCLUDES
    entities = {}
    if cached:
        keys = [cache.object_key(Entity, i) for i in ids]
        for _, entity in cache.get_many_complex(keys):
            if entity is not None:
                entities[entity.get("id")] = entity
    # BUG FIX: was `entities.get(id)`, using the builtin id() function as
    # the key — every ID was always considered missing, so the cache
    # lookup above never saved a search round-trip.
    missing = [i for i in ids if entities.get(i) is None]
    if missing:
        index = entities_read_index(schema=schemata)
        query = {
            "query": {"ids": {"values": missing}},
            "_source": _source_spec(includes, excludes),
            "size": MAX_PAGE,
        }
        result = es.search(index=index, body=query)
        for doc in result.get("hits", {}).get("hits", []):
            entity = unpack_result(doc)
            if entity is not None:
                entity_id = entity.get("id")
                entities[entity_id] = entity
                if cached:
                    key = cache.object_key(Entity, entity_id)
                    cache.set_complex(key, entity, expires=60 * 60 * 2)
    # Yield in the caller's requested order, skipping unresolved IDs.
    for i in ids:
        entity = entities.get(i)
        if entity is not None:
            yield entity