def collections(self, action):
    if action in self._collections:
        return self._collections.get(action)
    prefix_key = cache.key(self.PREFIX)
    key = cache.key(self.PREFIX, action, self.id)
    collections = cache.get_list(key)
    if len(collections):
        collections = [int(c) for c in collections]
        self._collections[action] = collections
        log.debug("[C] Authz: %s (%s): %s", self, action, collections)
        return collections
    if self.is_admin:
        q = Collection.all_ids()
    else:
        q = db.session.query(Permission.collection_id)
        q = q.filter(Permission.deleted_at == None)  # noqa
        q = q.filter(Permission.role_id.in_(self.roles))
        if action == self.READ:
            q = q.filter(Permission.read == True)  # noqa
        if action == self.WRITE:
            q = q.filter(Permission.write == True)  # noqa
        q = q.distinct()
    # log.info("Query: %s", q)
    collections = [c for (c,) in q.all()]
    log.debug("Authz: %s (%s): %s", self, action, collections)
    cache.kv.sadd(prefix_key, key)
    cache.set_list(key, collections)
    self._collections[action] = collections
    return collections
def flush(cls):
    pipe = cache.kv.pipeline()
    prefix_key = cache.key(cls.PREFIX)
    for key in cache.kv.sscan_iter(prefix_key):
        pipe.delete(key)
    pipe.delete(prefix_key)
    pipe.execute()
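A minimal, self-contained sketch of the prefix-set pattern the two snippets above rely on: writers register every derived cache key in a Redis set under a shared prefix (the `sadd` call in `collections`), so `flush` can later sweep them in one pipeline without scanning the whole keyspace. The client name `kv` and the `PREFIX` value are illustrative assumptions, not taken from the original module.

import redis

# Assumed stand-ins for the application's cache client and key prefix.
kv = redis.Redis()
PREFIX = "authz"


def remember(key, value):
    # Register the key in the prefix set, then store the cached value.
    kv.sadd(PREFIX, key)
    kv.set(key, value)


def flush_all():
    # Delete every registered key plus the tracking set in one round trip.
    pipe = kv.pipeline()
    for key in kv.sscan_iter(PREFIX):
        pipe.delete(key)
    pipe.delete(PREFIX)
    pipe.execute()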
def collections(self, action):
    if action in self._collections:
        return self._collections.get(action)
    key = cache.key(action, self.id)
    collections = cache.kv.hget(self.PREFIX, key)
    if collections:
        collections = json.loads(collections)
        self._collections[action] = collections
        log.debug("[C] Authz: %s (%s): %d collections",
                  self, action, len(collections))
        return collections
    if self.is_admin:
        q = Collection.all_ids()
    else:
        q = db.session.query(Permission.collection_id)
        q = q.filter(Permission.deleted_at == None)  # noqa
        q = q.filter(Permission.role_id.in_(self.roles))
        if action == self.READ:
            q = q.filter(Permission.read == True)  # noqa
        if action == self.WRITE:
            q = q.filter(Permission.write == True)  # noqa
        q = q.distinct()
    # log.info("Query: %s - roles: %s", q, self.roles)
    collections = [c for (c,) in q.all()]
    log.debug("Authz: %s (%s): %d collections",
              self, action, len(collections))
    cache.kv.hset(self.PREFIX, key, json.dumps(collections))
    self._collections[action] = collections
    return collections
def flush_role(cls, role):
    # Clear collections ACL cache.
    cache.kv.hdel(cls.ACCESS, role.id)
    if role.is_blocked or role.deleted_at is not None:
        # End all user sessions.
        prefix = cache.key(cls.TOKENS, "%s." % role.id)
        cache.flush(prefix=prefix)
def compute_collections():
    """Update collection caches, including the global stats cache."""
    authz = Authz.from_role(None)
    schemata = defaultdict(int)
    countries = defaultdict(int)
    categories = defaultdict(int)

    for collection in Collection.all():
        compute_collection(collection)

        if authz.can(collection.id, authz.READ):
            categories[collection.category] += 1
            things = index.get_collection_things(collection.id)
            for schema, count in things.items():
                schemata[schema] += count
            for country in collection.countries:
                countries[country] += 1

    log.info("Updating global statistics cache...")
    data = {
        "collections": sum(categories.values()),
        "schemata": dict(schemata),
        "countries": dict(countries),
        "categories": dict(categories),
        "things": sum(schemata.values()),
    }
    key = cache.key(cache.STATISTICS)
    cache.set_complex(key, data, expires=cache.EXPIRE)
def ancestors(self):
    if self.parent_id is None:
        return []
    key = cache.key('ancestors', self.id)
    ancestors = cache.get_list(key)
    if len(ancestors):
        return ancestors
    parent_key = cache.key('ancestors', self.parent_id)
    ancestors = cache.get_list(parent_key)
    if not len(ancestors):
        ancestors = []
        parent = Document.by_id(self.parent_id)
        if parent is not None:
            ancestors = parent.ancestors
    ancestors.append(self.parent_id)
    if self.model.is_a(model.get(self.SCHEMA_FOLDER)):
        cache.set_list(key, ancestors, expire=cache.EXPIRE)
    return ancestors
def metadata():
    """Get operational metadata for the frontend.
    ---
    get:
      summary: Retrieve system metadata from the application.
      responses:
        '200':
          description: OK
          content:
            application/json:
              schema:
                type: object
      tags:
      - System
    """
    locale = get_locale()
    enable_cache(vary_user=False, vary=str(locale))
    key = cache.key('metadata', settings.PROCESS_ID, locale)
    data = cache.get_complex(key)
    if data is not None:
        return jsonify(data)

    auth = {}
    if settings.PASSWORD_LOGIN:
        auth['password_login_uri'] = url_for('sessions_api.password_login')
        auth['registration_uri'] = url_for('roles_api.create_code')
    if settings.OAUTH:
        auth['oauth_uri'] = url_for('sessions_api.oauth_init')
    locales = settings.UI_LANGUAGES
    locales = {l: Locale(l).get_language_name(l) for l in locales}
    data = {
        'status': 'ok',
        'maintenance': request.authz.in_maintenance,
        'app': {
            'title': settings.APP_TITLE,
            'description': settings.APP_DESCRIPTION,
            'version': __version__,
            'banner': settings.APP_BANNER,
            'ui_uri': settings.APP_UI_URL,
            'samples': settings.SAMPLE_SEARCHES,
            'logo': settings.APP_LOGO,
            'favicon': settings.APP_FAVICON,
            'locale': str(locale),
            'locales': locales
        },
        'categories': Collection.CATEGORIES,
        'countries': registry.country.names,
        'languages': registry.language.names,
        'model': model,
        'auth': auth
    }
    cache.set_complex(key, data, expires=120)
    return jsonify(data)
def ancestors(self):
    if self.parent_id is None:
        return []
    key = cache.key('ancestors', self.id)
    ancestors = cache.get_list(key)
    if ancestors is not None:
        return ancestors
    ancestors = self.parent.ancestors
    ancestors.append(self.parent_id)
    cache.set_list(key, ancestors)
    return ancestors
def to_token(self):
    if self.token_id is None:
        self.token_id = "%s.%s" % (self.id, make_token())
        key = cache.key(self.TOKENS, self.token_id)
        state = {
            "id": self.id,
            "roles": list(self.roles),
            "is_admin": self.is_admin,
        }
        cache.set_complex(key, state, expires=self.expire)
    return self.token_id
def from_token(cls, token_id):
    state_key = cache.key(cls.TOKENS, token_id)
    state = cache.get_complex(state_key)
    if state is None:
        raise Unauthorized()
    return cls(
        state.get("id"),
        state.get("roles"),
        is_admin=state.get("is_admin"),
        token_id=token_id,
    )
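A minimal sketch of the token round trip that to_token and from_token above implement, with an in-memory dict standing in for the cache backend so it runs on its own; make_token and SimpleTokenStore here are illustrative assumptions, not the application's API.

import json
import secrets


def make_token():
    # Stand-in for the application's random token generator.
    return secrets.token_urlsafe(30)


class SimpleTokenStore:
    def __init__(self):
        self._kv = {}  # plays the role of the cache backend

    def to_token(self, role_id, roles, is_admin=False):
        # Serialise the authz state under a fresh "<id>.<random>" token id.
        token_id = "%s.%s" % (role_id, make_token())
        state = {"id": role_id, "roles": list(roles), "is_admin": is_admin}
        self._kv[token_id] = json.dumps(state)
        return token_id

    def from_token(self, token_id):
        # Rebuild the state; fail when the token was flushed or never issued.
        state = self._kv.get(token_id)
        if state is None:
            raise KeyError("token expired or revoked")
        return json.loads(state)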
def get_collection_stats(collection_id):
    """Compute some statistics on the content of a collection."""
    key = cache.key('cstats', collection_id)
    data = cache.get_complex(key)
    if data is not None:
        return data

    log.info("Generating collection stats: %s", collection_id)
    query = {
        'size': 0,
        'query': {
            'bool': {
                'filter': [{'term': {'collection_id': collection_id}}]
            }
        },
        'aggs': {
            'schemata': {'terms': {'field': 'schema', 'size': 1000}},
            'countries': {'terms': {'field': 'countries', 'size': 500}},
            'languages': {'terms': {'field': 'languages', 'size': 10}},
        }
    }
    result = search_safe(index=entities_read_index(), body=query)
    aggregations = result.get('aggregations', {})
    data = {'count': result['hits']['total']}
    for facet in ['schemata', 'countries', 'languages']:
        data[facet] = {}
        for bucket in aggregations[facet]['buckets']:
            data[facet][bucket['key']] = bucket['doc_count']
    expire = randint(3600 * 3, 3600 * 12)
    cache.set_complex(key, data, expire=expire)
    return data
def index_bulk(collection, entities):
    """Index a set of entities."""
    lock = cache.lock(cache.key('index_bulk'))
    lock.acquire(blocking=True)
    try:
        actions = _index_updates(collection, entities)
        chunk_size = len(actions) + 1
        return bulk(es, actions,
                    chunk_size=chunk_size,
                    max_retries=10,
                    initial_backoff=2,
                    request_timeout=REQUEST_TIMEOUT,
                    timeout=TIMEOUT,
                    refresh='wait_for')
    except BulkIndexError as exc:
        log.warning('Indexing error: %s', exc)
    finally:
        try:
            lock.release()
        except Exception:
            log.exception("Cannot release index lock.")
def metadata():
    locale = get_locale()
    enable_cache(vary_user=False, vary=str(locale))
    key = cache.key('metadata', locale)
    data = cache.get_complex(key)
    if data is not None:
        return jsonify(data)

    auth = {}
    if settings.PASSWORD_LOGIN:
        auth['password_login_uri'] = url_for('sessions_api.password_login')
        auth['registration_uri'] = url_for('roles_api.create_code')
    if settings.OAUTH:
        auth['oauth_uri'] = url_for('sessions_api.oauth_init')
    data = {
        'status': 'ok',
        'maintenance': request.authz.in_maintenance,
        'app': {
            'title': settings.APP_TITLE,
            'description': settings.APP_DESCRIPTION,
            'version': __version__,
            'banner': settings.APP_BANNER,
            'ui_uri': settings.APP_UI_URL,
            'samples': settings.SAMPLE_SEARCHES,
            'logo': settings.APP_LOGO,
            'favicon': settings.APP_FAVICON,
            'locale': str(locale),
            'locales': settings.UI_LANGUAGES
        },
        'categories': Collection.CATEGORIES,
        'countries': registry.country.names,
        'languages': registry.language.names,
        'model': model,
        'auth': auth
    }
    cache.set_complex(key, data, expire=120)
    return jsonify(data)
def statistics():
    """Get a summary of the data accessible to an anonymous user.

    Changed [3.9]: Previously, this would return user-specific stats.
    ---
    get:
      summary: System-wide user statistics.
      description: >
        Get a summary of the data accessible to an anonymous user.
      responses:
        '200':
          description: OK
          content:
            application/json:
              schema:
                type: object
      tags:
      - System
    """
    enable_cache(vary_user=False)
    key = cache.key(cache.STATISTICS)
    data = {"countries": [], "schemata": [], "categories": []}
    data = cache.get_complex(key) or data
    return jsonify(data)
def _token_session(token):
    return cache.key("oauth-id-tok", token)
def refresh_role(role, sync=False):
    cache.kv.delete(cache.key(Authz.PREFIX, Authz.READ, role.id),
                    cache.key(Authz.PREFIX, Authz.WRITE, role.id),
                    cache.object_key(Role, role.id))
def update_roles():
    # Flush authz for anonymous users:
    cache.kv.delete(cache.key(Authz.PREFIX, Authz.READ),
                    cache.key(Authz.PREFIX, Authz.WRITE))
    for role in Role.all():
        update_role(role)
def update_role(role):
    """Synchronize denormalised role configuration."""
    update_subscriptions.delay(role.id)
    cache.kv.delete(cache.key(Authz.PREFIX, Authz.READ, role.id))
    cache.kv.delete(cache.key(Authz.PREFIX, Authz.WRITE, role.id))
def _oauth_session(token):
    return cache.key("oauth-sess", token)
def flush_role(cls, role_id):
    keys = [cache.key(a, role_id) for a in (cls.READ, cls.WRITE)]
    cache.kv.hdel(cls.PREFIX, *keys)
def refresh_role(role, sync=False):
    cache.kv.delete(cache.object_key(Role, role.id),
                    cache.object_key(Role, role.id, 'channels'),
                    cache.key(Authz.PREFIX, Authz.READ, role.id),
                    cache.key(Authz.PREFIX, Authz.WRITE, role.id))
def flush_collection_stats(collection_id):
    cache.kv.delete(cache.key('cstats', collection_id))
def typed_key(node, *extra):
    return cache.key('g3', node.type.name, node.value, *extra)
def destroy(self):
    if self.role is not None:
        self.flush_role(self.role)
    if self.token_id is not None:
        cache.delete(cache.key(self.TOKENS, self.token_id))