def get_default_where_clauses(
        self, context: IBaseObject, unrestricted: bool = False) -> typing.List[str]:
    clauses = []
    sql_wheres = []
    if unrestricted is False:
        users = []
        principal = get_authenticated_user()
        if principal is None:
            # assume anonymous then
            principal = AnonymousUser()
        users.append(principal.id)
        users.extend(principal.groups)
        roles = get_roles_principal(context)
        clauses.extend([
            "json->'access_users' ?| array['{}']".format(
                "','".join([sqlq(u) for u in users])),
            "json->'access_roles' ?| array['{}']".format(
                "','".join([sqlq(r) for r in roles])),
        ])
        sql_wheres.append("({})".format(" OR ".join(clauses)))
    container = find_container(context)
    if container is None:
        raise ContainerNotFound()
    sql_wheres.append(
        f"""json->>'container_id' = '{sqlq(container.id)}'""")
    sql_wheres.append("""type != 'Container'""")
    sql_wheres.append(f"""parent_id != '{sqlq(TRASHED_ID)}'""")
    return sql_wheres

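# Standalone illustration (not from the source; principal and role names are made up)
# of the security clause shape produced above: PostgreSQL's jsonb `?|` operator matches
# rows whose access_users/access_roles object shares at least one key with the array.
users = ["bob", "group:editors"]
roles = ["guillotina.Reader"]
for field, keys in (("access_users", users), ("access_roles", roles)):
    clause = "json->'{}' ?| array['{}']".format(field, "','".join(keys))
    print(clause)
# json->'access_users' ?| array['bob','group:editors']
# json->'access_roles' ?| array['guillotina.Reader']
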
async def patch_aliases(context, request):
    data = await request.json()
    container = find_container(context)
    await utils.add_aliases(context, data['paths'], container=container, moved=False)
    return {}

async def query_aggregation(self, context: IBaseObject, query: typing.Any):
    '''
    Raw search query, uses parser to transform query
    '''
    parsed_query = parse_query(context, query, self)
    container = find_container(context)
    if container is not None:
        return await self.aggregation(container, parsed_query)
    raise ContainerNotFound()

async def search(self, context: IBaseObject, query: typing.Any):
    """
    Search query, uses parser to transform query
    """
    parsed_query = parse_query(context, query, self)
    container = find_container(context)
    if container is not None:
        return await self._query(container, parsed_query)  # type: ignore
    raise ContainerNotFound()

async def _query(self, context: IResource, query: ParsedQueryInfo,
                 unrestricted: bool = False):
    sql, arguments = self.build_query(
        context, query, ["id", "zoid", "json"], unrestricted=unrestricted)
    txn = get_current_transaction()
    conn = await txn.get_connection()
    results = []
    fullobjects = query["fullobjects"]
    container = find_container(context)
    if container is None:
        raise ContainerNotFound()
    try:
        context_url = get_object_url(container)
        request = get_current_request()
    except RequestNotFound:
        context_url = get_content_path(container)
        request = None
    logger.debug(f"Running search:\n{sql}\n{arguments}")
    async with txn.lock:
        records = await conn.fetch(sql, *arguments)
    for record in records:
        data = json.loads(record["json"])
        if fullobjects and request is not None and txn is not None:
            # Get Object
            obj = await txn.get(data["uuid"])
            # Serialize object
            view = DefaultGET(obj, request)
            result = await view()
        else:
            result = self.load_meatdata(query, data)
        result["@name"] = record["id"]
        result["@uid"] = record["zoid"]
        result["@id"] = data["@absolute_url"] = context_url + data["path"]
        results.append(result)
    # also do count...
    total = len(results)
    if total >= query["size"] or query["_from"] != 0:
        sql, arguments = self.build_count_query(
            context, query, unrestricted=unrestricted)
        logger.debug(f"Running search:\n{sql}\n{arguments}")
        async with txn.lock:
            records = await conn.fetch(sql, *arguments)
        total = records[0]["count"]
    return {"items": results, "items_total": total}

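# Minimal sketch (not part of the source) of the counting shortcut at the end of
# _query: when the first page comes back short and no offset was requested,
# len(results) is already the exact total and the extra COUNT(*) query is skipped.
def needs_count_query(num_results: int, size: int, offset: int) -> bool:
    return num_results >= size or offset != 0

assert needs_count_query(5, 20, 0) is False   # short first page -> total is 5
assert needs_count_query(20, 20, 0) is True   # full page -> there may be more rows
assert needs_count_query(5, 20, 40) is True   # offset used -> must count separately
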
def get_indexer(context=None):
    search = query_utility(ICatalogUtility)
    if not search:
        return  # no search configured
    klass = app_settings["indexer"]
    indexer = klass.get()
    if indexer is None:
        container = find_container(context)
        if container is None:
            return
        indexer = klass(container)
        indexer.register()
    return indexer

async def object_moved(ob, event):
    parent_path = get_content_path(event.old_parent)
    old_path = os.path.join(parent_path, event.old_name)
    storage = utils.get_storage()
    container = find_container(ob)
    execute.after_request(
        utils.add_aliases, ob, [old_path], moved=True,
        container=container, storage=storage)
    cache = get_utility(ICacheUtility)
    execute.after_request(
        cache.send_invalidation,
        ['{}-id'.format(ob.__uuid__),
         '{}-links'.format(ob.__uuid__),
         '{}-links-to'.format(ob.__uuid__)])

async def translate_links(content, container=None) -> str:
    """
    Optimized URL builder so we don't pull full objects from the database;
    however, we lose caching. It would be great to move this into an
    implementation that works with the current cache/invalidation strategies.
    """
    req = None
    if container is None:
        container = find_container(content)
    container_url = get_object_url(container, req)
    dom = html.fromstring(content)
    contexts = {}
    for node in dom.xpath("//a") + dom.xpath("//img"):
        url = node.get("href", node.get("src", ""))
        if "resolveuid/" not in url:
            continue
        path = []
        _, _, current_uid = url.partition("resolveuid/")
        current_uid = current_uid.split("/")[0].split("?")[0]
        error = False
        while current_uid != container.uuid:
            if current_uid not in contexts:
                # fetch from db
                result = await _get_id(current_uid)
                if result is not None:
                    contexts[current_uid] = result
                else:
                    # could not find, this should not happen
                    error = True
                    break
            path = [contexts[current_uid]["id"]] + path
            current_uid = contexts[current_uid]["parent"]
        if error:
            continue
        url = os.path.join(container_url, "/".join(path))
        attr = "href" if node.tag.lower() == "a" else "src"
        node.attrib[attr] = url
    return html.tostring(dom).decode("utf-8")

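# Standalone sketch (runnable with lxml alone; the HTML snippet and UID are made up)
# of the resolveuid extraction step used above: strip everything after the UID so
# trailing path segments and query strings do not leak into the lookup key.
from lxml import html

dom = html.fromstring('<p><a href="resolveuid/abc123/view?v=1">doc</a></p>')
for node in dom.xpath("//a") + dom.xpath("//img"):
    url = node.get("href", node.get("src", ""))
    if "resolveuid/" not in url:
        continue
    _, _, uid = url.partition("resolveuid/")
    uid = uid.split("/")[0].split("?")[0]
    print(uid)  # -> abc123
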
async def __call__(self):
    search = get_search_utility()
    container = find_container(self.context)
    depth = get_content_depth(container)
    max_depth = None
    if "expand.navigation.depth" in self.request.query:
        max_depth = str(
            int(self.request.query["expand.navigation.depth"]) + depth)
        depth_query = {"depth__gte": depth, "depth__lte": max_depth}
    else:
        depth_query = {"depth": depth}
    depth_query["hidden_navigation"] = False
    result = await search.search(
        container,
        {
            **{"_sort_asc": "position_in_parent", "_size": 100},
            **depth_query
        },
    )
    pending_dict = {}
    for brain in result["items"]:
        brain_serialization = {
            "title": brain.get("title"),
            "@id": brain.get("@id"),
            "@name": brain.get("uuid"),
            "description": "",
        }
        pending_dict.setdefault(
            brain.get("parent_uuid"), []).append(brain_serialization)
    parent_uuid = container.uuid
    if parent_uuid not in pending_dict:
        final_list = []
    else:
        final_list = pending_dict[parent_uuid]
    if max_depth is not None:
        recursive_fill(final_list, pending_dict)
    return {"@id": self.request.url, "items": final_list}

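# recursive_fill is referenced above but not shown in this listing; a minimal sketch
# (an assumption, not the source implementation) of how such a helper could nest the
# children grouped by parent uuid under each item, using the "@name" key set above.
def recursive_fill(items, pending_dict):
    for item in items:
        children = pending_dict.get(item["@name"], [])
        if children:
            item["items"] = children
            recursive_fill(children, pending_dict)
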
async def search_get(context, request):
    query = request.query.copy()
    search = get_search_utility(query)
    if search is None:
        return {'@id': request.url, 'items': [], 'items_total': 0}
    parsed_query = parse_query(context, query, search)
    container = find_container(context)
    result = await search.search(container, parsed_query)
    return {
        '@id': request.url,
        'items': result['member'],
        'items_total': result['items_count'],
        'batching': {
            'from': parsed_query['_from'] or 0,
            'size': parsed_query['size']
        }
    }

async def _iter_copyable_content(context, request):
    policy = get_security_policy()
    data = await request.json()
    if 'source' not in data:
        raise HTTPPreconditionFailed(content={'reason': 'No source'})
    source = data['source']
    if not isinstance(source, list):
        source = [source]
    container = find_container(context)
    container_url = get_object_url(container)
    for item in source:
        if item.startswith(container_url):
            path = item[len(container_url):]
            ob = await navigate_to(container, path.strip('/'))
            if ob is None:
                raise HTTPPreconditionFailed(content={
                    'reason': 'Could not find content',
                    'source': item
                })
        elif '/' in item:
            ob = await navigate_to(container, item.strip('/'))
            if ob is None:
                raise HTTPPreconditionFailed(content={
                    'reason': 'Could not find content',
                    'source': item
                })
        else:
            try:
                ob = await get_object_by_uid(item)
            except KeyError:
                raise HTTPPreconditionFailed(content={
                    'reason': 'Could not find content',
                    'source': item
                })
        if not policy.check_permission('guillotina.DuplicateContent', ob):
            raise HTTPPreconditionFailed(content={
                'reason': 'Invalid permission',
                'source': item
            })
        yield ob

async def add_aliases(ob, paths: list, container=None, moved=True, storage=None):  # noqa
    if not isinstance(moved, bool):
        raise Exception("Invalid type {}".format(moved))
    if hasattr(ob, "context"):
        uuid = ob.context.uuid
    else:
        uuid = ob.uuid
    storage = storage or get_storage()
    if storage is None:
        return
    if container is None:
        container = find_container(ob)
    query = Query.into(aliases_table).columns(
        "zoid", "container_id", "path", "moved")  # noqa
    values = []
    for i, path in enumerate(paths):
        if not isinstance(path, str):
            raise Exception("Invalid type {}".format(path))
        path = "/" + path.strip("/")
        values.append(path)
        query = query.insert(
            uuid,
            container.uuid,
            f"${i + 1}",
            moved,
        )
    query = str(query)
    for i in range(len(paths)):
        query = query.replace(f"'${i + 1}'", f"${i + 1}")
    async with storage.pool.acquire() as conn:
        await conn.execute(query, *values)

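# Standalone sketch (table name and values are made up; runnable with pypika only)
# of the placeholder trick used in add_aliases: pypika renders every value as a SQL
# literal, so "$1"/"$2" come out quoted; stripping those quotes afterwards turns
# them back into asyncpg bind parameters for the paths.
from pypika import Query, Table

aliases_table = Table("aliases")
paths = ["/folder/doc", "/folder/old-doc"]
query = Query.into(aliases_table).columns("zoid", "container_id", "path", "moved")
for i, _ in enumerate(paths):
    query = query.insert("uid-1", "container-1", f"${i + 1}", False)
sql = str(query)
for i in range(len(paths)):
    sql = sql.replace(f"'${i + 1}'", f"${i + 1}")
print(sql)
# roughly: INSERT INTO "aliases" ("zoid","container_id","path","moved")
#          VALUES ('uid-1','container-1',$1,false),('uid-1','container-1',$2,false)
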
def __init__(self, context):
    self.context = context
    self.search = None
    self.container = find_container(self.context)
    self.query = None
    self.utility = query_utility(ICatalogUtility)

def get_container_id(self):
    container = find_container(self.content)
    if container is not None:
        return container.__name__