def _safe_get_request(request):
    """Return *request*, falling back to the task-local request.

    If ``request`` is ``None`` and no request is bound to the current
    task either, ``None`` is returned (the lookup failure is swallowed).
    """
    if request is not None:
        return request
    try:
        return get_current_request()
    except RequestNotFound:
        return request
async def runit(type_name):
    """Benchmark content creation + deserialization for ``type_name``.

    Creates ITERATIONS objects inside a mock transaction, firing the
    before-add/added events and running a full JSON deserialization
    (including behavior data) for each, then prints the elapsed time.
    """
    print(f'Test content creation with {type_name}')
    request = get_current_request()
    # Mock transaction so no real storage is involved in the benchmark.
    txn = mocks.MockTransaction()
    container = await create_content(type_name, id='container')
    container._p_jar = txn
    start = time.time()
    for _ in range(ITERATIONS):
        ob = await create_content(type_name, id='foobar')
        ob._p_jar = txn
        # Fire the same event sequence a real add would.
        await notify(BeforeObjectAddedEvent(ob, container, 'foobar'))
        deserializer = get_multi_adapter(
            (ob, request), IResourceDeserializeFromJson)
        data = {
            'title': 'Foobar',
            'guillotina.behaviors.dublincore.IDublinCore': {
                'tags': ['foo', 'bar']
            },
            'measures.configuration.ITestBehavior1': {
                'foobar': '123'
            },
            'measures.configuration.ITestBehavior2': {
                'foobar': '123'
            },
            'measures.configuration.ITestBehavior3': {
                'foobar': '123'
            }
        }
        # validate_all=True exercises full schema validation as well.
        await deserializer(data, validate_all=True)
        await notify(ObjectAddedEvent(ob, container, 'foobar', payload=data))
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')
async def runit(type_name):
    """Benchmark content serialization for ``type_name``.

    Deserializes one object once (to populate behavior data), then times
    ITERATIONS full JSON serializations of it.
    """
    print(f'Test content serialization with {type_name}')
    request = get_current_request()
    txn = mocks.MockTransaction()
    ob = await create_content(type_name, id='foobar')
    request._db_id = 'foobar'
    ob._p_jar = txn
    # Populate the object once so serialization has realistic data.
    deserializer = get_multi_adapter(
        (ob, request), IResourceDeserializeFromJson)
    data = {
        'title': 'Foobar',
        'guillotina.behaviors.dublincore.IDublinCore': {
            'tags': ['foo', 'bar']
        },
        'measures.configuration.ITestBehavior1': {
            'foobar': '123'
        },
        'measures.configuration.ITestBehavior2': {
            'foobar': '123'
        },
        'measures.configuration.ITestBehavior3': {
            'foobar': '123'
        }
    }
    await deserializer(data, validate_all=True)
    start = time.time()
    for _ in range(ITERATIONS):
        serializer = get_multi_adapter(
            (ob, request), IResourceSerializeToJson)
        await serializer()
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')
async def read_runner(container, strategy):
    """Benchmark object reads under a given transaction *strategy*.

    First measures ITERATIONS reads each in its own transaction, then
    ITERATIONS reads inside a single long-lived transaction.
    """
    request = get_current_request()
    txn = get_transaction(request)
    tm = get_tm(request)
    id_ = uuid.uuid4().hex
    # Start from a clean transaction, commit one item to read back.
    await tm.abort(txn=txn)
    txn = await tm.begin(request=request)
    ob = await create_content_in_container(container, 'Item', id_)
    await tm.commit(txn=txn)
    # Switch the storage's conflict-resolution strategy under test.
    tm._storage._transaction_strategy = strategy
    print(f'Test content read with {strategy} strategy')
    start = time.time()
    for _ in range(ITERATIONS):
        # one transaction per read
        txn = await tm.begin(request=request)
        assert await txn.get(ob._p_oid) is not None
        await tm.commit(txn=txn)
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')
    print(f'Test large content read with {strategy} strategy')
    start = time.time()
    # single transaction for all reads
    txn = await tm.begin(request=request)
    for _ in range(ITERATIONS):
        assert await txn.get(ob._p_oid) is not None
    await tm.commit(txn=txn)
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds\n')
def get_principal(self, ident):
    """Return the group principal for *ident*, cached on the request.

    :param ident: group identifier
    :return: ``GuillotinaGroup`` instance (one per request per ident)
    """
    request = get_current_request()
    if not hasattr(request, '_cache_groups'):
        request._cache_groups = {}
    cache = request._cache_groups
    # membership test directly on the dict; the previous
    # ``ident not in cache.keys()`` built a keys view for no benefit
    if ident not in cache:
        cache[ident] = GuillotinaGroup(request, ident)
    return cache[ident]
async def security_changed(obj, event):
    """React to a security change by (re)indexing affected content."""
    if IGroupFolder.providedBy(obj):
        # Group permissions are assumed to be handled correctly by the
        # search layer already; only the group object itself is reindexed.
        await index_object(obj, modified=True, security=True)
        return
    # Everything else: schedule an async reindex of the object subtree.
    current_request = get_current_request()
    reindex_in_future(obj, current_request, True)
async def __aenter__(self):
    """Enter the context: stash any active request, install ours, log in."""
    existing = None
    try:
        existing = get_current_request()
    except RequestNotFound:
        # no request bound to this task yet; nothing to restore later
        pass
    self._existing_request = existing
    aiotask_context.set('request', self.request)
    await login(self.request, self.user)
    return self
async def _build_security_query(
        self, container, query, doc_type=None, size=10, request=None,
        scroll=None):
    """Wrap *query* in an Elasticsearch bool filter restricting results
    to content the current user (and their groups/roles) may access.

    :param container: container being searched (currently unused here)
    :param query: base ES query dict (or None for match-all)
    :param doc_type: NOTE(review): accepted but never used in this body
    :param size: result page size
    :param request: request to derive the security interaction from;
        falls back to the task-local request
    :param scroll: optional ES scroll duration
    :return: dict of kwargs (``body``, ``size``, optionally ``scroll``)
    """
    if query is None:
        query = {}
    q = {}
    # The users who has plone.AccessContent permission by prinperm
    # The roles who has plone.AccessContent permission by roleperm
    users = []
    roles = []
    if request is None:
        request = get_current_request()
    interaction = IInteraction(request)
    for user in interaction.participations:  # pylint: disable=E1133
        users.append(user.principal.id)
        users.extend(user.principal.groups)
        roles_dict = interaction.global_principal_roles(
            user.principal.id, user.principal.groups)
        # keep only roles actually granted (truthy value)
        roles.extend([key for key, value in roles_dict.items() if value])
    # We got all users and roles
    # users: users and groups
    should_list = [{'match': {'access_roles': x}} for x in roles]
    should_list.extend([{'match': {'access_users': x}} for x in users])
    # at least one role/user match required for a doc to be visible
    permission_query = {
        'query': {
            'bool': {
                'filter': {
                    'bool': {
                        'should': should_list,
                        'minimum_should_match': 1
                    }
                }
            }
        }
    }
    query = merge_dicts(query, permission_query)
    # query.update(permission_query)
    q['body'] = query
    q['size'] = size
    if scroll:
        q['scroll'] = scroll
    logger.debug(q)
    return q
async def get_registry(self, container, request):
    """Return the container's registry annotation, cached on the request."""
    if request is None:
        request = get_current_request()
    # Fetch and memoize on the request only on first access.
    if not hasattr(request, 'container_settings'):
        annotations_container = IAnnotations(container)
        request.container_settings = await annotations_container.async_get(
            REGISTRY_DATA_KEY)
    return request.container_settings
async def search_raw(
    self,
    container,
    query,
    doc_type=None,
    size=10,
    request=None,
    scroll=None,
    index=None,
):
    """
    Search raw query

    Runs *query* against the container's index (or *index*), wrapping it
    with the caller's security filter, and returns a normalized result
    dict with ``items``, ``items_total`` and any aggregation/suggest/
    profile/scroll sections present in the ES response.

    :raises QueryErrorException: when any shard reports a failure
    """
    if index is None:
        index = await self.get_container_index_name(container)
    t1 = time.time()
    if request is None:
        try:
            request = get_current_request()
        except RequestNotFound:
            # request stays None; downstream helpers must tolerate that
            pass
    # NOTE(review): arguments are passed positionally here; if
    # _build_security_query has the signature
    # (container, query, doc_type=None, size=10, request=None, scroll=None)
    # then ``size`` binds to ``doc_type`` and ``scroll`` to ``size`` —
    # confirm the method's actual signature and prefer keyword arguments.
    q = await self._build_security_query(container, query, size, scroll)
    q["ignore_unavailable"] = True
    logger.debug("Generated query %s", json.dumps(query))
    conn = self.get_connection()
    result = await conn.search(index=index, **q)
    if result.get("_shards", {}).get("failed", 0) > 0:
        # surface the first shard failure reason to the caller
        logger.warning(f'Error running query: {result["_shards"]}')
        error_message = "Unknown"
        for failure in result["_shards"].get("failures") or []:
            error_message = failure["reason"]
        raise QueryErrorException(reason=error_message)
    items = self._get_items_from_result(container, request, result)
    # ES 7 moved hits.total to an object with a ``value`` key
    if ELASTIC6:
        items_total = result["hits"]["total"]
    else:
        items_total = result["hits"]["total"]["value"]
    final = {"items_total": items_total, "items": items}
    if "aggregations" in result:
        final["aggregations"] = result["aggregations"]
    if "suggest" in result:
        final["suggest"] = result["suggest"]
    if "profile" in result:
        final["profile"] = result["profile"]
    if "_scroll_id" in result:
        final["_scroll_id"] = result["_scroll_id"]
    tdif = time.time() - t1
    logger.debug(f"Time ELASTIC {tdif}")
    await notify(SearchDoneEvent(query, items_total, request, tdif))
    return final
async def update_by_query(self, query, context=None, indexes=None):
    """Run an update-by-query against the current (or given) indexes.

    :param query: ES update-by-query body
    :param context: optional content object; when given, its sub-indexes
        are included in the target index list
    :param indexes: explicit index list; defaults to the container's
        current indexes
    """
    request = None
    if indexes is None:
        request = get_current_request()
        indexes = await self.get_current_indexes(request.container)
    if context is not None:
        if request is None:
            # BUG FIX: ``request`` was previously only bound when
            # ``indexes`` was None, raising NameError on the line below
            # when explicit indexes and a context were both supplied.
            request = get_current_request()
        for index in await get_content_sub_indexes(
                request.container, get_content_path(context)):
            indexes.append(index['index'])
    return await self._update_by_query(query, ','.join(indexes))
async def container_added(conversation, event):
    """Ensure the creator participates and grant participant roles."""
    creator = get_authenticated_user_id(get_current_request())
    if creator not in conversation.users:
        conversation.users.append(creator)
    # Every listed user becomes a conversation participant.
    role_manager = IPrincipalRoleManager(conversation)
    for participant in conversation.users or []:
        role_manager.assign_role_to_principal(
            'guillotina_chat.ConversationParticipant', participant)
async def get_index_for(context, container=None, request=None):
    """Resolve the index name for *context*.

    Walks up from the context looking for an index manager; when none is
    found, falls back to the container's (looked up via the request if
    necessary).
    """
    manager = find_index_manager(parent=context)
    if manager is None:
        if container is None:
            if request is None:
                request = get_current_request()
            container = request.container
        manager = get_adapter(container, IIndexManager)
    return await manager.get_index_name()
async def security_changed(obj, event):
    """Queue a security reindex of *obj*'s subtree after a sharing change."""
    if IGroupFolder.providedBy(obj):
        # assuming permissions for group are already handled correctly with
        # group:group id principal
        return
    # We need to reindex the objects below
    current = get_current_request()
    reindex = AsyncCatalogReindex(obj, current, security=True)
    current._futures.update({obj.id: reindex()})
def get_conflict_summary(self, oid, txn, old_serial, writer): from guillotina.utils import get_current_request try: req = get_current_request() except RequestNotFound: req = None max_attempts = app_settings.get('conflict_retry_attempts', 3) attempts = getattr(req, '_retry_attempt', 0) return f'''Object ID: {oid}
def __init__(self, request=None):
    """Initialize the interaction, binding it to *request*.

    When no request is given, the task-local ("magic") request is used.
    """
    self.participations = []
    self._cache = {}
    self.principal = None
    # Try magic request lookup if request not given
    self.request = request if request is not None else get_current_request()
async def install(container, addon):
    """Install *addon* (and its dependencies, first) into *container*."""
    request = get_current_request()
    addon_config = app_settings['available_addons'][addon]
    # Dependencies are installed recursively before the addon itself.
    for dependency in addon_config['dependencies']:
        await install(container, dependency)
    await apply_coroutine(addon_config['handler'].install, container, request)
    # Record the addon as enabled in the container registry.
    registry = task_vars.registry.get()
    addons = registry.for_interface(IAddons)
    addons['enabled'] |= {addon}
async def iterate_bucket(self):
    """Yield every S3 object stored under the current container's prefix.

    Uses the list_objects paginator so buckets with more than one page
    of keys are fully traversed.
    """
    req = get_current_request()
    bucket_name = await self.get_bucket_name()
    # BUG FIX: a redundant, non-paginated ``list_objects`` call was made
    # here and its result immediately discarded — the paginator below
    # performs the same listing, so the extra API round-trip is removed.
    paginator = self._s3aioclient.get_paginator('list_objects')
    async for page in paginator.paginate(
            Bucket=bucket_name, Prefix=req._container_id + '/'):
        for item in page.get('Contents', []):
            yield item
async def test_gcr_memory(self):
    """Smoke-test that repeated get_current_request() calls don't leak.

    Calls the lookup a million times and compares max RSS before/after;
    growth beyond ~10MB is treated as a leak.
    """
    self.request = get_mocked_request()
    count = 0
    # Baseline max RSS. NOTE(review): ru_maxrss is KB on Linux but bytes
    # on macOS, so the "MB" unit here is platform-dependent — confirm.
    current = resource.getrusage(
        resource.RUSAGE_SELF).ru_maxrss / 1024.0 / 1024.0
    while True:
        count += 1
        utils.get_current_request()
        if count % 1000000 == 0:
            break
        # Periodically collect so transient garbage doesn't inflate RSS.
        # (The break above fires first at 1,000,000, so this runs only
        # on the intermediate 100k multiples.)
        if count % 100000 == 0:
            gc.collect()
    new = resource.getrusage(
        resource.RUSAGE_SELF).ru_maxrss / 1024.0 / 1024.0
    if new - current > 10:  # memory leak, this shouldn't happen
        # Intentionally-failing assert to surface the before/after values.
        assert new == current
def check_read_only(self):
    """Raise if writes are not allowed for the current request/storage.

    :return: False when no request can be resolved (nothing to check)
    :raises Unauthorized: when the request explicitly disables DB writes
    :raises ReadOnlyError: when the underlying storage is read-only
    """
    if self.request is None:
        try:
            self.request = get_current_request()
        except RequestNotFound:
            return False
    # Requests may opt out of writes (e.g. during migrations); absent
    # attribute means writes are enabled.
    if not getattr(self.request, '_db_write_enabled', True):
        # BUG FIX: error message typo 'permited' -> 'permitted'
        raise Unauthorized('Adding content not permitted')
    if self._manager._storage._read_only:
        raise ReadOnlyError()
async def asubscribers(self, objects, provided):
    """Invoke all subscribers for *objects*, logging profiling info.

    Coroutine subscribers are awaited; sync ones called directly.
    Returns the list of subscriber results in priority order.
    """
    from guillotina.utils import get_current_request, get_authenticated_user_id, get_dotted_name
    from guillotina.exceptions import RequestNotFound
    from guillotina import task_vars
    # Convention: (obj, event) pairs — objects[1] is the event.
    if len(objects) > 1:
        event = get_dotted_name(objects[1])
        context = getattr(objects[0], "__uuid__", None)
    else:
        event = get_dotted_name(objects[0])
        context = None
    try:
        request = get_current_request()
    except RequestNotFound:
        request = None
    try:
        url = request.url.human_repr()
    except AttributeError:
        # older version of aiohttp
        url = ""
    info = {
        "url": url,
        "container": getattr(task_vars.container.get(), "id", None),
        "user": get_authenticated_user_id(),
        "db_id": getattr(task_vars.db.get(), "id", None),
        "request_uid": getattr(request, "_uid", None),
        "method": getattr(request, "method", None),
        "subscribers": [],
        "context": context,
        "event": event,
    }
    start = time.time() * 1000  # milliseconds
    # Lower priority value runs first; default priority is 100.
    subscriptions = sorted(
        self.subscriptions(map(providedBy, objects), provided),
        key=lambda sub: getattr(sub, "priority", 100),
    )
    info["lookup_time"] = (time.time() * 1000) - start
    info["found"] = len(subscriptions)
    results = []
    for subscription in subscriptions:
        start = time.time() * 1000
        if asyncio.iscoroutinefunction(subscription):
            results.append(await subscription(*objects))
        else:
            results.append(subscription(*objects))
        info["subscribers"].append({
            "duration": (time.time() * 1000) - start,
            "name": get_dotted_name(subscription)
        })
    # NOTE(review): ``start`` was reassigned inside the loop, so this
    # "duration" covers only the LAST subscriber, not the whole run —
    # confirm whether total duration was intended.
    info["duration"] = (time.time() * 1000) - start
    profile_logger.info(info)
    return results
def get(self, request=None):
    """Return the current request specific transaction

    Resolves the task-local request when none is given; when no request
    can be found at all, the last known transaction is returned.
    """
    if request is None:
        try:
            request = get_current_request()
        except RequestNotFound:
            request = None
    return self._last_txn if request is None else request._txn
def __init__(self, utility, context, response=noop_response, force=False,
             log_details=False, memory_tracking=False, request=None,
             bulk_size=40, full=False, reindex_security=False,
             mapping_only=False):
    """Set up a migration/reindex run.

    :param utility: catalog utility providing the ES connection
    :param context: content object the migration starts from
    :param response: sink for progress output
    :param bulk_size: docs per ES bulk request
    :param full: full reindex (incompatible with mapping_only)
    :param mapping_only: only migrate mappings, no reindex
    """
    self.utility = utility
    self.conn = utility.conn
    self.context = context
    self.response = response
    self.force = force
    self.full = full
    self.log_details = log_details
    self.memory_tracking = memory_tracking
    self.bulk_size = bulk_size
    self.reindex_security = reindex_security
    if mapping_only and full:
        raise Exception(
            'Can not do a full reindex and a mapping only migration')
    self.mapping_only = mapping_only
    if request is None:
        self.request = get_current_request()
        # reads only during migration; disable DB writes on this request
        self.request._db_write_enabled = False
    else:
        self.request = request
    # make sure that we don't cache requests...
    self.request._txn._cache = DummyCache(self.request._txn)
    self.container = self.request.container
    self.interaction = IInteraction(self.request)
    self.indexer = Indexer()
    # bookkeeping for progress reporting / reconciliation
    self.batch = {}
    self.indexed = 0
    self.processed = 0
    self.missing = []
    self.orphaned = []
    self.existing = []
    self.errors = []
    self.mapping_diff = {}
    self.start_time = self.index_start_time = time.time()
    self.reindex_futures = []
    self.status = 'started'
    self.active_task_id = None
    self.copied_docs = 0
    self.work_index_name = None
async def update_by_query(self, query):
    """Run *query* against the active index and any in-flight next index."""
    request = get_current_request()
    index_name = await self.get_index_name(request.container)
    resp = await self._update_by_query(query, index_name)
    # While a migration is running, mirror the update onto the next index
    # so it doesn't miss changes; serialize with the migration lock.
    next_index_name = await self.get_next_index_name(
        request.container, request=request)
    if next_index_name:
        async with self._migration_lock:
            await self._update_by_query(query, next_index_name)
    return resp
async def _query(self, context: IResource, query: ParsedQueryInfo,
                 unrestricted: bool = False):
    """Execute a parsed catalog query against Postgres.

    Returns ``{"items": [...], "items_total": n}``. When the query asks
    for ``fullobjects`` and a request is available, each hit is fully
    serialized via the default GET view; otherwise only stored metadata
    is loaded.
    """
    sql, arguments = self.build_query(context, query, ["id", "zoid", "json"],
                                      unrestricted=unrestricted)
    txn = get_current_transaction()
    conn = await txn.get_connection()
    results = []
    fullobjects = query["fullobjects"]
    container = find_container(context)
    if container is None:
        raise ContainerNotFound()
    try:
        context_url = get_object_url(container)
        request = get_current_request()
    except RequestNotFound:
        # outside a request: build paths instead of absolute URLs
        context_url = get_content_path(container)
        request = None
    logger.debug(f"Running search:\n{sql}\n{arguments}")
    async with txn.lock:
        records = await conn.fetch(sql, *arguments)
    for record in records:
        data = json.loads(record["json"])
        if fullobjects and request is not None and txn is not None:
            # Get Object
            obj = await txn.get(data["uuid"])
            # Serialize object
            view = DefaultGET(obj, request)
            result = await view()
        else:
            # NOTE(review): ``load_meatdata`` looks like a typo'd method
            # name ("metadata") defined elsewhere — confirm before renaming.
            result = self.load_meatdata(query, data)
        result["@name"] = record["id"]
        result["@uid"] = record["zoid"]
        result["@id"] = data[
            "@absolute_url"] = context_url + data["path"]
        results.append(result)
    # also do count...
    total = len(results)
    if total >= query["size"] or query["_from"] != 0:
        # page is full (or offset paging): run a real COUNT query
        sql, arguments = self.build_count_query(context, query,
                                                unrestricted=unrestricted)
        logger.debug(f"Running search:\n{sql}\n{arguments}")
        async with txn.lock:
            records = await conn.fetch(sql, *arguments)
        total = records[0]["count"]
    return {"items": results, "items_total": total}
def _get_current_tid(self):
    """Return the tid of the currently active transaction, or None.

    Makes sure to use the current committed tid so we are not one
    behind what was actually used to commit to the db.
    """
    # BUG FIX: ``tid`` must be initialized before the try block —
    # previously a RequestNotFound left it unbound and ``return tid``
    # raised NameError instead of returning None.
    tid = None
    try:
        request = get_current_request()
        txn = get_transaction(request)
        if txn:
            tid = txn._tid
    except RequestNotFound:
        pass
    return tid
def register_ws(self, ws):
    """Register a websocket, tagging it with the ``userId`` query param.

    :param ws: websocket object; gains a ``user_id`` attribute when the
        current request's query string carries ``userId``
    """
    from urllib.parse import parse_qs
    request = get_current_request()
    # Use the stdlib parser instead of manual split('&')/split('=');
    # this also handles percent-encoding and params without '=' (which
    # previously raised IndexError).
    params = parse_qs(request.query_string)
    user_ids = params.get('userId')
    if user_ids:
        ws.user_id = user_ids[0]
    self._webservices.append(ws)
    print(ws.user_id)
async def deserialize_cloud_field(field, value, context):
    """Decode a base64 web value and store it through the file manager."""
    request = get_current_request()
    value = convert_base64_to_binary(value)
    if IContentBehavior.implementedBy(context.__class__):
        # behaviors keep their data on the wrapped content object
        field = field.bind(context)
        context = context.context
    else:
        field = field.bind(context)
    file_manager = get_multi_adapter((context, request, field), IFileManager)
    return await file_manager.save_file(
        partial(_generator, value),
        content_type=value['content_type'],
        size=len(value['data']))
def convert_interfaces_to_schema(interfaces):
    """Serialize each interface to its JSON schema, keyed by identifier."""
    try:
        request = get_current_request()
    except RequestNotFound:
        # outside a request (e.g. startup): use a mocked one
        from guillotina.tests.utils import get_mocked_request
        request = get_mocked_request()
    return {
        iface.__identifier__: get_multi_adapter(
            (iface, request), ISchemaSerializeToJson).serialize()
        for iface in interfaces
    }
def convert_interfaces_to_schema(interfaces):
    """Build a mapping of interface identifier -> serialized JSON schema."""
    schema = {}
    try:
        request = get_current_request()
    except RequestNotFound:
        # no active request (tests/startup): fall back to a mock
        from guillotina.tests.utils import get_mocked_request
        request = get_mocked_request()
    for iface in interfaces:
        serializer = get_multi_adapter((iface, request),
                                       ISchemaSerializeToJson)
        schema[iface.__identifier__] = serializer.serialize()
    return schema
async def run_create(container):
    """Benchmark creating ITERATIONS items, one commit per item."""
    request = get_current_request()
    txn = get_transaction(request)
    tm = get_tm(request)
    print('Test content create')
    start = time.time()
    for _ in range(ITERATIONS):
        id_ = uuid.uuid4().hex
        await create_content_in_container(container, 'Item', id_)
        await tm.commit(txn=txn)
        # BUG FIX: bind the freshly begun transaction; previously the
        # return value of tm.begin() was discarded and the stale,
        # already-committed txn was re-committed on every iteration.
        txn = await tm.begin(request=request)
    end = time.time()
    print(f'Done with {ITERATIONS} in {end - start} seconds')
async def create_content_in_container(
        parent: Folder, type_: str, id_: str, request: IRequest=None,
        check_security=True, **kw) -> Resource:
    """Utility to create a content.

    This method is the one to use to create content.
    id_ can be None

    :param parent: where to create content inside of
    :param type_: content type to create
    :param id_: id to give content in parent object
    :param request: <optional>
    :param check_security: be able to disable security checks
    :raises NoPermissionToAdd: caller lacks the type's add permission
    :raises NotAllowedContentType: parent constrains forbid this type
    :raises ConflictIdOnContainer: id already exists (manual check only
        when the storage lacks unique constraints)
    """
    factory = get_cached_factory(type_)
    if check_security and factory.add_permission:
        # permission utility lookups are cached process-wide by name
        if factory.add_permission in PERMISSIONS_CACHE:
            permission = PERMISSIONS_CACHE[factory.add_permission]
        else:
            permission = query_utility(IPermission,
                                       name=factory.add_permission)
            PERMISSIONS_CACHE[factory.add_permission] = permission
        if request is None:
            request = get_current_request()
        if permission is not None and \
                not IInteraction(request).check_permission(permission.id,
                                                           parent):
            raise NoPermissionToAdd(str(parent), type_)
    constrains = IConstrainTypes(parent, None)
    if constrains is not None:
        if not constrains.is_type_allowed(type_):
            raise NotAllowedContentType(str(parent), type_)
    # We create the object with at least the ID
    obj = factory(id=id_, parent=parent)
    for key, value in kw.items():
        setattr(obj, key, value)
    txn = getattr(parent, '_p_jar', None) or get_transaction()
    if txn is None or not txn.storage.supports_unique_constraints:
        # need to manually check unique constraints
        if await parent.async_contains(obj.id):
            raise ConflictIdOnContainer(f'Duplicate ID: {parent} -> {obj.id}')
    obj.__new_marker__ = True
    await notify(BeforeObjectAddedEvent(obj, parent, id_))
    await parent.async_set(obj.id, obj)
    return obj
def get_principals_with_access_content(obj, request=None):
    """Return principal ids allowed guillotina.AccessContent on *obj*."""
    if obj is None:
        return {}
    if request is None:
        request = get_current_request()
    interaction = IInteraction(request)
    granted = interaction.cached_roles(obj, 'guillotina.AccessContent', 'o')
    # Keep only roles that are actually declared globally or locally.
    known_roles = role.global_roles() + role.local_roles()
    effective = [r for r in granted if r in known_roles]
    principals = interaction.cached_principals(
        obj, effective, 'guillotina.AccessContent', 'o')
    return list(principals.keys())
def _invalidated_interaction_cache(self):
    # Invalidate this threads interaction cache
    try:
        request = get_current_request()
    except RequestNotFound:
        # no request on this task: nothing cached to invalidate
        return
    interaction = IInteraction(request)
    if interaction is None:
        return
    # Some interaction implementations don't expose invalidate_cache.
    invalidate = getattr(interaction, 'invalidate_cache', None)
    if invalidate is not None:
        invalidate()
def get_roles_with_access_content(obj, request=None):
    """ Return the roles that has access to the content that are global roles"""
    if obj is None:
        return []
    if request is None:
        request = get_current_request()
    interaction = IInteraction(request)
    granted = interaction.cached_roles(obj, 'guillotina.AccessContent', 'o')
    known_roles = role.global_roles() + role.local_roles()
    # Keep only declared roles, preserving the granted-roles order.
    return [r for r in granted.keys() if r in known_roles]
async def start(self, dm):
    """Init an upload.

    upload_file_id : temporary blob name for the image being uploaded
    resumable_uri : uri of the GCS resumable-upload session
    """
    util = get_utility(IGCloudBlobStore)
    request = get_current_request()
    upload_file_id = dm.get("upload_file_id")
    if upload_file_id is not None:
        # a previous upload was interrupted; discard its blob first
        await self.delete_upload(upload_file_id)
    generator = get_multi_adapter((self.context, self.field),
                                  IFileNameGenerator)
    upload_file_id = await apply_coroutine(generator)
    init_url = "{}&name={}".format(
        UPLOAD_URL.format(bucket=await util.get_bucket_name()),
        quote_plus(upload_file_id),
    )
    creator = get_authenticated_user_id()
    metadata = json.dumps({
        "CREATOR": creator,
        "REQUEST": str(request),
        "NAME": dm.get("filename")
    })
    # NOTE(review): len(metadata) counts characters, not bytes — assumes
    # the JSON is ASCII; confirm for non-ASCII filenames/creators.
    call_size = len(metadata)
    # POST to the resumable-upload init endpoint; GCS returns the session
    # URI in the Location header on success (HTTP 200).
    async with util.session.post(
        init_url,
        headers={
            "AUTHORIZATION": "Bearer {}".format(
                await util.get_access_token()),
            "X-Upload-Content-Type": to_str(dm.content_type),
            "X-Upload-Content-Length": str(dm.size),
            "Content-Type": "application/json; charset=UTF-8",
            "Content-Length": str(call_size),
        },
        data=metadata,
    ) as call:
        if call.status != 200:
            text = await call.text()
            raise GoogleCloudException(f"{call.status}: {text}")
        resumable_uri = call.headers["Location"]
    # persist session state so chunk uploads can resume from offset 0
    await dm.update(current_upload=0,
                    resumable_uri=resumable_uri,
                    upload_file_id=upload_file_id)
def __init__(self, txn):
    """Bind to *txn*, escalating priority with the request retry count."""
    self._txn = txn
    self._conn = txn._db_conn
    self._storage = txn._manager._storage
    self._status = "none"
    try:
        retries = getattr(get_current_request(), "_retry_attempt", 0)
    except RequestNotFound:
        retries = 0
    # More retries -> higher priority so retried work wins contention.
    if retries > 1:
        self._priority = "HIGH"
    elif retries == 1:
        self._priority = "NORMAL"
    else:
        self._priority = "LOW"
def __init__(self, txn):
    """Bind to *txn*; priority scales with the request's retry attempts."""
    self._txn = txn
    self._conn = txn._db_conn
    self._storage = txn._manager._storage
    self._status = 'none'
    self._priority = 'LOW'
    try:
        request = get_current_request()
    except RequestNotFound:
        # no request: keep default LOW priority
        return
    attempts = getattr(request, '_retry_attempt', 0)
    if attempts == 1:
        self._priority = 'NORMAL'
    elif attempts > 1:
        self._priority = 'HIGH'
def get_future():
    """Return the request's shared indexer future, creating it if needed.

    Returns None when no container/search utility is available.
    """
    request = get_current_request()
    try:
        container = request.container
        search = query_utility(ICatalogUtility)
    except (AttributeError, KeyError):
        return None
    if not search:
        return None  # no search configured
    fut = request.get_future('indexer')
    if fut is None:
        fut = IndexFuture(container, request)
        request.add_future('indexer', fut)
    return fut
async def begin(self, request=None):
    """Starts a new transaction.

    Re-uses the request's finished transaction object when possible;
    otherwise creates a fresh Transaction. The returned txn has had
    tpc_begin() awaited and is registered on the request (when any).
    """
    if request is None:
        try:
            request = get_current_request()
        except RequestNotFound:
            pass
    user = None
    txn = None
    # already has txn registered, as long as connection is closed, it
    # is safe
    if (getattr(request, '_txn', None) is not None and
            request._txn.status in (Status.ABORTED, Status.COMMITTED,
                                    Status.CONFLICT)):
        # re-use txn if possible
        txn = request._txn
        txn.status = Status.ACTIVE
        # a lingering, unused connection on the reused txn is spurious;
        # close it so tpc_begin acquires a fresh one
        if (txn._db_conn is not None and
                getattr(txn._db_conn, '_in_use', None) is None):
            try:
                await self._close_txn(txn)
            except Exception:
                logger.warn('Unable to close spurious connection',
                            exc_info=True)
    else:
        txn = Transaction(self, request=request)
    self._last_txn = txn
    if request is not None:
        # register tm and txn with request
        request._tm = self
        request._txn = txn
        user = get_authenticated_user_id(request)
        if user is not None:
            txn.user = user
    await txn.tpc_begin()
    return txn
def log(self, *args, **kwargs):
    """Delegate to the underlying logger, enriching ``extra`` with
    request context (url, user, container, event id, user agent).

    ``request`` and ``eid`` may be passed as keyword arguments and are
    consumed here rather than forwarded.
    """
    from guillotina.utils import get_authenticated_user_id
    from guillotina.utils import get_current_request
    from guillotina.exceptions import RequestNotFound
    # NOTE(review): ``name`` is not defined in this function's scope —
    # this will raise NameError unless it is a closure variable or
    # module global at the definition site. Confirm where ``name``
    # (the log level/method name) is supposed to come from.
    func = getattr(self._logger, name)
    request = kwargs.pop('request', None)
    eid = kwargs.pop('eid', None)
    if request is None:
        try:
            request = get_current_request()
        except RequestNotFound:
            pass
    if request is not None:
        if eid is None:
            eid = uuid.uuid4().hex
        extra = kwargs.get('extra', {})
        try:
            url = request.url.human_repr()
        except AttributeError:
            # older version of aiohttp
            url = request.path
        try:
            agent = request.headers['User-Agent']
        except (AttributeError, KeyError):
            agent = 'Unknown'
        extra.update({
            'method': request.method,
            'url': url,
            'container': getattr(request, '_container_id', None),
            'account': getattr(request, '_container_id', None),
            'db_id': getattr(request, '_db_id', None),
            'user': get_authenticated_user_id(request) or 'Anonymous',
            'eid': eid,
            'agent': agent,
            # in case a fake req object doesn't use the guillotina Request object
            'request_uid': getattr(request, 'uid', None)
        })
        kwargs['extra'] = extra
    return func(*args, **kwargs)
async def deserialize_cloud_field(field, value, context):
    # It supports base64 web value or a dict
    #
    # dict values update an existing file object's metadata in place
    # (and optionally its data); non-dict values are treated as base64
    # web payloads and saved through the file manager.
    data_context = context
    if IContentBehavior.implementedBy(context.__class__):
        # behaviors store their data on the wrapped content object
        field = field.bind(context)
        context = context.context
    else:
        field = field.bind(context)
    if isinstance(value, dict):
        try:
            file_ob = field.get(data_context)
        except AttributeError:
            file_ob = None
        if file_ob is not None:
            # update file fields
            for key, item_value in value.items():
                if key in serialize_mappings:
                    setattr(file_ob, serialize_mappings[key], item_value)
            # mark the owning object dirty so changes persist
            data_context._p_register()
            if 'data' in value:
                value['data'] = base64.b64decode(value['data'])
            else:
                # already updated necessary values
                return file_ob
    else:
        # base64 web value
        value = convert_base64_to_binary(value)
    # There is not file and expecting a dict
    # 'data', 'encoding', 'content-type', 'filename'
    request = get_current_request()
    file_manager = get_multi_adapter((context, request, field), IFileManager)
    # accept both 'content_type' and legacy 'content-type' keys
    content_type = value.get('content_type', value.get('content-type'))
    filename = value.get('filename', None)
    val = await file_manager.save_file(
        partial(_generator, value),
        content_type=content_type,
        size=len(value['data']),
        filename=filename)
    return val
def __init__(self, context):
    """Adapt *context*, resolving the request from the current task."""
    super().__init__(context, get_current_request())
def moved_object(obj, event):
    """Schedule an async reindex of a moved object and its children."""
    current_request = get_current_request()
    reindex_in_future(obj, current_request, True)
def get_conflict_summary(self, oid, txn, old_serial, writer): from guillotina.utils import get_current_request req = get_current_request() max_attempts = app_settings.get('conflict_retry_attempts', 3) attempts = getattr(req, '_retry_attempt', 0) return f'''Object ID: {oid}