def _get_queue_events_data(email):
    # type: (Text) -> Dict[str, Dict[str, str]]
    """Build and run a fetch_events query for *email* that registers a
    brand-new event queue (queue_id is None) and polls it once without
    blocking."""
    user_profile = UserProfile.objects.filter(email=email).first()
    # Settings for the fresh queue that fetch_events will register.
    new_queue_data = {
        'apply_markdown': True,
        'narrow': [],
        'user_profile_email': user_profile.email,
        'all_public_streams': False,
        'realm_id': user_profile.realm_id,
        'client_type_name': 'website',
        'event_types': None,
        'user_profile_id': user_profile.id,
        'queue_timeout': 0,
        'last_connection_time': time.time(),
    }
    events_query = {
        'queue_id': None,
        'narrow': [],
        'handler_id': 0,
        'user_profile_email': user_profile.email,
        'all_public_streams': False,
        'client_type_name': 'website',
        'new_queue_data': new_queue_data,
        'last_event_id': -1,
        'event_types': None,
        'user_profile_id': user_profile.id,
        'dont_block': True,
        'lifespan_secs': 0,
    }
    return fetch_events(events_query)
def _get_queue_events_data(email: str) -> Dict[str, Dict[str, str]]:
    """Build and run a fetch_events query for *email* that registers a
    brand-new event queue (queue_id is None) and polls it once without
    blocking."""
    user_profile = UserProfile.objects.filter(email=email).first()
    # Settings for the fresh queue that fetch_events will register.
    new_queue_data = {
        'apply_markdown': True,
        'client_gravatar': False,
        'narrow': [],
        'user_profile_email': user_profile.email,
        'all_public_streams': False,
        'realm_id': user_profile.realm_id,
        'client_type_name': 'website',
        'event_types': None,
        'user_profile_id': user_profile.id,
        'queue_timeout': 0,
        'last_connection_time': time.time(),
    }
    events_query = {
        'queue_id': None,
        'narrow': [],
        'handler_id': 0,
        'user_profile_email': user_profile.email,
        'all_public_streams': False,
        'client_type_name': 'website',
        'new_queue_data': new_queue_data,
        'last_event_id': -1,
        'event_types': None,
        'user_profile_id': user_profile.id,
        'dont_block': True,
        'lifespan_secs': 0,
    }
    return fetch_events(events_query)
def get_events_backend(
        request: HttpRequest, user_profile: UserProfile, handler: BaseHandler,
        user_client: Optional[Client] = REQ(converter=get_client, default=None),
        last_event_id: Optional[int] = REQ(converter=int, default=None),
        # queue_id arrives as the raw string parameter (no converter or
        # validator), so its type is Optional[str], not a list type.
        queue_id: Optional[str] = REQ(default=None),
        apply_markdown: bool = REQ(default=False, validator=check_bool),
        client_gravatar: bool = REQ(default=False, validator=check_bool),
        all_public_streams: bool = REQ(default=False, validator=check_bool),
        # check_list(check_string) validates a list of strings, so the
        # annotation must be an Optional list, not Optional[str].
        event_types: Optional[List[str]] = REQ(default=None,
                                               validator=check_list(check_string)),
        dont_block: bool = REQ(default=False, validator=check_bool),
        narrow: Iterable[Sequence[str]] = REQ(default=[], validator=check_list(None)),
        lifespan_secs: int = REQ(default=0, converter=int)
) -> Union[HttpResponse, _RespondAsynchronously]:
    """Poll (or long-poll) the user's Tornado event queue.

    When queue_id is None, a new queue is registered first, configured by
    apply_markdown/client_gravatar/all_public_streams/narrow.  Returns a
    JSON success response with events, or RespondAsynchronously when
    Tornado should hold the connection open until events arrive; an
    "error" result from fetch_events is re-raised for the caller.
    """
    # Internal Django=>Tornado requests pass an explicit user_client;
    # normal requests derive the Client from the request itself.
    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

    events_query = dict(user_profile_id=user_profile.id,
                        user_profile_email=user_profile.email,
                        queue_id=queue_id,
                        last_event_id=last_event_id,
                        event_types=event_types,
                        client_type_name=valid_user_client.name,
                        all_public_streams=all_public_streams,
                        lifespan_secs=lifespan_secs,
                        narrow=narrow,
                        dont_block=dont_block,
                        handler_id=handler.handler_id)

    if queue_id is None:
        # No existing queue: describe the one fetch_events should register.
        events_query['new_queue_data'] = dict(
            user_profile_id=user_profile.id,
            realm_id=user_profile.realm_id,
            user_profile_email=user_profile.email,
            event_types=event_types,
            client_type_name=valid_user_client.name,
            apply_markdown=apply_markdown,
            client_gravatar=client_gravatar,
            all_public_streams=all_public_streams,
            queue_timeout=lifespan_secs,
            last_connection_time=time.time(),
            narrow=narrow)

    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

    if result["type"] == "async":
        # Tornado will long-poll this request: stash it on the handler
        # and tell the middleware not to write a response now.
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])
def get_events_backend(request: HttpRequest, user_profile: UserProfile, handler: BaseHandler,
                       user_client: Optional[Client]=REQ(converter=get_client, default=None),
                       last_event_id: Optional[int]=REQ(converter=int, default=None),
                       # queue_id arrives as the raw string parameter (no
                       # converter), so its type is Optional[str], not a list.
                       queue_id: Optional[str]=REQ(default=None),
                       apply_markdown: bool=REQ(default=False, validator=check_bool),
                       client_gravatar: bool=REQ(default=False, validator=check_bool),
                       all_public_streams: bool=REQ(default=False, validator=check_bool),
                       # check_list(check_string) validates a list of strings,
                       # so the annotation must be a list type, not bare str.
                       event_types: Optional[List[str]]=REQ(default=None,
                                                            validator=check_list(check_string)),
                       dont_block: bool=REQ(default=False, validator=check_bool),
                       narrow: Iterable[Sequence[str]]=REQ(default=[], validator=check_list(None)),
                       lifespan_secs: int=REQ(default=0, converter=int)
                       ) -> Union[HttpResponse, _RespondAsynchronously]:
    """Poll (or long-poll) the user's Tornado event queue.

    When queue_id is None, a new queue is registered first, configured by
    apply_markdown/client_gravatar/all_public_streams/narrow.  Returns a
    JSON success response with events, or RespondAsynchronously when
    Tornado should hold the connection open; an "error" result from
    fetch_events is re-raised for the caller.
    """
    # Internal Django=>Tornado requests pass an explicit user_client;
    # normal requests derive the Client from the request itself.
    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

    # PEP8: no spaces around '=' in keyword arguments.
    events_query = dict(user_profile_id=user_profile.id,
                        user_profile_email=user_profile.email,
                        queue_id=queue_id,
                        last_event_id=last_event_id,
                        event_types=event_types,
                        client_type_name=valid_user_client.name,
                        all_public_streams=all_public_streams,
                        lifespan_secs=lifespan_secs,
                        narrow=narrow,
                        dont_block=dont_block,
                        handler_id=handler.handler_id)

    if queue_id is None:
        # No existing queue: describe the one fetch_events should register.
        events_query['new_queue_data'] = dict(
            user_profile_id=user_profile.id,
            realm_id=user_profile.realm_id,
            user_profile_email=user_profile.email,
            event_types=event_types,
            client_type_name=valid_user_client.name,
            apply_markdown=apply_markdown,
            client_gravatar=client_gravatar,
            all_public_streams=all_public_streams,
            queue_timeout=lifespan_secs,
            last_connection_time=time.time(),
            narrow=narrow)

    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

    if result["type"] == "async":
        # Tornado will long-poll this request: stash it on the handler
        # and tell the middleware not to write a response now.
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])
def get_events_backend(request, user_profile, handler,
                       user_client=REQ(converter=get_client, default=None),
                       last_event_id=REQ(converter=int, default=None),
                       queue_id=REQ(default=None),
                       apply_markdown=REQ(default=False, validator=check_bool),
                       all_public_streams=REQ(default=False, validator=check_bool),
                       event_types=REQ(default=None, validator=check_list(check_string)),
                       dont_block=REQ(default=False, validator=check_bool),
                       narrow=REQ(default=[], validator=check_list(None)),
                       lifespan_secs=REQ(default=0, converter=int)):
    # type: (HttpRequest, UserProfile, BaseHandler, Optional[Client], Optional[int], Optional[text_type], bool, bool, Optional[List[text_type]], bool, Iterable[Sequence[text_type]], int) -> Union[HttpResponse, _RespondAsynchronously]
    # NOTE: queue_id is a plain string id (REQ has no converter) and
    # event_types is validated by check_list(check_string), so the types
    # in the comment above reflect that (they were previously swapped).
    """Poll (or long-poll) the user's Tornado event queue.

    Registers a new queue first when queue_id is None.  Returns a JSON
    success response with events, RespondAsynchronously when Tornado
    should hold the connection open, or a JSON error from fetch_events.
    """
    # Internal Django=>Tornado requests pass an explicit user_client;
    # normal requests fall back to the Client derived from the request.
    if user_client is None:
        user_client = request.client
    events_query = dict(user_profile_id=user_profile.id,
                        user_profile_email=user_profile.email,
                        queue_id=queue_id,
                        last_event_id=last_event_id,
                        event_types=event_types,
                        client_type_name=user_client.name,
                        all_public_streams=all_public_streams,
                        lifespan_secs=lifespan_secs,
                        narrow=narrow,
                        dont_block=dont_block,
                        handler_id=handler.handler_id)
    if queue_id is None:
        # No existing queue: describe the one fetch_events should register.
        events_query['new_queue_data'] = dict(
            user_profile_id=user_profile.id,
            realm_id=user_profile.realm.id,
            user_profile_email=user_profile.email,
            event_types=event_types,
            client_type_name=user_client.name,
            apply_markdown=apply_markdown,
            all_public_streams=all_public_streams,
            queue_timeout=lifespan_secs,
            last_connection_time=time.time(),
            narrow=narrow)
    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]
    if result["type"] == "async":
        # Tornado will long-poll this request; don't respond now.
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        return json_error(result["message"])
    return json_success(result["response"])
def get_events_backend(request, user_profile, handler,
                       user_client=REQ(converter=get_client, default=None),
                       last_event_id=REQ(converter=int, default=None),
                       queue_id=REQ(default=None),
                       apply_markdown=REQ(default=False, validator=check_bool),
                       all_public_streams=REQ(default=False, validator=check_bool),
                       event_types=REQ(default=None, validator=check_list(check_string)),
                       dont_block=REQ(default=False, validator=check_bool),
                       narrow=REQ(default=[], validator=check_list(None)),
                       lifespan_secs=REQ(default=0, converter=int)):
    # type: (HttpRequest, UserProfile, BaseHandler, Optional[Client], Optional[int], Optional[List[text_type]], bool, bool, Optional[text_type], bool, Iterable[Sequence[text_type]], int) -> Union[HttpResponse, _RespondAsynchronously]
    """Poll (or long-poll) the user's Tornado event queue, registering a
    new queue first when queue_id is None."""
    # Internal Django=>Tornado requests supply user_client explicitly;
    # otherwise use the Client derived from the request.
    if user_client is None:
        user_client = request.client

    events_query = {
        'user_profile_id': user_profile.id,
        'user_profile_email': user_profile.email,
        'queue_id': queue_id,
        'last_event_id': last_event_id,
        'event_types': event_types,
        'client_type_name': user_client.name,
        'all_public_streams': all_public_streams,
        'lifespan_secs': lifespan_secs,
        'narrow': narrow,
        'dont_block': dont_block,
        'handler_id': handler.handler_id,
    }
    if queue_id is None:
        # Describe the new queue that fetch_events should register.
        events_query['new_queue_data'] = {
            'user_profile_id': user_profile.id,
            'realm_id': user_profile.realm.id,
            'user_profile_email': user_profile.email,
            'event_types': event_types,
            'client_type_name': user_client.name,
            'apply_markdown': apply_markdown,
            'all_public_streams': all_public_streams,
            'queue_timeout': lifespan_secs,
            'last_connection_time': time.time(),
            'narrow': narrow,
        }

    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

    if result["type"] == "async":
        # Tornado will long-poll this request; don't respond now.
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        return json_error(result["message"])
    return json_success(result["response"])
def get_events_backend(
        request: HttpRequest, user_profile: UserProfile,
        # user_client is intended only for internal Django=>Tornado requests
        # and thus shouldn't be documented for external use.
        user_client: Optional[Client] = REQ(converter=get_client, default=None,
                                            intentionally_undocumented=True),
        last_event_id: Optional[int] = REQ(converter=int, default=None),
        queue_id: Optional[str] = REQ(default=None),
        # apply_markdown, client_gravatar, all_public_streams, and various
        # other parameters are only used when registering a new queue via this
        # endpoint. This is a feature used primarily by get_events_internal
        # and not expected to be used by third-party clients.
        apply_markdown: bool = REQ(default=False, validator=check_bool,
                                   intentionally_undocumented=True),
        client_gravatar: bool = REQ(default=False, validator=check_bool,
                                    intentionally_undocumented=True),
        slim_presence: bool = REQ(default=False, validator=check_bool,
                                  intentionally_undocumented=True),
        all_public_streams: bool = REQ(default=False, validator=check_bool,
                                       intentionally_undocumented=True),
        event_types: Optional[Sequence[str]] = REQ(
            default=None, validator=check_list(check_string),
            intentionally_undocumented=True),
        dont_block: bool = REQ(default=False, validator=check_bool),
        narrow: Iterable[Sequence[str]] = REQ(default=[], validator=check_list(
            check_list(check_string)), intentionally_undocumented=True),
        lifespan_secs: int = REQ(default=0, converter=to_non_negative_int,
                                 intentionally_undocumented=True),
        bulk_message_deletion: bool = REQ(default=False, validator=check_bool,
                                          intentionally_undocumented=True),
) -> HttpResponse:
    """Poll (or long-poll) the user's Tornado event queue.

    When queue_id is None, a new queue is registered first using the
    apply_markdown/client_gravatar/slim_presence/... settings.  An
    "async" result from fetch_events turns this into a long-poll (see
    below); an "error" result is re-raised for the caller.
    """
    # Extract the Tornado handler from the request
    handler: AsyncDjangoHandler = request._tornado_handler

    # Internal Django=>Tornado requests pass an explicit user_client;
    # normal requests derive the Client from the request itself.
    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

    events_query = dict(
        user_profile_id=user_profile.id,
        queue_id=queue_id,
        last_event_id=last_event_id,
        event_types=event_types,
        client_type_name=valid_user_client.name,
        all_public_streams=all_public_streams,
        lifespan_secs=lifespan_secs,
        narrow=narrow,
        dont_block=dont_block,
        handler_id=handler.handler_id,
    )

    if queue_id is None:
        # No existing queue: describe the one fetch_events should register.
        events_query["new_queue_data"] = dict(
            user_profile_id=user_profile.id,
            realm_id=user_profile.realm_id,
            event_types=event_types,
            client_type_name=valid_user_client.name,
            apply_markdown=apply_markdown,
            client_gravatar=client_gravatar,
            slim_presence=slim_presence,
            all_public_streams=all_public_streams,
            queue_timeout=lifespan_secs,
            last_connection_time=time.time(),
            narrow=narrow,
            bulk_message_deletion=bulk_message_deletion,
        )

    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data["extra"] = result["extra_log_data"]

    if result["type"] == "async":
        # Mark this response with .asynchronous; this will result in
        # Tornado discarding the response and instead long-polling the
        # request. See zulip_finish for more design details.
        handler._request = request
        response = json_success()
        response.asynchronous = True
        return response
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])
def get_events_backend(request: HttpRequest, user_profile: UserProfile, handler: BaseHandler,
                       # user_client is intended only for internal Django=>Tornado requests
                       # and thus shouldn't be documented for external use.
                       user_client: Optional[Client]=REQ(converter=get_client, default=None,
                                                         intentionally_undocumented=True),
                       last_event_id: Optional[int]=REQ(converter=int, default=None),
                       queue_id: Optional[str]=REQ(default=None),
                       # apply_markdown, client_gravatar, all_public_streams, and various
                       # other parameters are only used when registering a new queue via this
                       # endpoint. This is a feature used primarily by get_events_internal
                       # and not expected to be used by third-party clients.
                       apply_markdown: bool=REQ(default=False, validator=check_bool,
                                                intentionally_undocumented=True),
                       client_gravatar: bool=REQ(default=False, validator=check_bool,
                                                 intentionally_undocumented=True),
                       all_public_streams: bool=REQ(default=False, validator=check_bool,
                                                    intentionally_undocumented=True),
                       # check_list(check_string) validates a list of strings, so the
                       # annotation must be a sequence type, not a bare str.
                       event_types: Optional[Sequence[str]]=REQ(default=None,
                                                                validator=check_list(check_string),
                                                                intentionally_undocumented=True),
                       dont_block: bool=REQ(default=False, validator=check_bool),
                       narrow: Iterable[Sequence[str]]=REQ(default=[], validator=check_list(None),
                                                           intentionally_undocumented=True),
                       lifespan_secs: int=REQ(default=0, converter=to_non_negative_int,
                                              intentionally_undocumented=True)
                       ) -> Union[HttpResponse, _RespondAsynchronously]:
    """Poll (or long-poll) the user's Tornado event queue.

    When queue_id is None, a new queue is registered first, configured by
    apply_markdown/client_gravatar/all_public_streams/narrow.  Returns a
    JSON success response with events, or RespondAsynchronously when
    Tornado should hold the connection open; an "error" result from
    fetch_events is re-raised for the caller.
    """
    # Internal Django=>Tornado requests pass an explicit user_client;
    # normal requests derive the Client from the request itself.
    if user_client is None:
        valid_user_client = request.client
    else:
        valid_user_client = user_client

    # PEP8: no spaces around '=' in keyword arguments.
    events_query = dict(user_profile_id=user_profile.id,
                        user_profile_email=user_profile.email,
                        queue_id=queue_id,
                        last_event_id=last_event_id,
                        event_types=event_types,
                        client_type_name=valid_user_client.name,
                        all_public_streams=all_public_streams,
                        lifespan_secs=lifespan_secs,
                        narrow=narrow,
                        dont_block=dont_block,
                        handler_id=handler.handler_id)

    if queue_id is None:
        # No existing queue: describe the one fetch_events should register.
        events_query['new_queue_data'] = dict(
            user_profile_id=user_profile.id,
            realm_id=user_profile.realm_id,
            user_profile_email=user_profile.email,
            event_types=event_types,
            client_type_name=valid_user_client.name,
            apply_markdown=apply_markdown,
            client_gravatar=client_gravatar,
            all_public_streams=all_public_streams,
            queue_timeout=lifespan_secs,
            last_connection_time=time.time(),
            narrow=narrow)

    result = fetch_events(events_query)
    if "extra_log_data" in result:
        request._log_data['extra'] = result["extra_log_data"]

    if result["type"] == "async":
        # Tornado will long-poll this request: stash it on the handler
        # and tell the middleware not to write a response now.
        handler._request = request
        return RespondAsynchronously
    if result["type"] == "error":
        raise result["exception"]
    return json_success(result["response"])
def wrapped_fetch_events(**query: Any) -> Dict[str, Any]:
    """Run event_queue.fetch_events with *query*, then schedule
    process_events to run soon on the current asyncio event loop."""
    result = event_queue.fetch_events(**query)
    loop = asyncio.get_running_loop()
    loop.call_soon(process_events)
    return result