def send_browser_message(self, user):
    message = {
        "id": self.id,
        "subject": str(self.subject),
        "body": html2text(self.body),
        "created": self.created.strftime("%a %d %b %Y %H:%M"),
    }
    # Encode and send that message to the whole channels Group for our
    # Websocket. Note how you can send to a channel or Group from any part
    # of Django, not just inside a consumer.
    logger.info("Sending browser notification to %s", user.username)
    if not DJANGO2:
        from channels import Group
        Group(groupname(user.username)).send({
            # WebSocket text frame, with JSON content
            "text": json.dumps(message),
        })
    else:
        from channels.layers import get_channel_layer
        channel_layer = get_channel_layer()
        from asgiref.sync import async_to_sync
        async_to_sync(channel_layer.group_send)(
            user.username,
            {"type": "send.notification", "text": message['body']},
        )
    return
def send_browser_message_for_all_users(self, user):
    message = {
        "id": self.id,
        "subject": self.subject,
        "body": html2text(self.body),
        "created": self.created.strftime("%a %d %b %Y %H:%M"),
    }
    # Encode and send that message to the whole channels Group for our
    # liveblog. Note how you can send to a channel or Group from any part
    # of Django, not just inside a consumer.
    if not DJANGO2:
        from channels import Group
        Group(PUBLIC_GROUP).send({
            # WebSocket text frame, with JSON content
            "text": json.dumps(message),
        })
    else:
        from channels.layers import get_channel_layer
        channel_layer = get_channel_layer()
        from asgiref.sync import async_to_sync
        async_to_sync(channel_layer.group_send)(
            PUBLIC_GROUP,
            {"text": json.dumps(message)},
        )
    return
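# A minimal consumer-side counterpart to the two senders above (a sketch, not part
# of the original code): in Channels 2+, group_send only reaches consumers whose
# channel was added to the group, and the message's "type" key ("send.notification"
# above) is dispatched to a handler method with the dot replaced by an underscore.
# The group and handler names mirror the snippets above; everything else here is an
# assumption for illustration.
from asgiref.sync import async_to_sync
from channels.generic.websocket import WebsocketConsumer


class NotificationConsumer(WebsocketConsumer):
    def connect(self):
        # Join the per-user group that send_browser_message() targets.
        async_to_sync(self.channel_layer.group_add)(
            self.scope["user"].username, self.channel_name
        )
        self.accept()

    def send_notification(self, event):
        # Invoked for {"type": "send.notification", "text": ...} group messages.
        self.send(text_data=event["text"])

    def disconnect(self, close_code):
        async_to_sync(self.channel_layer.group_discard)(
            self.scope["user"].username, self.channel_name
        )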
def post(self, request):
    """Create a Spotify playlist for the authenticated user and start an
    asynchronous search to populate it."""
    body = json.loads(request.body)
    print("Got POST request with body: ", body)

    # VALIDATE TOKEN
    token = self._get_token(request)
    spotify_user = SpotifyUser(token)
    if not spotify_user.is_token_valid():
        return HttpResponseForbidden('Spotify token is not valid')

    # VALIDATE SEARCH ARGS
    # Parse request for search arguments
    search_args = body
    if not self._valid_search_args(search_args):
        return HttpResponseBadRequest("Bad Search Arguments")

    # Get Playlist ID
    playlistURL = spotify_user.get_spotify_playlist(erase=True)

    # Asynchronously populate playlist
    async_to_sync(get_channel_layer().send)(
        "search",
        {
            "type": "start_search",
            "playlist": playlistURL,
            "search_args": search_args,
            "token": token,
        },
    )

    return JsonResponse({"playlist": playlistURL})
def websocket_disconnect(self, message):
    # Leave users online group
    Presence.objects.remove_presence_from_disconnect(self.channel_name)
    async_to_sync(self.channel_layer.group_discard)(
        self.group_name, self.channel_name
    )
def _subscribe_to_playlist(self, playlist_id):
    # Save this ID for the rest of the subscription so that we can disconnect later.
    print("Client Subscribed to playlist ", playlist_id)
    self.playlist_id = playlist_id
    group_channel_name = self.playlist_id
    current_client_channel_name = self.channel_name
    async_to_sync(self.channel_layer.group_add)(
        group_channel_name, current_client_channel_name
    )
def disconnect(self, close_code):
    async_to_sync(self.channel_layer.group_discard)(
        self.scope['user'].username, self.channel_name
    )

# class StatsConsumer(WebsocketConsumer):
#
#     def connect(self):
#         async_to_sync(self.channel_layer.group_add)(self.scope['user'].username, self.channel_name)
#
#         self.accept()
#
#     def receive(self, text_data):
#         key = '-'.join(('django-mstats-processlist', str(self.scope['user'].uid)))
#         cache.set(key, 'start', timeout=None)
#         show_processlist.delay(host=text_data, user=self.scope['user'].username, key=key)
#
#         async_to_sync(self.channel_layer.group_send)(
#             self.scope['user'].username,
#             {
#                 "type": "user.message",
#                 "text": text_data,
#             },
#         )
#
#     def user_message(self, event):
#         self.send(text_data=event["text"])
#
#     def disconnect(self, close_code):
#         key = '-'.join(('django-mstats-processlist', str(self.scope['user'].uid)))
#         cache.set(key, 'end', timeout=None)
#         async_to_sync(self.channel_layer.group_discard)(self.scope['user'].username, self.channel_name)
def handle_abort(self, obj):
    """Handle an incoming ``Data`` abort processing request.

    .. IMPORTANT::

        This only makes the manager's state consistent and doesn't affect
        the Data object in any way. Any changes to the Data must be applied
        over the ``handle_update`` method.

    :param obj: The Channels message object. Command object format:

        .. code-block:: none

            {
                'command': 'abort',
                'data_id': [id of the :class:`~resolwe.flow.models.Data` object
                            this command was triggered by],
            }
    """
    async_to_sync(consumer.send_event)({
        WorkerProtocol.COMMAND: WorkerProtocol.ABORT,
        WorkerProtocol.DATA_ID: obj[ExecutorProtocol.DATA_ID],
        WorkerProtocol.FINISH_COMMUNICATE_EXTRA: {
            'executor': getattr(settings, 'FLOW_EXECUTOR', {}).get(
                'NAME', 'resolwe.flow.executors.local'),
        },
    })
def send_ws_action_chapter_member(sender, instance, created, **kwargs):
    channel_layer = get_channel_layer()
    async_to_sync(channel_layer.group_send)(
        "online_users",
        {
            'type': 'action',
            'payload': {
                'type': 'chapter_member',
                'id': instance.chapter_id,
                'slug': instance.member.slug
            }
        }
    )
    if instance.member.discord_id is not None:
        # send new chapter member alert to discord
        async_to_sync(channel_layer.group_send)(
            "discord_bot",
            {
                'type': 'push_update',
                'payload': {
                    'type': 'chapter_member',
                    'id': instance.member,
                    'discord_id': instance.member.discord_id,
                }
            }
        )
def get_whoami_data(self):
    """
    Appends the user id to the context. Uses None for the anonymous user.
    Appends also a flag if guest users are enabled in the config.
    Appends also the serialized user if available.
    """
    user_id = self.request.user.pk or 0
    guest_enabled = anonymous_is_enabled()

    if user_id:
        user_data = async_to_sync(element_cache.get_element_restricted_data)(
            user_id, self.request.user.get_collection_string(), user_id
        )
        group_ids = user_data["groups_id"] or [GROUP_DEFAULT_PK]
    else:
        user_data = None
        group_ids = [GROUP_DEFAULT_PK] if guest_enabled else []

    # collect all permissions
    permissions: Set[str] = set()
    group_all_data = async_to_sync(element_cache.get_collection_full_data)(
        "users/group"
    )
    for group_id in group_ids:
        permissions.update(group_all_data[group_id]["permissions"])

    return {
        "user_id": user_id or None,
        "guest_enabled": guest_enabled,
        "user": user_data,
        "permissions": list(permissions),
    }
def connect(self):
    self.groupname = "likes"
    self.accept()
    async_to_sync(self.channel_layer.group_add)(
        self.groupname, self.channel_name
    )
def get_key_to_id(self) -> Dict[str, int]:
    """
    Returns the key_to_id dict. Builds it, if it does not exist.
    """
    if self.key_to_id is None:
        async_to_sync(self.build_key_to_id)()
    self.key_to_id = cast(Dict[str, int], self.key_to_id)
    return self.key_to_id
def connect(self):
    if self.authentication_needed():
        async_to_sync(self.channel_layer.group_add)(
            self.group_name(), self.channel_name
        )
        self.accept()
    else:
        pass
def receive(self, text_data):
    async_to_sync(self.channel_layer.group_send)(
        self.scope['user'].username,
        {
            "type": "user.message",
            "text": text_data,
        },
    )
def announce_likes(sender, instance, created, **kwargs):
    if created:
        channel_layer = get_channel_layer()
        async_to_sync(channel_layer.group_send)(
            "likes",
            {
                "type": "like_message",
                "message": instance.contents,
            }
        )
def notify_new_post(username, text):
    async_to_sync(channel_layer.group_send)(
        GROUP_NAME,
        {
            'type': 'update_feed',
            'text': text,
            'username': username
        }
    )
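# notify_new_post() above relies on a module-level channel layer and group name.
# A minimal sketch of that setup and of the matching consumer handler: GROUP_NAME
# and the update_feed handler name come from the snippet above, while the group
# value "feed" and the FeedConsumer class are assumptions for illustration.
import json

from asgiref.sync import async_to_sync
from channels.generic.websocket import WebsocketConsumer
from channels.layers import get_channel_layer

GROUP_NAME = "feed"
channel_layer = get_channel_layer()


class FeedConsumer(WebsocketConsumer):
    def connect(self):
        async_to_sync(self.channel_layer.group_add)(GROUP_NAME, self.channel_name)
        self.accept()

    def update_feed(self, event):
        # Dispatched for {'type': 'update_feed', ...} messages from notify_new_post().
        self.send(text_data=json.dumps(
            {"username": event["username"], "text": event["text"]}
        ))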
def connect(self):
    # Join room group
    self.group_name = "likes"
    async_to_sync(self.channel_layer.group_add)(
        self.group_name, self.channel_name
    )
    self.accept()
def connect(self):
    self.room_group_name = 'stream'

    # Join room group
    async_to_sync(self.channel_layer.group_add)(
        self.room_group_name, self.channel_name
    )
    self.accept()
def save(self, *args, **kwargs):
    super().save(*args, **kwargs)
    if not self.reply:
        channel_layer = get_channel_layer()
        payload = {
            "type": "receive",
            "key": "additional_news",
            "actor_name": self.user.username
        }
        async_to_sync(channel_layer.group_send)('notifications', payload)
def receive(self, text_data):
    text_data_json = json.loads(text_data)
    message = text_data_json['message']
    async_to_sync(self.channel_layer.group_send)(
        self.groupname,
        {
            'type': 'like_message',
            'message': message
        }
    )
def save(self):
    # 1. Unconfirm the other, if necessary
    if self.cleaned_data['confirmed']:
        if self.debate.confirmed_ballot != self.ballotsub and self.debate.confirmed_ballot is not None:
            self.debate.confirmed_ballot.confirmed = False
            self.debate.confirmed_ballot.save()

    # 2. Save ballot submission so that we can create related objects
    if self.ballotsub.pk is None:
        self.ballotsub.save()

    # 3. Save the specifics of the ballot
    self.save_ballot()

    # 4. Save ballot and result status
    self.ballotsub.discarded = self.cleaned_data['discarded']
    self.ballotsub.confirmed = self.cleaned_data['confirmed']
    self.ballotsub.save()
    self.debate.result_status = self.cleaned_data['debate_result_status']
    self.debate.save()

    t = self.debate.round.tournament

    # Need to provide a timestamp immediately for BallotStatusConsumer
    # as it will broadcast before the view finishes assigning one
    if self.ballotsub.confirmed:
        self.ballotsub.confirm_timestamp = timezone.now()

    # 5. Notify the Latest Results consumer (for results/overview)
    if self.ballotsub.confirmed:
        if self.debate.result_status is self.debate.STATUS_CONFIRMED:
            group_name = BallotResultConsumer.group_prefix + "_" + t.slug
            async_to_sync(get_channel_layer().group_send)(group_name, {
                "type": "send_json",
                "data": self.ballotsub.serialize_like_actionlog
            })

    # 6. Notify the Results Page/Ballots Status Graph
    group_name = BallotStatusConsumer.group_prefix + "_" + t.slug
    meta = get_status_meta(self.debate)
    async_to_sync(get_channel_layer().group_send)(group_name, {
        "type": "send_json",
        "data": {
            'status': self.cleaned_data['debate_result_status'],
            'icon': meta[0],
            'class': meta[1],
            'sort': meta[2],
            'ballot': self.ballotsub.serialize(t),
            'round': self.debate.round.id
        }
    })

    return self.ballotsub
def connect(self):
    self.room_name = self.scope['url_route']['kwargs']['room_name']
    self.room_group_name = 'chat_%s' % self.room_name

    # Join room group
    async_to_sync(self.channel_layer.group_add)(
        self.room_group_name, self.channel_name
    )
    self.accept()
def reset_cache(request):
    """
    Resets the cache for every test.
    """
    if "django_db" in request.node.keywords or is_django_unittest(request):
        # When the db is created, use the original cachables
        async_to_sync(element_cache.cache_provider.clear_cache)()
        element_cache.ensure_cache(reset=True)

    # Set constant start_time
    element_cache.start_time = 1
def receive(self, text_data):
    text_data_json = json.loads(text_data)
    message = text_data_json['message']

    # Send message to room group
    async_to_sync(self.channel_layer.group_send)(
        self.room_group_name,
        {
            'type': 'chat_message',
            'message': message
        }
    )
def send_ws_action_user(sender, instance, **kwargs):
    channel_layer = get_channel_layer()
    async_to_sync(channel_layer.group_send)(
        "online_users",
        {
            'type': 'action',
            'payload': {
                'type': 'user',
                'slug': instance.slug
            }
        }
    )
def reset(self, keep_state=False):
    """Reset the shared state and drain Django Channels.

    :param keep_state: If ``True``, do not reset the shared manager state
        (useful in tests, where the settings overrides need to be kept).
        Defaults to ``False``.
    """
    if not keep_state:
        self.state = state.ManagerState(state.MANAGER_STATE_PREFIX)
        self.state.reset()
    async_to_sync(consumer.run_consumer)(timeout=1)
    async_to_sync(self.sync_counter.reset)()
def websocket_disconnect(self, message):
    """
    Called when a WebSocket connection is closed. Base level so you don't
    need to call super() all the time.
    """
    try:
        for group in self.groups:
            async_to_sync(self.channel_layer.group_discard)(group, self.channel_name)
    except AttributeError:
        raise InvalidChannelLayerError("BACKEND is unconfigured or doesn't support groups")
    self.disconnect(message["code"])
    raise StopConsumer()
def dispatch(self):
    # Finally, dispatch the message to the group
    if not self.is_valid():
        return

    response = {}
    response['data'] = self.get_allowed_data()
    response['type'] = self.sub_type
    response = serializer.filter_json(response)

    async_to_sync(get_channel_layer().group_send)(
        self.group_name, response
    )
def send_ws_action(sender, instance, created, **kwargs):
    channel_layer = get_channel_layer()
    async_to_sync(channel_layer.group_send)(
        "online_users",
        {
            'type': 'action',
            'payload': {
                'type': 'comment',
                'id': instance.id
            }
        }
    )
def receive_json(self, content):
    # Because the public can receive but not send checkins we need to
    # re-authenticate here:
    if not self.scope["user"].is_authenticated:
        return

    # Send message to room group about the new checkin
    async_to_sync(self.channel_layer.group_send)(
        self.group_name(),
        {
            'type': 'broadcast_checkin',
            'content': content
        }
    )
def update(cls):
    if not cls.thread:
        cls.thread = uuid4().hex
    aircrafts = Aircraft.objects.filter(state__gte=1)
    if not aircrafts.count():
        return
    acfts = []
    for aircraft in aircrafts:
        acfts.append(aircraft)
    message = {
        'type': 'aircrafts_update',
        'Model': 'Aircraft',
        'data': json.loads(serialize('json', acfts)),
    }
    channel_layer = get_channel_layer()
    llogger.debug("Updater: sending %s" % message)
    async_to_sync(channel_layer.group_send)("aircrafts", message)
def connect(self):
    # Join room group
    async_to_sync(self.channel_layer.group_add)('all', self.channel_name)
    self.accept()
from datetime import datetime

import pytz
import ujson as json
from asgiref.sync import async_to_sync

from consoleme.config import config
from consoleme.lib.redis import RedisHandler

log = config.get_logger()
red = async_to_sync(RedisHandler().redis)()


async def delete_expired_challenges(all_challenges):
    current_time = int(datetime.utcnow().replace(tzinfo=pytz.UTC).timestamp())
    expired_challenge_tokens = []
    for token, challenge_j in all_challenges.items():
        challenge = json.loads(challenge_j)
        if challenge.get("ttl", 0) < current_time:
            expired_challenge_tokens.append(token)
    if expired_challenge_tokens:
        red.hdel(
            config.get("challenge_url.redis_key", "TOKEN_CHALLENGES_TEMP"),
            *expired_challenge_tokens,
        )


async def retrieve_user_challenge(request, requested_challenge_token, log_data):
    current_time = int(datetime.utcnow().replace(tzinfo=pytz.UTC).timestamp())
    # Get fresh challenge for user's request
def _send_update_to_game_room(self):
    channel_layer = get_channel_layer()
    # Send update notification to room group
    async_to_sync(channel_layer.group_send)(self.room_group_name(), {
        'type': 'update_room',
    })
def jump(self, request, pk):
    serializer = MoveSerializer(data=request.data)
    game = self.get_object()
    board = game.getBoard()
    turn = game.turn_set.filter(complete=False)
    jumping_piece = None
    if turn.count() > 1:
        return Response({"error": "Turns are out of sync"},
                        status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    elif turn.count() == 1:
        turn = turn[0]
        # this is the piece that's jumping
        jumping_piece = turn.jump_set.latest('created').to_sq
    turn_num = game.getTurnNum()
    print(turn_num)
    if serializer.is_valid():
        if jumping_piece is not None and serializer.validated_data['from_sq'] != jumping_piece:
            # make sure you keep jumping with the same piece
            return Response({"error": 'Not the jumping piece'},
                            status=status.HTTP_400_BAD_REQUEST)
        (move_num, new_layout) = Checkers.jumpPiece(
            board.layout,
            serializer.validated_data['from_sq'],
            serializer.validated_data['to_sq'],
            turn_num)
        if new_layout:
            # a jump changed the board
            board.layout = new_layout
            board.save()
            if not turn:
                turn = Turn(game=game)
            if (move_num == game.getTurnNum()):
                turn.complete = False
            else:
                turn.complete = True
            turn.save()
            jump = Jump(from_sq=serializer.validated_data['from_sq'],
                        to_sq=serializer.validated_data['to_sq'],
                        moved_by=request.user,
                        turn=turn)
            jump.save()
            winner = Checkers.checkWin(new_layout)
            if winner and winner == 'b':
                game.winner = game.black_player
                game.save()
                channel_layer = get_channel_layer()
                async_to_sync(channel_layer.group_send)("notifier", {
                    'type': "chat.message",
                    'message': "Black wins!",
                    'sender': 0,
                })
            elif winner:
                game.winner = game.white_player
                game.save()
                channel_layer = get_channel_layer()
                async_to_sync(channel_layer.group_send)("notifier", {
                    'type': "chat.message",
                    'message': "Red wins!",
                    'sender': 0,
                })
            # if turn.complete:
            channel_layer = get_channel_layer()
            async_to_sync(channel_layer.group_send)(
                "notifier", {
                    'type': "notify.turn",
                    'message': "{},{}".format(game.id, game.getTurnNum())
                })
            board_serializer = BoardSerializer(board)
            return Response({
                "jump": serializer.data,
                "board": board_serializer.data,
                "turn": game.getTurnNum(),
                "end": winner != None
            })
        else:
            return Response({"error": 'Invalid jump'},
                            status=status.HTTP_400_BAD_REQUEST)
    else:
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def handle(self, **kwargs):
    """handlers.handle(), synchronous."""
    request = self.build_request(**kwargs)
    return async_to_sync(handlers.handle)(request)
def handle_changed_elements(elements: Iterable[Element]) -> None:
    """
    Helper function, that sends elements through a channel to the
    autoupdate system and updates the cache.

    Does nothing if elements is empty.
    """

    async def update_cache(elements: Iterable[Element]) -> int:
        """
        Async helper function to update the cache.

        Returns the change_id
        """
        cache_elements: Dict[str, Optional[Dict[str, Any]]] = {}
        for element in elements:
            element_id = get_element_id(element["collection_string"], element["id"])
            cache_elements[element_id] = element["full_data"]
        return await element_cache.change_elements(cache_elements)

    async def async_handle_collection_elements(elements: Iterable[Element]) -> None:
        """
        Async helper function to update cache and send autoupdate.
        """
        # Update cache
        change_id = await update_cache(elements)

        # Send autoupdate
        channel_layer = get_channel_layer()
        await channel_layer.group_send(
            "autoupdate", {"type": "send_data", "change_id": change_id}
        )

        projector_data = await get_projector_data()
        # Send projector
        channel_layer = get_channel_layer()
        await channel_layer.group_send(
            "projector", {"type": "projector_changed", "data": projector_data}
        )

    if elements:
        for element in elements:
            if element.get("reload"):
                model = get_model_from_collection_string(element["collection_string"])
                try:
                    instance = model.objects.get(pk=element["id"])
                except model.DoesNotExist:
                    # The instance was deleted so we set full_data explicitly to None.
                    element["full_data"] = None
                else:
                    element["full_data"] = instance.get_full_data()

        # Save history here using sync code.
        history_instances = save_history(elements)

        # Convert history instances to Elements.
        history_elements: List[Element] = []
        for history_instance in history_instances:
            history_elements.append(
                Element(
                    id=history_instance.get_rest_pk(),
                    collection_string=history_instance.get_collection_string(),
                    full_data=history_instance.get_full_data(),
                    disable_history=True,  # This does not matter because history elements can never be part of the history itself.
                )
            )

        # Chain elements and history elements, then update the cache and send
        # the autoupdate using async code.
        async_to_sync(async_handle_collection_elements)(
            itertools.chain(elements, history_elements)
        )
def send_reply_response(self, message):
    # On a synchronous WebsocketConsumer, self.send() is already synchronous,
    # so wrapping the call in async_to_sync is unnecessary.
    self.send(text_data=json.dumps({
        'message': message
    }))
def upload(request):
    file = request.FILES.get("data", None)
    user_to = request.data.get("username", None)
    file_type = request.data.get("file_type", False)
    uuid_ = request.data.get("uuid", None)
    file_type = int_try_parse(file_type, 1)

    if file is None or user_to is None:
        return JsonResponse(
            data={"error": get_error_serialized(MISSING_REQUIRED_FIELDS).data},
            status=HTTPStatus.BAD_REQUEST)

    User = get_user_model()
    user_to = User.objects.filter(username=user_to).first()
    if user_to is None:
        return JsonResponse(data={
            "error": get_error_serialized(OBJECT_NOT_FOUND,
                                          detail="user not found").data
        }, status=HTTPStatus.NOT_FOUND)

    chat = DirectChatMessage.objects.create(_to=user_to, text="",
                                            _from=request.user, seen=False,
                                            file_type=file_type)
    if uuid_ is not None:
        try:
            uuid_ = uuid.UUID(uuid_)
            chat.uuid = uuid_
        except ValueError:
            chat.delete()
            return JsonResponse(data={
                "error": get_error_serialized(MISSING_REQUIRED_FIELDS,
                                              detail="Invalid UUID").data
            }, status=HTTPStatus.BAD_REQUEST)

    file = ChatFiles.objects.create(chat=chat, is_image=False, file=file)
    chat.text = "media/" + str(file.file)
    chat.save()

    other_user_active_sessions = Clients.objects.filter(username=user_to)
    channel_layer = get_channel_layer()
    data = {
        "type": "chat.message.recieve",
        "_from": PublicProfileSerializer(request.user).data,
        "message": DirectChatViewSerializer(instance=chat, context={
            "target_username": user_to.username
        }).data
    }
    for session in other_user_active_sessions:
        async_to_sync(channel_layer.send)(session.channel_name, data)

    return JsonResponse(data={
        'message': "message sent successfully",
        "url": chat.text
    }, status=HTTPStatus.CREATED)
def disconnect(self, close_code):
    group.objects.filter(groupname=self.channel_group_name)[0].user.remove(self.u1)
    async_to_sync(self.channel_layer.group_discard)(
        self.channel_group_name, self.channel_name
    )
def receive(self, text_data):
    async_to_sync(self.channel_layer.group_send)(self.channel_group_name, {
        "type": "sendfun",
        "data": text_data,
    })
def disconnect(self, code):
    async_to_sync(self.channel_layer.group_discard)(
        self.room_group_name, self.channel_name
    )
def call_api(bot, action, params, echo=None, **kwargs):
    if "async" not in action and not echo:
        action = action + "_async"
    if "send_" in action and "_msg" in action:
        params["message"] = handle_message(bot, params["message"])
    jdata = {"action": action, "params": params}
    if echo:
        jdata["echo"] = echo
    post_type = kwargs.get("post_type", "websocket")

    if post_type == "websocket":
        async_to_sync(channel_layer.send)(bot.api_channel_name, {
            "type": "send.event",
            "text": json.dumps(jdata)
        })
    elif post_type == "http":
        url = os.path.join(
            bot.api_post_url,
            "{}?access_token={}".format(action, bot.access_token))
        headers = {"Content-Type": "application/json"}
        r = requests.post(url=url, headers=headers,
                          data=json.dumps(params), timeout=5)
        if r.status_code != 200:
            print("HTTP Callback failed:")
            print(r.text)
    elif post_type == "wechat":
        print("calling api:{}".format(action))

        def req_url(params):
            url = "https://ex-api.botorange.com/message/send"
            headers = {"Content-Type": "application/json"}
            print("params:{}".format(json.dumps(params)))
            r = requests.post(url=url, headers=headers,
                              data=json.dumps(params), timeout=5)
            if r.status_code != 200:
                print("Wechat HTTP Callback failed:")
                print(r.text)

        config = json.load(open(CONFIG_PATH, encoding="utf-8"))
        params["chatId"] = kwargs.get("chatId", "")
        params["token"] = config.get("WECHAT_TOKEN", "")
        if "send_" in action and "_msg" in action:
            if isinstance(params["message"], str):
                text = params["message"]
                at = re.finditer(r"\[CQ:at,qq=(.*)\]", text)
                if at:
                    params["mention"] = [at_m.group(1) for at_m in at]
                    text = re.sub(r"\[CQ:at,qq=(.*)\]", "", text)
                img_r = r"\[CQ:image,file=(.*?)(?:\]|,.*?\])"
                img_m = re.search(img_r, text)
                if img_m:
                    # FIXME: handle text & img message
                    params["messageType"] = 1
                    params["payload"] = {"url": img_m.group(1)}
                else:
                    params["messageType"] = 0
                    params["payload"] = {"text": text.strip()}
                req_url(params)
            else:
                for msg_seg in params["message"]:
                    if msg_seg["type"] == "image":
                        params["messageType"] = 1
                        params["payload"] = {"url": msg_seg["data"]["file"]}
                        req_url(params)
                    elif msg_seg["type"] == "text":
                        params["messageType"] = 0
                        params["payload"] = {
                            "text": msg_seg["data"]["text"].strip()
                        }
                        req_url(params)
                    time.sleep(1)
    elif post_type == "tomon":
        if "send_" in action and "_msg" in action:
            # print("Tomon Message >>> {}".format(params["message"]))
            attachments = []
            if isinstance(params["message"], str):
                message = re.sub(r"\[CQ:at,qq=(.*)\]", r"<@\g<1>>",
                                 params["message"])
                # Apply the image substitution to the already-rewritten text so
                # the @-mention replacement above is not discarded.
                message = re.sub(r"\[CQ:image,(?:cache=.,)?file=(.*)\]",
                                 r" \g<1> ", message)
            elif isinstance(params["message"], list):
                message = ""
                for msg in params["message"]:
                    if msg["type"] == "text":
                        message += msg["data"]
                    elif msg["type"] == "image":
                        img_url = msg["data"]["file"]
                        attachments.append({"url": img_url})
                if attachments:
                    for img in attachments:
                        message += img["url"] + " "
            nonce = kwargs.get("nonce", "")
            data = {"content": message, "nonce": nonce}
            channel_id = kwargs.get("channel_id") or params.get("group_id")
            url = "https://beta.tomon.co/api/v1/channels/{}/messages".format(
                channel_id)
            headers = {
                "Authorization": "Bearer {}".format(bot.tomon_bot.all()[0].token),
            }
            if attachments:
                payload = {"payload_json": json.dumps(data)}
                img_format = attachments[0]["url"].split(".")[-1]
                original_image = requests.get(attachments[0]["url"], timeout=3)
                files = [("image.{}".format(img_format), original_image.content)]
                print("Posting Multipart to Tomon >>> {}".format(action))
                print("{}".format(url))
                r = requests.post(
                    headers=headers,
                    url=url,
                    files=files,
                    data=payload,
                    timeout=30,
                )
                print(headers)
                print(r.text)
                if r.status_code != 200:
                    print("Tomon HTTP Callback failed:")
                    print(r.text)
                return
            headers.update({"Content-Type": "application/json"})
            # print("Posting Json to Tomon >>> {}".format(action))
            # print("{}".format(url))
            # print("{}".format(json.dumps(data)))
            r = requests.post(url=url, headers=headers,
                              data=json.dumps(data), timeout=3)
            if r.status_code != 200:
                print("Tomon HTTP Callback failed:")
                print(r.text)
def receive(self, text_data):
    # Send message to room group
    async_to_sync(self.channel_layer.group_send)('all', {
        'type': 'chat_message',
        'message': text_data
    })
def connect(self):
    async_to_sync(self.channel_layer.group_add)('event', self.channel_name)
    self.accept()
def disconnect(self, close_code):
    # Leave room group
    async_to_sync(self.channel_layer.group_discard)(
        self.room_group_name, self.channel_name
    )
def _list(self, *args, **options):
    rc = async_to_sync(get_connections)()
    print("{:60} {}".format("filter", "est. number of connections"))
    for k, v in rc.items():
        print("{:60} {}".format(k, v))
from cumulusci.utils import temporary_dir
from django.conf import settings
from django.contrib.auth import get_user_model
from django.utils import timezone
from django_rq import job
from rq.exceptions import ShutDownImminentException
from rq.worker import StopRequested

from .cci_configs import MetaDeployCCI, extract_user_and_repo
from .flows import StopFlowException
from .models import Job, PreflightResult
from .push import report_error, user_token_expired

logger = logging.getLogger(__name__)
User = get_user_model()
sync_report_error = async_to_sync(report_error)


@contextlib.contextmanager
def finalize_result(result):
    try:
        yield
        result.status = result.Status.complete
        result.success_at = timezone.now()
    except (StopRequested, ShutDownImminentException):
        # When an RQ worker gets a SIGTERM, it will initiate a warm shutdown,
        # trying to wrap up existing tasks and then raising a
        # ShutDownImminentException or StopRequested exception.
        # So we want to mark any job that's not done by then as canceled
        # by catching that exception as it propagates back up.
        result.status = result.Status.canceled
def send_message_to_channel(self, message):
    async_to_sync(self.channel_layer.group_send)(self.room_group_name, {
        'type': 'chat_message',
        'message': message
    })
def disconnect(self, close_code):
    async_to_sync(self.channel_layer.group_discard)('event', self.channel_name)
    self.close()
def disconnect(self, _):
    async_to_sync(self.channel_layer.group_discard)(
        f'signals.{self.user.id}', self.channel_name)
def _send_disconnect(self, consumer_filter=None):
    async_to_sync(self.channel_layer.group_send)(self.group_name, {
        'type': 'filtered.disconnect.consumer',
        'filter': consumer_filter
    })
def send_messenger_message(self, message):
    # Send message to room group
    async_to_sync(self.channel_layer.group_send)(self.room_group_name, {
        'type': 'chat_message',
        'message': message
    })
def crawl_wb(weibouser, push=False):
    uid = weibouser.uid
    containerid = weibouser.containerid
    url = r'https://m.weibo.cn/api/container/getIndex?type=uid&value={}&containerid={}'.format(
        uid, containerid)
    s = requests.post(url=url, timeout=15)
    jdata = json.loads(s.text)
    if jdata["ok"] == 1:
        for tile in jdata["data"]["cards"]:
            if len(WeiboTile.objects.filter(itemid=tile.get("itemid", ""))) > 0:
                # print("crawled {} of {} before, pass".format(tile["itemid"], tile["itemid"]))
                continue
            t = WeiboTile(itemid=tile.get("itemid", ""))
            t.owner = weibouser
            t.content = json.dumps(tile)
            t.crawled_time = int(time.time())
            if tile.get("itemid", "") == "":
                logging.info("pass a tile of {} cuz empty itemid".format(t.owner))
                # logging.info(json.dumps(tile))
                continue
            channel_layer = get_channel_layer()
            groups = weibouser.subscribed_by.all()
            # print("ready to push groups:{}".format(list(groups)))
            bots = QQBot.objects.all()
            t.save()
            for group in groups:
                for bot in bots:
                    group_id_list = [
                        item["group_id"] for item in json.loads(bot.group_list)
                    ] if json.loads(bot.group_list) else []
                    if int(group.group_id) not in group_id_list:
                        continue
                    try:
                        msg = get_weibotile_share(t, mode="text")
                        if bot.share_banned:
                            content_json = json.loads(t.content)
                            mblog = content_json["mblog"]
                            bs = BeautifulSoup(mblog["text"], "html.parser")
                            if "original_pic" in mblog.keys():
                                text = "{}\n{}\n{}".format(
                                    "{}'s Weibo:\n========".format(t.owner),
                                    bs.get_text().replace("\u200b", "").strip(),
                                    content_json["scheme"])
                                msg = [{
                                    "type": "text",
                                    "data": {"text": text},
                                }, {
                                    "type": "image",
                                    "data": {"file": mblog["original_pic"]},
                                }]
                            else:
                                msg = "{}\n{}\n{}".format(
                                    "{}'s Weibo:\n========".format(t.owner),
                                    bs.get_text().replace("\u200b", "").strip(),
                                    content_json["scheme"])
                        logging.info("Pushing {} to group: {}".format(t, group))
                        # print("msg: {}".format(msg))
                        if push:
                            t.pushed_group.add(group)
                            jdata = {
                                "action": "send_group_msg",
                                "params": {
                                    "group_id": int(group.group_id),
                                    "message": msg
                                },
                                "echo": "",
                            }
                            if not bot.api_post_url:
                                async_to_sync(channel_layer.send)(
                                    bot.api_channel_name, {
                                        "type": "send.event",
                                        "text": json.dumps(jdata),
                                    })
                            else:
                                url = os.path.join(
                                    bot.api_post_url,
                                    "{}?access_token={}".format(
                                        jdata["action"], bot.access_token))
                                headers = {'Content-Type': 'application/json'}
                                r = requests.post(url=url, headers=headers,
                                                  data=json.dumps(jdata["params"]),
                                                  timeout=5)
                                if r.status_code != 200:
                                    logging.error(r.text)
                    except requests.ConnectionError as e:
                        logging.error(
                            "Pushing {} to group: {} ConnectionError".format(t, group))
                    except requests.ReadTimeout as e:
                        logging.error("Pushing {} to group: {} timeout".format(t, group))
                    except Exception as e:
                        traceback.print_exc()
                        logging.error(
                            "Error at pushing crawled weibo to {}: {}".format(group, e))
            logging.info("crawled {} of {}".format(t.itemid, t.owner))
    else:
        logging.error("Error at crawling weibo:{}".format(jdata.get("ok", "NULL")))
    return
def _make_join_request(self):
    async_to_sync(self.channel_layer.group_send)(self.get_author_group(), {
        'type': 'response.to.join.request',
        'cache_key': self.cache_key,
        'nickname': self.nickname
    })
def send_notification(user, content):
    group_name = 'user_%s' % user.id
    channel_layer = get_channel_layer()
    async_to_sync(channel_layer.group_send)(group_name, content)
def connect(self):
    self.accept()
    async_to_sync(self.channel_layer.group_add)("admin", self.channel_name)
    # Send all orders.
    self.orders_list()
def disconnect(self, close_code):
    async_to_sync(self.channel_layer.group_discard)(self.groupname, self.channel_name)
def connect(self): self.room_name = "ServerCams" self.room_group_name = "ServerCam" async_to_sync(self.channel_layer.group_add)(self.room_group_name, self.channel_name) self.accept()
def connect(self): self.groupname = "likes" self.accept() async_to_sync(self.channel_layer.group_add)(self.groupname, self.channel_name)
def notify_dispatcher_finish(self):
    """Notify dispatcher that the processing is finished.

    See ``notify_dispatcher_abort`` for message format.
    """
    async_to_sync(self.notify_dispatcher_finish_async)()