def send_message_auto_complete_processor(request):
    """
    Adds completions for the expression auto-completion to the request context
    """
    completions = []
    user = request.user
    org = None

    if hasattr(user, "get_org"):
        org = request.user.get_org()

    if org:
        # fixed contact/date completions, emitted in this exact order
        static_entries = (
            ("contact", _("Contact Name")),
            ("contact.first_name", _("Contact First Name")),
            ("contact.groups", _("Contact Groups")),
            ("contact.language", _("Contact Language")),
            ("contact.name", _("Contact Name")),
            ("contact.tel", _("Contact Phone")),
            ("contact.tel_e164", _("Contact Phone - E164")),
            ("contact.uuid", _("Contact UUID")),
            ("date", _("Current Date and Time")),
            ("date.now", _("Current Date and Time")),
            ("date.today", _("Current Date")),
            ("date.tomorrow", _("Tomorrow's Date")),
            ("date.yesterday", _("Yesterday's Date")),
        )
        completions.extend(dict(name=name, display=str(display)) for name, display in static_entries)

        # one completion per non-tel URN scheme this org can send on
        sendable_schemes = org.get_schemes(Channel.ROLE_SEND)
        for scheme, label in ContactURN.SCHEME_CHOICES:
            if scheme != TEL_SCHEME and scheme in sendable_schemes:
                completions.append(dict(name="contact.%s" % scheme, display=str(_("Contact %s" % label))))

        # one completion per active user-defined contact field
        user_fields = org.contactfields(manager="user_fields").filter(is_active=True).order_by("label")
        for field in user_fields:
            display = str(_("Contact Field: %(label)s")) % {"label": field.label}
            completions.append(dict(name="contact.%s" % str(field.key), display=display))

    function_completions = get_function_listing()
    return dict(completions=json.dumps(completions), function_completions=json.dumps(function_completions))
def test_claim(self, mock_post):
    """Tests claiming a Viber public channel: token validation failure, then a successful claim."""
    url = reverse("channels.types.viber_public.claim")
    self.login(self.admin)

    # check that claim page URL appears on claim list page
    response = self.client.get(reverse("channels.channel_claim"))
    self.assertContains(response, url)

    # try submitting with invalid token
    mock_post.return_value = MockResponse(400, json.dumps({"status": 3, "status_message": "Invalid token"}))
    response = self.client.post(url, {"auth_token": "invalid"})
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, "Error validating authentication token")

    # ok this time claim with a success
    # three responses: token check, account info fetch, webhook registration
    mock_post.side_effect = [
        MockResponse(200, json.dumps({"status": 0, "status_message": "ok", "id": "viberId", "uri": "viberName"})),
        MockResponse(200, json.dumps({"status": 0, "status_message": "ok", "id": "viberId", "uri": "viberName"})),
        MockResponse(200, json.dumps({"status": 0, "status_message": "ok"})),
    ]
    self.client.post(url, {"auth_token": "123456"}, follow=True)

    # assert our channel got created
    channel = Channel.objects.get(address="viberId")
    self.assertEqual(channel.config["auth_token"], "123456")
    self.assertEqual(channel.name, "viberName")
    self.assertTrue(channel.get_type().has_attachment_support(channel))

    # should have been called with our webhook URL
    self.assertEqual(mock_post.call_args[0][0], "https://chatapi.viber.com/pa/set_webhook")
def fix_flow(self, flow, preview: bool) -> bool:
    """
    Applies all registered fixers to the flow's definition, printing a unified diff of any
    changes. Unless preview is set, changed definitions are saved as a new revision.
    Returns True if the definition changed, False otherwise.
    """
    original = flow.get_definition()
    definition = deepcopy(original)

    for fixer in fixers:
        fixer(definition)

    before = json.dumps(original, indent=2).splitlines(keepends=True)
    after = json.dumps(definition, indent=2).splitlines(keepends=True)
    diff = list(unified_diff(before, after, fromfile="original", tofile="fixed"))

    if not diff:
        return False

    for line in diff:
        self.stdout.write(line, ending="")

    if not preview:
        new_rev, issues = flow.save_revision(None, definition)
        self.stdout.write(f" > new revision ({new_rev.revision}) saved for flow '{flow.name}'")

    return True
def refresh_access_token(self, channel_id):
    """
    Fetches a fresh access token from the provider and caches it in redis.

    Returns the new token, or None when another process holds the refresh lock or the
    token request fails. Failures and successes are both recorded as channel logs.
    """
    r = get_redis_connection()
    lock_name = self.TOKEN_REFRESH_LOCK % self.channel_uuid

    # NOTE(review): this get-then-lock is not atomic, so two processes can both pass the
    # get and serialize on the lock — confirm that double refresh is acceptable here
    if not r.get(lock_name):
        with r.lock(lock_name, timeout=30):
            key = self.TOKEN_STORE_KEY % self.channel_uuid

            post_data = dict(grant_type="client_credentials", client_id=self.app_id, client_secret=self.app_secret)
            url = self.TOKEN_URL

            event = HttpEvent("POST", url, json.dumps(post_data))
            start = time.time()

            response = self._request(url, post_data, access_token=None)
            event.status_code = response.status_code

            if response.status_code != 200:
                # log the failure and bail without touching the cached token
                event.response_body = response.content
                ChannelLog.log_channel_request(
                    channel_id, "Got non-200 response from %s" % self.API_NAME, event, start, True
                )
                return

            response_json = response.json()
            event.response_body = json.dumps(response_json)
            ChannelLog.log_channel_request(
                channel_id, "Successfully fetched access token from %s" % self.API_NAME, event, start
            )

            access_token = response_json["access_token"]
            # cache the token for its reported validity period (defaults to 7200s)
            expires = response_json.get("expires_in", 7200)
            r.set(key, access_token, ex=int(expires))
            return access_token
def _request(self, endpoint, payload=None, files=None, post=True, encode_json=False, returns_json=True):
    """
    Makes a request to the given mailroom endpoint and returns the parsed response.

    :param payload: request body (sent as form data with files, JSON otherwise)
    :param files: optional files for a multipart upload
    :param post: POST when True, GET otherwise
    :param encode_json: encode the JSON body ourselves rather than letting requests do it
    :param returns_json: parse the response as JSON when True, return raw bytes otherwise
    :raises FlowValidationException: on HTTP 422
    :raises MailroomException: on any other 4xx response
    """
    if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
        logger.debug("=============== %s request ===============" % endpoint)
        logger.debug(json.dumps(payload, indent=2))
        logger.debug("=============== /%s request ===============" % endpoint)

    headers = self.headers.copy()
    if files:
        kwargs = dict(data=payload, files=files)
    elif encode_json:
        # do the JSON encoding ourselves - required when the json is something we've loaded with our decoder
        # which could contain non-standard types
        headers["Content-Type"] = "application/json"
        kwargs = dict(data=json.dumps(payload))
    else:
        kwargs = dict(json=payload)

    req_fn = requests.post if post else requests.get
    response = req_fn("%s/mr/%s" % (self.base_url, endpoint), headers=headers, **kwargs)

    return_val = response.json() if returns_json else response.content

    if logger.isEnabledFor(logging.DEBUG):  # pragma: no cover
        logger.debug("=============== %s response ===============" % endpoint)
        logger.debug(return_val)
        logger.debug("=============== /%s response ===============" % endpoint)

    if response.status_code == 422:
        raise FlowValidationException(endpoint, payload, return_val)
    if 400 <= response.status_code < 500:
        raise MailroomException(endpoint, payload, return_val)

    response.raise_for_status()
    return return_val
def compare_contact(self, contact_uuid: str):
    """
    Prints a side-by-side JSON dump of a contact as stored in the database and as indexed
    in ElasticSearch, for manual comparison.

    :raises CommandError: if no active contact exists with the given UUID
    """
    db_contact = Contact.objects.filter(uuid=contact_uuid, is_active=True).first()
    if not db_contact:
        raise CommandError("No such contact")

    # build a flat representation of the DB contact mirroring the ES document shape
    as_json = {
        "id": db_contact.id,
        "name": db_contact.name,
        "language": db_contact.language,
        "status": db_contact.status,
        "tickets": db_contact.ticket_count,
        "is_active": db_contact.is_active,
        "created_on": db_contact.created_on,
        "modified_on": db_contact.modified_on,
        "last_seen_on": db_contact.last_seen_on,
        "urns": [{"scheme": u.scheme, "path": u.path} for u in db_contact.urns.all()],
        "fields": db_contact.fields,
        "groups": [str(g.uuid) for g in db_contact.all_groups.all()],
        "flow": str(db_contact.current_flow.uuid) if db_contact.current_flow else None,
    }

    self.stdout.write("========================= DB =========================")
    self.stdout.write(json.dumps(as_json, indent=2))

    es_contact = self.get_es_contact(contact_uuid)

    self.stdout.write("========================= ES =========================")
    self.stdout.write(json.dumps(es_contact, indent=2))
def _request(self, endpoint, payload):
    """
    POSTs the payload to the given mailroom endpoint and returns the decoded JSON response.

    :raises FlowValidationException: on HTTP 422
    :raises MailroomException: on any other 4xx response
    """
    debug = logger.isEnabledFor(logging.DEBUG)
    if debug:  # pragma: no cover
        logger.debug("=============== %s request ===============" % endpoint)
        logger.debug(json.dumps(payload, indent=2))
        logger.debug("=============== /%s request ===============" % endpoint)

    url = "%s/mr/%s" % (self.base_url, endpoint)
    response = requests.post(url, json=payload, headers=self.headers)
    resp_json = response.json()

    if debug:  # pragma: no cover
        logger.debug("=============== %s response ===============" % endpoint)
        logger.debug(json.dumps(resp_json, indent=2))
        logger.debug("=============== /%s response ===============" % endpoint)

    if response.status_code == 422:
        raise FlowValidationException(endpoint, payload, resp_json)
    if 400 <= response.status_code < 500:
        raise MailroomException(endpoint, payload, resp_json)

    response.raise_for_status()
    return resp_json
def setUp(self):
    """Registers mocked GitHub API responses for the posm-extracts repo and creates a temp work dir."""
    # listing of the repo root: contains the 'geojson' tree
    responses.add(
        responses.GET,
        "https://api.github.com/repos/nyaruka/posm-extracts/git/trees/master",
        body=json.dumps({"tree": [{"path": "geojson", "sha": "the-sha"}]}),
        content_type="application/json",
    )
    # listing of the geojson tree: two simplified boundary files
    responses.add(
        responses.GET,
        "https://api.github.com/repos/nyaruka/posm-extracts/git/trees/the-sha",
        body=json.dumps({"tree": [{"path": "R12345_simplified.json"}, {"path": "R45678_simplified.json"}]}),
        content_type="application/json",
    )
    # raw content of the first boundary file
    responses.add(
        responses.GET,
        "https://raw.githubusercontent.com/nyaruka/posm-extracts/master/geojson/R12345_simplified.json",
        body="the-relation-json",
        content_type="application/json",
    )
    self.testdir = tempfile.mkdtemp()
def form_valid(self, form):
    """
    Handles a valid send-message form: either queues the text to contacts at a flow node,
    or creates a broadcast (optionally scheduled). Responds with JSON when _format=json.
    """
    self.form = form
    user = self.request.user
    org = user.get_org()
    simulation = self.request.GET.get("simulation", "false") == "true"

    omnibox = self.form.cleaned_data["omnibox"]
    has_schedule = self.form.cleaned_data["schedule"]
    step_uuid = self.form.cleaned_data.get("step_node", None)
    text = self.form.cleaned_data["text"]

    groups = list(omnibox["groups"])
    contacts = list(omnibox["contacts"])
    urns = list(omnibox["urns"])

    if step_uuid:
        # sending to contacts sitting at a specific flow node happens in a background task
        from .tasks import send_to_flow_node

        get_params = {k: v for k, v in self.request.GET.items()}
        get_params.update({"s": step_uuid})
        send_to_flow_node.delay(org.pk, user.pk, text, **get_params)
        if "_format" in self.request.GET and self.request.GET["_format"] == "json":
            return HttpResponse(json.dumps(dict(status="success")), content_type="application/json")
        else:
            return HttpResponseRedirect(self.get_success_url())

    # if simulating only use the test contact
    if simulation:
        groups = []
        urns = []
        for contact in contacts:
            if contact.is_test:
                contacts = [contact]
                break

    schedule = Schedule.objects.create(created_by=user, modified_by=user) if has_schedule else None
    broadcast = Broadcast.create(
        org, user, text, groups=groups, contacts=contacts, urns=urns, schedule=schedule, status=QUEUED
    )

    # unscheduled broadcasts go out immediately
    if not has_schedule:
        self.post_save(broadcast)
        super().form_valid(form)

    analytics.track(
        self.request.user.username,
        "temba.broadcast_created",
        dict(contacts=len(contacts), groups=len(groups), urns=len(urns)),
    )

    if "_format" in self.request.GET and self.request.GET["_format"] == "json":
        data = dict(status="success", redirect=reverse("msgs.broadcast_schedule_read", args=[broadcast.pk]))
        return HttpResponse(json.dumps(data), content_type="application/json")
    else:
        if self.form.cleaned_data["schedule"]:
            return HttpResponseRedirect(reverse("msgs.broadcast_schedule_read", args=[broadcast.pk]))
        return HttpResponseRedirect(self.get_success_url())
def form_valid(self, form):
    """
    Handles a valid send-message form: either queues the text to contacts at a flow node,
    or creates a broadcast (optionally scheduled). Responds with JSON when _format=json.
    """
    self.form = form
    user = self.request.user
    org = user.get_org()

    omnibox = self.form.cleaned_data["omnibox"]
    has_schedule = self.form.cleaned_data["schedule"]
    step_uuid = self.form.cleaned_data.get("step_node", None)
    text = self.form.cleaned_data["text"]

    groups = list(omnibox["groups"])
    contacts = list(omnibox["contacts"])
    urns = list(omnibox["urns"])

    if step_uuid:
        # sending to contacts sitting at a specific flow node happens in a background task
        from .tasks import send_to_flow_node

        get_params = {k: v for k, v in self.request.GET.items()}
        get_params.update({"s": step_uuid})
        send_to_flow_node.delay(org.pk, user.pk, text, **get_params)
        if "_format" in self.request.GET and self.request.GET["_format"] == "json":
            return HttpResponse(json.dumps(dict(status="success")), content_type="application/json")
        else:
            return HttpResponseRedirect(self.get_success_url())

    schedule = Schedule.create_blank_schedule(org, user) if has_schedule else None
    broadcast = Broadcast.create(
        org,
        user,
        text,
        groups=groups,
        contacts=contacts,
        urns=urns,
        schedule=schedule,
        status=QUEUED,
        template_state=Broadcast.TEMPLATE_STATE_UNEVALUATED,
    )

    # unscheduled broadcasts go out immediately
    if not has_schedule:
        self.post_save(broadcast)
        super().form_valid(form)

    analytics.track(
        self.request.user.username,
        "temba.broadcast_created",
        dict(contacts=len(contacts), groups=len(groups), urns=len(urns)),
    )

    if "_format" in self.request.GET and self.request.GET["_format"] == "json":
        data = dict(status="success", redirect=reverse("msgs.broadcast_schedule_read", args=[broadcast.pk]))
        return HttpResponse(json.dumps(data), content_type="application/json")
    else:
        if self.form.cleaned_data["schedule"]:
            return HttpResponseRedirect(reverse("msgs.broadcast_schedule_read", args=[broadcast.pk]))
        return HttpResponseRedirect(self.get_success_url())
def test_new_conversation_triggers(self):
    """Tests that creating/archiving/restoring a new-conversation trigger syncs the FB messenger profile."""
    flow = self.create_flow()

    # creating the trigger registers the get_started payload
    with patch("requests.post") as mock_post:
        mock_post.return_value = MockResponse(200, json.dumps({"success": True}))
        trigger = Trigger.create(self.org, self.admin, Trigger.TYPE_NEW_CONVERSATION, flow, channel=self.channel)

        mock_post.assert_called_once_with(
            "https://graph.facebook.com/v12.0/me/messenger_profile",
            json={"get_started": {"payload": "get_started"}},
            headers={"Content-Type": "application/json"},
            params={"access_token": "09876543"},
        )
        mock_post.reset_mock()

    # archiving removes the get_started field
    with patch("requests.delete") as mock_post:
        mock_post.return_value = MockResponse(200, json.dumps({"success": True}))
        trigger.archive(self.admin)

        mock_post.assert_called_once_with(
            "https://graph.facebook.com/v12.0/me/messenger_profile",
            json={"fields": ["get_started"]},
            headers={"Content-Type": "application/json"},
            params={"access_token": "09876543"},
        )
        mock_post.reset_mock()

    # restoring registers it again
    with patch("requests.post") as mock_post:
        mock_post.return_value = MockResponse(200, json.dumps({"success": True}))
        trigger.restore(self.admin)

        mock_post.assert_called_once_with(
            "https://graph.facebook.com/v12.0/me/messenger_profile",
            json={"get_started": {"payload": "get_started"}},
            headers={"Content-Type": "application/json"},
            params={"access_token": "09876543"},
        )
        mock_post.reset_mock()
def post_transferto_api_response(cls, login, token, airtime_obj=None, **kwargs):
    """
    Posts a signed request to the TransferTo airtime API, optionally recording the
    request/response on the given airtime transfer object. Returns the raw response.

    :raises Exception: if SEND_AIRTIME is disabled in settings
    """
    if not settings.SEND_AIRTIME:
        raise Exception("!! Skipping Airtime Transfer, SEND_AIRTIME set to False")

    # TransferTo auth scheme: key is a millisecond-timestamp nonce, md5 = md5(login + token + key)
    key = str(int(time.time() * 1000))
    md5 = hashlib.md5()
    md5.update(force_bytes(login + token + key))
    md5 = md5.hexdigest()

    data = kwargs
    data.update(dict(login=login, key=key, md5=md5))

    response = requests.post(cls.TRANSFERTO_AIRTIME_API_URL, data)

    # append this exchange to the transfer's audit log if one was given
    if airtime_obj is not None:
        airtime_obj.data += json.dumps(data, indent=2) + AirtimeTransfer.LOG_DIVIDER
        airtime_obj.response += response.text + AirtimeTransfer.LOG_DIVIDER
        airtime_obj.save()

    return response
def push_task(org, queue, task_name, args, priority=DEFAULT_PRIORITY):
    """
    Adds a task to queue_name with the supplied arguments.

    Ex: push_task(nyaruka, 'flows', 'start_flow', [1,2,3,4,5,6,7,8,9,10])
    """
    r = get_redis_connection("default")

    # calculate our score from the current time and priority, this could get us in trouble
    # if things are queued for more than ~100 days, but otherwise gives us the properties of prioritizing
    # first based on priority, then insertion order.
    score = time.time() + priority

    # push our task onto the right queue and make sure it is in the active list (atomically)
    with r.pipeline() as pipe:
        org_id = org if isinstance(org, int) else org.id
        # NOTE(review): zadd/zincrby use the redis-py 2.x positional argument order here, while
        # other queueing code in this codebase uses the 3.x API (zadd(name, {member: score}),
        # zincrby(name, amount, member)) — confirm which client version this runs against
        pipe.zadd("%s:%d" % (task_name, org_id), score, json.dumps(args))

        # and make sure this key is in our list of queues so this job will get worked on
        pipe.zincrby("%s:active" % task_name, org_id, 0)
        pipe.execute()

    # if we were given a queue to schedule on, then add this task to celery.
    #
    # note that the task that is fired needs no arguments as it should just use pop_task with the
    # task name to determine what to work on.
    if queue:
        if getattr(settings, "CELERY_ALWAYS_EAGER", False):
            task_function = lookup_task_function(task_name)
            task_function()
        else:  # pragma: needs cover
            current_app.send_task(task_name, args=[], kwargs={}, queue=queue)
def update_field_locally(user, contact, key, value, label=None):
    """
    Updates a single field value on the contact object and syncs the change directly into
    the contact's fields JSONB column. Falsy values clear the field.
    """
    field = ContactField.get_or_create(contact.org, user, key, label=label)

    field_uuid = str(field.uuid)
    if contact.fields is None:
        contact.fields = {}

    if not value:
        # falsy values are treated as clearing the field
        value = None
        if field_uuid in contact.fields:
            del contact.fields[field_uuid]
    else:
        # only rewrite the in-memory dict when the serialized value actually changed
        field_dict = contact.serialize_field(field, value)
        if contact.fields.get(field_uuid) != field_dict:
            contact.fields[field_uuid] = field_dict

    # update our JSONB on our contact
    with connection.cursor() as cursor:
        if value is None:
            # delete the field
            cursor.execute("UPDATE contacts_contact SET fields = fields - %s WHERE id = %s", [field_uuid, contact.id])
        else:
            # update the field
            cursor.execute(
                "UPDATE contacts_contact SET fields = COALESCE(fields,'{}'::jsonb) || %s::jsonb WHERE id = %s",
                [json.dumps({field_uuid: contact.fields[field_uuid]}), contact.id],
            )
def render_to_response(self, context, **response_kwargs):
    """Renders the listed objects as a JSON array of {id, text} pairs."""
    payload = []
    for obj in context["object_list"]:
        payload.append({"id": obj.uuid, "text": obj.name})

    return HttpResponse(json.dumps(payload), content_type="application/json")
def settings(self, webhook_url: str):
    """Configures the account's webhook URL; raises ClientError unless the API returns 204."""
    payload = json.dumps({"webhook": {"url": webhook_url}})
    response = self.put(f"{self.base_url}/settings", data=payload)

    if response.status_code != 204:
        raise ClientError(response=response)
def test_claim(self, mock_get):
    """Tests claiming a Firebase Cloud Messaging channel and its configuration page."""
    url = reverse("channels.types.firebase.claim")
    self.login(self.admin)

    # check that claim page URL appears on claim list page
    response = self.client.get(reverse("channels.channel_claim"))
    self.assertContains(response, url)

    # key validation request succeeds, so the claim goes through
    mock_get.return_value = MockResponse(
        200, json.dumps({"title": "FCM Channel", "key": "abcde12345", "send_notification": "True"})
    )

    response = self.client.post(
        url, {"title": "FCM Channel", "key": "abcde12345", "send_notification": "True"}, follow=True
    )

    channel = Channel.objects.get(address="abcde12345")
    self.assertRedirects(response, reverse("channels.channel_configuration", args=[channel.uuid]))
    self.assertEqual(channel.channel_type, "FCM")
    self.assertEqual(
        channel.config, {"FCM_KEY": "abcde12345", "FCM_TITLE": "FCM Channel", "FCM_NOTIFICATION": True}
    )

    # config page should show both courier endpoints for this channel
    response = self.client.get(reverse("channels.channel_configuration", args=[channel.uuid]))
    self.assertContains(response, reverse("courier.fcm", args=[channel.uuid, "receive"]))
    self.assertContains(response, reverse("courier.fcm", args=[channel.uuid, "register"]))
def start_call(self, call, to, from_, status_callback):
    """
    Starts an outgoing IVR call via the Twilio API and records the result on the call object.

    :param call: the IVRCall being started
    :param to: the destination number
    :param from_: the caller id (NOTE(review): currently ignored in favor of the channel address — confirm intended)
    :param status_callback: URL Twilio should hit for answer/status events
    :raises ValueError: if SEND_CALLS is disabled in settings
    :raises IVRException: if the Twilio API rejects the call
    """
    if not settings.SEND_CALLS:
        raise ValueError("SEND_CALLS set to False, skipping call start")

    params = dict(to=to, from_=call.channel.address, url=status_callback, status_callback=status_callback)

    try:
        twilio_call = self.api.calls.create(**params)
        call.external_id = str(twilio_call.sid)

        # the call was successfully sent to the IVR provider
        call.status = IVRCall.WIRED
        call.save()

        for event in self.events:
            ChannelLog.log_ivr_interaction(call, "Started call", event)

    except TwilioRestException as twilio_error:
        message = "Twilio Error: %s" % twilio_error.msg
        if twilio_error.code == 20003:
            message = _("Could not authenticate with your Twilio account. Check your token and try again.")

        # log the failure against the Twilio calls endpoint (was previously the Nexmo URL,
        # copy/pasted from the Nexmo client's start_call)
        event = HttpEvent(
            "POST", "https://api.twilio.com/2010-04-01/Calls", json.dumps(params), response_body=str(message)
        )
        ChannelLog.log_ivr_interaction(call, "Call start failed", event, is_error=True)

        call.status = IVRCall.FAILED
        call.save()

        raise IVRException(message)
def _queue_task(pipe, org_id, queue, task_type, task, priority): """ Queues a task to mailroom Args: pipe: an open redis pipe org_id: the id of the org for this task queue: the queue the task should be added to task_type: the type of the task task: the task definition priority: the priority of this task """ # our score is the time in milliseconds since epoch + any priority modifier score = int(round(time.time() * 1000)) + priority # create our payload payload = _create_mailroom_task(org_id, task_type, task) org_queue = QUEUE_PATTERN % (queue, org_id) active_queue = ACTIVE_PATTERN % queue # push onto our org queue pipe.zadd(org_queue, {json.dumps(payload): score}) # and mark that org as active pipe.zincrby(active_queue, 0, org_id)
def start_call(self, call, to, from_, status_callback):
    """
    Starts an outgoing IVR call via the Nexmo API and records the result on the call object.

    :raises ValueError: if SEND_CALLS is disabled in settings
    :raises IVRException: if the call could not be started
    """
    if not settings.SEND_CALLS:
        raise ValueError("SEND_CALLS set to False, skipping call start")

    # our own handler URL which Nexmo hits for both answer and event callbacks
    url = "https://%s%s" % (self.org.get_brand_domain(), reverse("ivr.ivrcall_handle", args=[call.pk]))

    params = dict()
    params["answer_url"] = [url]
    params["answer_method"] = "POST"
    params["to"] = [dict(type="phone", number=to.strip("+"))]
    params["from"] = dict(type="phone", number=from_.strip("+"))
    params["event_url"] = ["%s?has_event=1" % url]
    params["event_method"] = "POST"

    try:
        response = self.create_call(params=params)
        call_uuid = response.get("uuid", None)
        call.external_id = str(call_uuid)

        # the call was successfully sent to the IVR provider
        call.status = IVRCall.WIRED
        call.save()

        for event in self.events:
            ChannelLog.log_ivr_interaction(call, "Started call", event)

    except Exception as e:
        event = HttpEvent("POST", "https://api.nexmo.com/v1/calls", json.dumps(params), response_body=str(e))
        ChannelLog.log_ivr_interaction(call, "Call start failed", event, is_error=True)

        call.status = IVRCall.FAILED
        call.save()

        raise IVRException(_("Nexmo call failed, with error %s") % str(e))
def __init__(self, status_code: int, body, method="GET", url="http://foo.com/", headers=None):
    """
    Builds a mock HTTP response mimicking the attributes of a requests.Response.

    :param status_code: the HTTP status to report
    :param body: the response body, either a string or a dict (serialized to JSON)
    :param method: the HTTP method of the faked originating request
    :param url: the URL of the faked originating request
    :param headers: optional response headers
    """
    if headers is None:
        headers = {}

    # convert dictionaries to json if the body is passed that way
    if isinstance(body, dict):
        body = json.dumps(body)

    # expose the body in all the forms requests.Response does
    self.body = force_text(body)
    self.text = self.body
    self.content = force_bytes(self.body)
    self.status_code = status_code
    self.headers = CaseInsensitiveDict(data=headers)
    self.url = url
    self.ok = True
    self.cookies = dict()
    self.streaming = False
    self.charset = "utf-8"
    self.connection = dict()
    self.raw = dict_to_struct("MockRaw", dict(version="1.1", status=status_code, headers=headers))
    self.reason = ""

    # mock up a request object on our response as well
    self.request = dict_to_struct(
        "MockRequest", dict(method=method, url=url, body="request body", headers=headers)
    )
def serialize_run(run):
    """
    Serializes a flow run into a dict with its status, timestamps, path, and optionally
    results, events, parent, last input and a synthesized webhook entry for @extra fields.
    """
    serialized = {
        "uuid": str(run.uuid),
        # a run with an exit timestamp is considered completed
        "status": "completed" if run.exited_on else "active",
        "created_on": run.created_on.isoformat(),
        "exited_on": run.exited_on.isoformat() if run.exited_on else None,
        "expires_on": run.expires_on.isoformat() if run.expires_on else None,
        "flow": {"uuid": str(run.flow.uuid), "name": run.flow.name},
        "path": run.path,
    }

    if run.results:
        serialized["results"] = run.results
    if run.events:
        serialized["events"] = run.events

    # only include the parent when it belongs to the same contact
    if run.parent_id and run.parent.contact == run.contact:
        serialized["parent_uuid"] = str(run.parent.uuid)

    msg_in = run.get_last_msg()
    if msg_in:
        serialized["input"] = serialize_input(msg_in)

    # things in @extra might not have come from a webhook call but at least they'll be accessible if we make one
    if run.fields:
        payload = json.dumps(run.fields)
        serialized["webhook"] = {
            "request": "GET / HTTP/1.1\r\nHost: fakewebhooks.com\r\nUser-Agent: goflow-trials\r\n\r\n",
            "response": "HTTP/1.1 200 OK\r\nContent-Type: application/json; charset=utf-8\r\n\r\n" + payload,
            "status": "success",
            "status_code": 200,
            "url": "http://fakewebhooks.com/",
        }

    return serialized
def form_invalid(self, form):
    """Returns form errors as a JSON 400 response when _format=json, otherwise default handling."""
    wants_json = self.request.GET.get("_format") == "json"

    if wants_json:  # pragma: needs cover
        payload = json.dumps(dict(status="error", errors=form.errors))
        return HttpResponse(payload, content_type="application/json", status=400)

    return super().form_invalid(form)
def test_release(self, mock_delete):
    """Tests that releasing the channel unsubscribes the app via the Facebook graph API."""
    mock_delete.return_value = MockResponse(200, json.dumps({"success": True}))
    self.channel.release()

    mock_delete.assert_called_once_with(
        "https://graph.facebook.com/v2.12/me/subscribed_apps", params={"access_token": "09876543"}
    )
def form_valid(self, form):
    """
    Handles a valid Zendesk setup form: stores the push credentials on the matching
    ticketer, then renders the view that POSTs our metadata back to Zendesk.
    """
    from .type import ZendeskType

    subdomain = form.cleaned_data["subdomain"]
    secret = form.cleaned_data["secret"]

    # the form validated these, so this lookup is expected to succeed
    ticketer = Ticketer.objects.get(
        ticketer_type=ZendeskType.slug, config__subdomain=subdomain, config__secret=secret, is_active=True
    )

    # update ticketer config with push credentials we've been given
    ticketer.config[ZendeskType.CONFIG_PUSH_ID] = form.cleaned_data["instance_push_id"]
    ticketer.config[ZendeskType.CONFIG_PUSH_TOKEN] = form.cleaned_data["zendesk_access_token"]
    ticketer.modified_on = timezone.now()
    ticketer.save(update_fields=("config", "modified_on"))

    # go to special return view which redirects back to Zendesk as POST
    context = {
        "return_url": form.cleaned_data["return_url"],
        "name": form.cleaned_data["name"],
        "metadata": json.dumps({"ticketer": str(ticketer.uuid), "secret": secret}),
    }
    return TemplateResponse(request=self.request, template=self.return_template, context=context)
def __init__(self, records: List[Dict], max_payload_size: int = 256):
    """
    Builds a fake S3 Select event stream from the given records: Records events carrying
    chunks of the JSONL payload, followed by a Stats event and an End event.

    :param records: the records to serialize into the stream
    :param max_payload_size: maximum size of each Records chunk
    """
    # serialize records as a JSONL payload
    buffer = io.BytesIO()
    for record in records:
        buffer.write(json.dumps(record).encode("utf-8"))
        buffer.write(b"\n")
    payload = buffer.getvalue()

    # split into chunks of at most max_payload_size (presumably — depends on chunk_list semantics)
    payload_chunks = chunk_list(payload, size=max_payload_size)

    self.events = [{"Records": {"Payload": chunk}} for chunk in payload_chunks]
    # the byte counts here are dummy values except BytesReturned
    self.events.append(
        {"Stats": {"Details": {"BytesScanned": 123, "BytesProcessed": 234, "BytesReturned": len(payload)}}},
    )
    self.events.append({"End": {}})
def test_new_conversation_triggers(self):
    """Tests that creating/archiving/restoring a new-conversation trigger syncs FB thread settings."""
    flow = self.create_flow()

    with patch("requests.post") as mock_post:
        mock_post.return_value = MockResponse(200, json.dumps({"success": True}))
        trigger = Trigger.create(self.org, self.admin, Trigger.TYPE_NEW_CONVERSATION, flow, channel=self.channel)

        # creating the trigger registers the get_started call-to-action
        mock_post.assert_called_once_with(
            "https://graph.facebook.com/v3.3/12345/thread_settings",
            json={
                "setting_type": "call_to_actions",
                "thread_state": "new_thread",
                "call_to_actions": [{"payload": "get_started"}],
            },
            headers={"Content-Type": "application/json"},
            params={"access_token": "09876543"},
        )
        mock_post.reset_mock()

        # archiving clears the call-to-actions
        trigger.archive(self.admin)
        mock_post.assert_called_once_with(
            "https://graph.facebook.com/v3.3/12345/thread_settings",
            json={"setting_type": "call_to_actions", "thread_state": "new_thread", "call_to_actions": []},
            headers={"Content-Type": "application/json"},
            params={"access_token": "09876543"},
        )
        mock_post.reset_mock()

        # restoring registers it again
        trigger.restore(self.admin)
        mock_post.assert_called_once_with(
            "https://graph.facebook.com/v3.3/12345/thread_settings",
            json={
                "setting_type": "call_to_actions",
                "thread_state": "new_thread",
                "call_to_actions": [{"payload": "get_started"}],
            },
            headers={"Content-Type": "application/json"},
            params={"access_token": "09876543"},
        )
        mock_post.reset_mock()
def put_jsonl(self, bucket, key, records):
    """Stores the records as a gzipped JSONL object under (bucket, key)."""
    buffer = io.BytesIO()
    with gzip.GzipFile(fileobj=buffer, mode="wb") as gz:
        for record in records:
            gz.write(json.dumps(record).encode("utf-8") + b"\n")

    self.objects[(bucket, key)] = buffer
def assertStatus(sms, event_type, assert_status):
    """Posts a Junebug event of the given type and asserts the message ends up in the expected status."""
    # NOTE(review): `data` and `self` are free variables from the enclosing test method
    data["event_type"] = event_type
    response = self.client.post(
        reverse("handlers.junebug_handler", args=["event", self.channel.uuid]),
        data=json.dumps(data),
        content_type="application/json",
    )
    self.assertEqual(200, response.status_code)
    # reload the message to pick up the status change
    sms = Msg.objects.get(pk=sms.id)
    self.assertEqual(assert_status, sms.status)
def _request(self, endpoint, payload):
    """
    POSTs the payload to the given mailroom endpoint and returns the decoded JSON response.

    :raises MailroomException: on any 4xx response
    """
    debug = logger.isEnabledFor(logging.DEBUG)
    url = "%s/mr/%s" % (self.base_url, endpoint)

    if debug:  # pragma: no cover
        logger.debug("=============== %s request ===============" % endpoint)
        logger.debug(json.dumps(payload, indent=2))
        logger.debug("=============== /%s request ===============" % endpoint)

    response = requests.post(url, json=payload, headers=self.headers)
    resp_json = response.json()

    if debug:  # pragma: no cover
        logger.debug("=============== %s response ===============" % endpoint)
        logger.debug(json.dumps(resp_json, indent=2))
        logger.debug("=============== /%s response ===============" % endpoint)

    if 400 <= response.status_code < 500:
        raise MailroomException(endpoint, payload, resp_json)

    response.raise_for_status()
    return resp_json
def _request(self, endpoint, payload):
    """
    POSTs the payload to the given flowserver endpoint and returns the decoded JSON response.

    :raises FlowServerException: on any 4xx response
    """
    if self.debug:
        print("[GOFLOW]=============== %s request ===============" % endpoint)
        print(json.dumps(payload, indent=2))
        print("[GOFLOW]=============== /%s request ===============" % endpoint)

    response = requests.post("%s/flow/%s" % (self.base_url, endpoint), json=payload, headers=self.headers)
    resp_json = response.json()

    if self.debug:
        print("[GOFLOW]=============== %s response ===============" % endpoint)
        print(json.dumps(resp_json, indent=2))
        print("[GOFLOW]=============== /%s response ===============" % endpoint)

    if 400 <= response.status_code < 500:
        raise FlowServerException(endpoint, payload, resp_json)

    response.raise_for_status()
    return resp_json
def setUp(self):
    """Registers mocked GitHub API responses for the posm-extracts repo and creates a temp work dir."""
    # listing of the repo root: contains the 'geojson' tree
    responses.add(
        responses.GET,
        "https://api.github.com/repos/nyaruka/posm-extracts/git/trees/master",
        body=json.dumps({"tree": [{"path": "geojson", "sha": "the-sha"}]}),
        content_type="application/json",
    )
    # listing of the geojson tree: two simplified boundary files
    responses.add(
        responses.GET,
        "https://api.github.com/repos/nyaruka/posm-extracts/git/trees/the-sha",
        body=json.dumps({"tree": [{"path": "R12345_simplified.json"}, {"path": "R45678_simplified.json"}]}),
        content_type="application/json",
    )
    # raw content of the first boundary file
    responses.add(
        responses.GET,
        "https://raw.githubusercontent.com/nyaruka/posm-extracts/master/geojson/R12345_simplified.json",
        body="the-relation-json",
        content_type="application/json",
    )
    self.testdir = tempfile.mkdtemp()
def test_release(self, mock_post):
    """Tests that releasing the channel removes the webhook registration with Viber."""
    mock_post.side_effect = [MockResponse(200, json.dumps({"status": 0, "status_message": "ok"}))]
    self.channel.release()
    self.assertEqual(mock_post.call_args[0][0], "https://chatapi.viber.com/pa/set_webhook")
def jsonlgz_encode(records: list) -> tuple:
    """
    Encodes the records as gzipped JSONL, returning a tuple of
    (stream, hex digest, size in bytes) as computed by the FileAndHash wrapper.
    """
    raw = io.BytesIO()
    counting = FileAndHash(raw)

    with gzip.GzipFile(fileobj=counting, mode="wb") as gz:
        for record in records:
            gz.write(json.dumps(record).encode("utf-8") + b"\n")

    return raw, counting.hash.hexdigest(), counting.size
def test_receive_ussd_no_session(self): from temba.channels.handlers import JunebugUSSDHandler # Delete the trigger to prevent the sesion from being created self.trigger.delete() data = self.mk_ussd_msg(content="événement", to=self.starcode) callback_url = reverse("handlers.junebug_handler", args=["inbound", self.channel.uuid]) response = self.client.post(callback_url, json.dumps(data), content_type="application/json") self.assertEqual(response.status_code, 400) self.assertEqual(response.json()["status"], JunebugUSSDHandler.NACK)
def test_receive_with_session_id(self):
    """Tests that an inbound USSD message carrying a session id propagates that id to both messages."""
    from temba.ussd.models import USSDSession

    data = self.mk_ussd_msg(content="événement", session_id="session-id", to=self.starcode)
    callback_url = reverse("handlers.junebug_handler", args=["inbound", self.channel.uuid])
    self.client.post(callback_url, json.dumps(data), content_type="application/json")

    # load our message
    inbound_msg, outbound_msg = Msg.objects.all().order_by("pk")
    self.assertEqual(outbound_msg.connection.status, USSDSession.TRIGGERED)
    self.assertEqual(outbound_msg.connection.external_id, "session-id")
    self.assertEqual(inbound_msg.connection.external_id, "session-id")
def get_db_prep_value(self, value, *args, **kwargs):
    """
    Serializes the value to a JSON string for storage in a text column.

    Falsy containers and None are stored as NULL on nullable fields (unless 'force' is
    passed in kwargs); anything that isn't a list or dict raises.

    :raises ValueError: if the value is not a list or dict
    """
    # if the value is falsy we will save it as null
    if self.null and value in (None, {}, []) and not kwargs.get("force"):
        return None

    if value is None:
        return None

    # accept lists and dicts, including dict subclasses such as OrderedDict
    # (the previous exact type() check rejected other dict/list subclasses)
    if not isinstance(value, (list, dict)):
        raise ValueError("JSONAsTextField should be a dict or a list, got %s => %s" % (type(value), value))

    return json.dumps(value)
def post(self, request, *args, **kwargs):
    """Executes a message action form and re-renders the list, returning a JSON 400 on action errors."""
    user = self.request.user
    org = user.get_org()

    form = MsgActionForm(self.request.POST, org=org, user=user)

    if form.is_valid():
        response = form.execute()

        # shouldn't get in here in normal operation
        if response and "error" in response:  # pragma: no cover
            return HttpResponse(json.dumps(response), content_type="application/json", status=400)

    return self.get(request, *args, **kwargs)
def send_message_auto_complete_processor(request):
    """
    Adds completions for the expression auto-completion to the request context.

    Returns a dict with 'completions' (contact/date/URN-scheme/contact-field entries) and
    'function_completions', both JSON-encoded for embedding in templates.
    """
    completions = []
    user = request.user
    org = None

    # anonymous users have no get_org and therefore get no org-specific completions
    if hasattr(user, "get_org"):
        org = request.user.get_org()

    if org:
        completions.append(dict(name="contact", display=str(_("Contact Name"))))
        completions.append(dict(name="contact.first_name", display=str(_("Contact First Name"))))
        completions.append(dict(name="contact.groups", display=str(_("Contact Groups"))))
        completions.append(dict(name="contact.language", display=str(_("Contact Language"))))
        completions.append(dict(name="contact.name", display=str(_("Contact Name"))))
        completions.append(dict(name="contact.tel", display=str(_("Contact Phone"))))
        completions.append(dict(name="contact.tel_e164", display=str(_("Contact Phone - E164"))))
        completions.append(dict(name="contact.uuid", display=str(_("Contact UUID"))))
        completions.append(dict(name="date", display=str(_("Current Date and Time"))))
        completions.append(dict(name="date.now", display=str(_("Current Date and Time"))))
        completions.append(dict(name="date.today", display=str(_("Current Date"))))
        completions.append(dict(name="date.tomorrow", display=str(_("Tomorrow's Date"))))
        completions.append(dict(name="date.yesterday", display=str(_("Yesterday's Date"))))

        # one completion per non-tel URN scheme this org can send on
        for scheme, label in ContactURN.SCHEME_CHOICES:
            if scheme != TEL_SCHEME and scheme in org.get_schemes(Channel.ROLE_SEND):
                completions.append(dict(name="contact.%s" % scheme, display=str(_("Contact %s" % label))))

        # one completion per active user-defined contact field
        for field in org.contactfields(manager="user_fields").filter(is_active=True).order_by("label"):
            display = str(_("Contact Field: %(label)s")) % {"label": field.label}
            completions.append(dict(name="contact.%s" % str(field.key), display=display))

    function_completions = get_function_listing()
    return dict(completions=json.dumps(completions), function_completions=json.dumps(function_completions))
def __str__(self):
    """
    Serializes the action document to JSON, first disabling bargeIn on any
    talk/stream action that is not immediately followed by an input action
    (there is nothing for the caller to barge in to).
    """
    actions = self.document
    last_idx = len(actions) - 1

    for idx, action in enumerate(actions):
        if action["action"] not in ("talk", "stream"):
            continue

        # last action, or next action doesn't collect input -> no barge-in
        if idx == last_idx or actions[idx + 1]["action"] != "input":
            actions[idx]["bargeIn"] = False

    return json.dumps(actions)
def push_courier_msgs(channel, msgs, high_priority=False):
    """
    Adds the passed in msgs to our courier queue for channel
    """
    conn = get_redis_connection("default")

    priority = COURIER_HIGH_PRIORITY if high_priority else COURIER_LOW_PRIORITY
    tps = channel.tps or COURIER_DEFAULT_TPS

    # serialize each msg as a courier task
    payload = [msg_as_task(m) for m in msgs]

    # hand off to our queueing lua script
    get_script(conn)(keys=(time.time(), "msgs", channel.uuid, tps, priority, json.dumps(payload)), client=conn)
def get_cacheable(cache_key, callable, r=None, force_dirty=False):
    """
    Gets the result of a method call, using the given key and TTL as a cache.

    callable must return a (value, ttl) tuple; force_dirty skips the cache
    read and recomputes (the fresh value is still written back).
    """
    if not r:
        r = get_redis_connection()

    # try the cache first unless we've been told to recompute
    cached = None if force_dirty else r.get(cache_key)
    if cached is not None:
        return json.loads(force_text(cached))

    value, cache_ttl = callable()
    r.set(cache_key, json.dumps(value), cache_ttl)
    return value
def get_json(self, value):
    """
    Serializes the omnibox selection in value (groups then contacts) as JSON,
    returning None when nothing is selected. Requires self.user to be set.
    """
    if "user" not in self.__dict__:  # pragma: no cover
        raise ValueError(
            "Omnibox requires a user, make sure you set one using field.set_user(user) in your form.__init__"
        )

    spec = OmniboxWidget.get_objects_spec(value, self.user)

    # groups first, then contacts, matching the widget's display order
    entries = [dict(text=g.name, id="g-%s" % g.uuid, contacts=g.contacts.count()) for g in spec["groups"]]
    entries += [dict(text=str(c), id="c-%s" % c.uuid) for c in spec["contacts"]]

    return json.dumps(entries) if entries else None
def queue_mailroom_task(org_id, queue, task_type, task, priority):
    """
    Adds the passed in task to the proper mailroom queue
    """
    payload = json.dumps({"type": task_type, "org_id": org_id, "task": task})

    # our score is the time in milliseconds since epoch + any priority modifier
    score = int(round(time.time() * 1000)) + priority

    org_queue = QUEUE_PATTERN % (queue, org_id)
    active_queue = ACTIVE_PATTERN % queue

    # push onto each
    conn = get_redis_connection("default")
    conn.zadd(org_queue, score, payload)
    conn.zincrby(active_queue, org_id, 0)
def test_receive_ussd(self):
    from temba.channels.handlers import JunebugUSSDHandler
    from temba.ussd.models import USSDSession

    msg_data = self.mk_ussd_msg(content="événement", to=self.starcode)
    handler_url = reverse("handlers.junebug_handler", args=["inbound", self.channel.uuid])

    response = self.client.post(handler_url, json.dumps(msg_data), content_type="application/json")
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.json()["status"], JunebugUSSDHandler.ACK)

    # the inbound message should have triggered a USSD session and an outbound reply
    in_msg, out_msg = Msg.objects.all().order_by("pk")
    self.assertEqual(msg_data["from"], out_msg.contact.get_urn(TEL_SCHEME).path)
    self.assertEqual(out_msg.response_to, in_msg)
    self.assertEqual(out_msg.connection.status, USSDSession.TRIGGERED)
    self.assertEqual(in_msg.direction, INCOMING)
    self.assertEqual(in_msg.status, HANDLED)
def get_history(self):
    """
    Returns a JSON snapshot of this campaign event, including its campaign,
    group, relative-to field and flow.
    """
    event = dict(
        uuid=self.uuid,
        offset=self.offset,
        unit=self.unit,
        event_type=self.event_type,
        delivery_hour=self.delivery_hour,
        message=self.message,
        relative_to=dict(label=self.relative_to.label, key=self.relative_to.key),
        flow=dict(uuid=self.flow.uuid, name=self.flow.name),
    )

    history = dict(
        name=self.campaign.name,
        uuid=self.campaign.uuid,
        group=dict(uuid=self.campaign.group.uuid, name=self.campaign.group.name),
        events=[event],
    )
    return json.dumps(history)
def post(self, request, *args, **kwargs):
    """
    Fake order-status endpoint: looks up a canned response by the order code
    in the 'text' parameter (POST body or query string) and returns it as JSON.
    Unknown codes get a status of "Invalid".
    """
    source = request.POST if request.method == "POST" else request.GET
    text = source.get("text", "")

    # canned responses keyed by lowercased order code
    orders = {
        "cu001": dict(
            status="Shipped",
            order="CU001",
            name="Ben Haggerty",
            order_number="PLAT2012",
            ship_date="October 9th",
            delivery_date="April 3rd",
            description="Vogue White Wall x 4",
        ),
        "cu002": dict(
            status="Pending",
            order="CU002",
            name="Ryan Lewis",
            username="******",
            ship_date="August 14th",
            order_number="FLAG13",
            description="American Flag x 1",
        ),
        "cu003": dict(
            status="Cancelled",
            order="CU003",
            name="R Kelly",
            username="******",
            cancel_date="December 2nd",
            order_number="SHET51",
            description="Bed Sheets, Queen x 1",
        ),
    }

    response = orders.get(text.lower(), dict(status="Invalid"))
    return HttpResponse(json.dumps(response))
def test_new_conversation_triggers(self, mock_post):
    mock_post.return_value = MockResponse(200, json.dumps({"success": True}))

    flow = self.create_flow()
    trigger = Trigger.create(self.org, self.admin, Trigger.TYPE_NEW_CONVERSATION, flow, self.channel)

    def assert_thread_settings(call_to_actions):
        # each lifecycle change should push exactly one thread_settings update to Facebook
        mock_post.assert_called_once_with(
            "https://graph.facebook.com/v2.12/12345/thread_settings",
            json={
                "setting_type": "call_to_actions",
                "thread_state": "new_thread",
                "call_to_actions": call_to_actions,
            },
            headers={"Content-Type": "application/json"},
            params={"access_token": "09876543"},
        )
        mock_post.reset_mock()

    # creating the trigger registers the get_started payload
    assert_thread_settings([{"payload": "get_started"}])

    # archiving clears the call to actions
    trigger.archive(self.admin)
    assert_thread_settings([])

    # restoring registers it again
    trigger.restore(self.admin)
    assert_thread_settings([{"payload": "get_started"}])
def post_transferto_api_response(cls, login, token, airtime_obj=None, **kwargs):
    """
    POSTs the given kwargs to the TransferTo airtime API, signed with the
    account login/token, optionally appending request and response logs to
    airtime_obj. Raises when SEND_AIRTIME is disabled in settings.
    """
    if not settings.SEND_AIRTIME:
        raise Exception("!! Skipping Airtime Transfer, SEND_AIRTIME set to False")

    # key is ms since epoch; signature is md5(login + token + key) per the TransferTo auth scheme
    key = str(int(time.time() * 1000))
    signature = hashlib.md5(force_bytes(login + token + key)).hexdigest()

    payload = kwargs
    payload.update(dict(login=login, key=key, md5=signature))

    response = requests.post(cls.TRANSFERTO_AIRTIME_API_URL, payload)

    if airtime_obj is not None:
        airtime_obj.data += json.dumps(payload, indent=2) + AirtimeTransfer.LOG_DIVIDER
        airtime_obj.response += response.text + AirtimeTransfer.LOG_DIVIDER
        airtime_obj.save()

    return response