def eval_dots_block(xform_json, callback=None):
    """
    Evaluate the dots block in the xform submission and put it in the
    computed_ block for the xform.

    Marks the doc with a 'processed' timestamp so repeat calls are no-ops,
    then saves the doc back to couch. Errors are swallowed and reported via
    notify_exception. `callback` is currently unused.
    """
    case_id = get_case_id(xform_json)
    do_continue = False
    # first, set the pact_data to json if the dots update stuff is there.
    try:
        # `in` instead of dict.has_key(), which was removed in Python 3
        if 'processed' in xform_json.get(PACT_DOTS_DATA_PROPERTY, {}):
            # already processed, skipping
            return
        xform_json[PACT_DOTS_DATA_PROPERTY] = {}

        if not isinstance(xform_json['form']['case'].get('update', None), dict):
            # no case update property, skipping
            pass
        else:
            # update is a dict
            if 'dots' in xform_json['form']['case']['update']:
                dots_json = xform_json['form']['case']['update']['dots']
                if isinstance(dots_json, (str, unicode)):
                    json_data = simplejson.loads(dots_json)
                    xform_json[PACT_DOTS_DATA_PROPERTY]['dots'] = json_data
                    do_continue = True
            else:
                # no dots data in doc
                pass

        xform_json[PACT_DOTS_DATA_PROPERTY]['processed'] = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
        XFormInstance.get_db().save_doc(xform_json)
    # modern `as` syntax instead of the py2-only `except Exception, ex`
    except Exception as ex:
        # if this gets triggered, that's ok because web entry don't got them
        tb = traceback.format_exc()
        notify_exception(None, message="PACT error evaluating DOTS block docid %s, %s\n\tTraceback: %s" % (xform_json['_id'], ex, tb))
def start_session_with_error_handling(domain, contact, app, module, form, case_id, keyword, logged_subevent=None):
    """
    Start a touchforms session, translating TouchformsError into an error code.

    Returns (session, responses, error, error_code).
    """
    try:
        session, responses = start_session(domain, contact, app, module, form,
            case_id=case_id, yield_responses=True)
        if logged_subevent:
            logged_subevent.xforms_session_id = session.pk
            logged_subevent.save()
        return (session, responses, False, None)
    except TouchformsError as e:
        human_readable_message = e.response_data.get('human_readable_message', None)
        # logged_subevent defaults to None, so guard here the same way the
        # success path does (previously this raised AttributeError when None)
        if logged_subevent:
            logged_subevent.error(MessagingEvent.ERROR_TOUCHFORMS_ERROR,
                additional_error_text=human_readable_message)
        if touchforms_error_is_config_error(e):
            error_code = MSG_FORM_ERROR
        else:
            # note the trailing space: adjacent literals previously joined
            # into "...forcontact..."
            notify_exception(None, message=('Could not process structured sms for '
                'contact %s, domain %s, keyword %s' % (contact._id, domain, keyword)))
            error_code = MSG_TOUCHFORMS_ERROR
        return (None, None, True, error_code)
def check_es_index():
    """
    Verify that the Case and soon to be added XForm Elastic indices are up
    to date with what's in couch.

    This code is also called in the HQ admin page as well.
    """
    es_status = {}
    for check in (check_es_cluster_health,
                  check_case_es_index,
                  check_xform_es_index,
                  check_reportcase_es_index,
                  check_reportxform_es_index):
        es_status.update(check())

    problems = []
    if es_status[CLUSTER_HEALTH] == 'red':
        problems.append("Cluster health is red - something is up with the ES machine")

    for index in es_status.keys():
        if index == CLUSTER_HEALTH:
            continue
        pillow_status = es_status[index]
        if not pillow_status['status']:
            problems.append(
                "Elasticsearch %s Index Issue: %s" % (index, es_status[index]['message']))

    # a non-empty problem list means we have something to report
    if problems:
        problems.append("This alert can give false alarms due to timing lag, so please double check https://www.commcarehq.org/hq/admin/system/ and the Elasticsarch Status section to make sure.")
        notify_exception(None, message='\n'.join(problems))
def post(self, request):
    """View's dispatch method automatically calls this"""
    upload_url = reverse('upload_item_lists', args=[self.domain])

    try:
        workbook = WorkbookJSONReader(request.file)
    except AttributeError:
        messages.error(request, "Error processing your Excel (.xlsx) file")
        return HttpResponseRedirect(upload_url)

    try:
        self._run_upload(request, workbook)
    except WorksheetNotFound as err:
        messages.error(request, "Workbook does not have a sheet called '%s'" % err.title)
        return HttpResponseRedirect(upload_url)
    except Exception as err:
        notify_exception(request)
        messages.error(request, "Fixture upload could not complete due to the following error: %s" % err)
        return HttpResponseRedirect(upload_url)

    return HttpResponseRedirect(reverse('fixture_view', args=[self.domain]))
def message_test(request, domain, phone_number):
    """Simulate an incoming SMS from phone_number, then render the tester page."""
    if request.method == "POST":
        text = request.POST.get("message", "")
        # superusers may simulate from any number; others are domain-scoped
        scope = domain if not request.couch_user.is_superuser else None
        try:
            incoming(phone_number, text, "TEST", domain_scope=scope)
        except DomainScopeValidationError:
            messages.error(
                request,
                _("Invalid phone number being simulated. You may only "
                  "simulate SMS from verified numbers belonging to contacts "
                  "in this domain.")
            )
        except Exception:
            notify_exception(request)
            messages.error(
                request,
                _("An error has occurred. Please try again in a few minutes "
                  "and if the issue persists, please contact CommCareHQ "
                  "Support.")
            )

    tz = report_utils.get_timezone(request.couch_user.user_id, domain)
    context = get_sms_autocomplete_context(request, domain)
    context.update({
        'domain': domain,
        'messagelog': SMSLog.by_domain_dsc(domain),
        'now': datetime.utcnow(),
        'timezone': tz,
        'timezone_now': datetime.now(tz=tz),
        'layout_flush_content': True,
        'phone_number': phone_number,
    })
    return render(request, "sms/message_tester.html", context)
def upload_bulk_ui_translations(request, domain, app_id):
    """Process an uploaded UI translations file and apply it to the app."""
    succeeded = False
    try:
        app = get_app(domain, app_id)
        trans_dict, error_properties = process_ui_translation_upload(
            app, request.file
        )
        if error_properties:
            # build an html list of every problematic translation
            msg = _("We found problem with following translations:")
            msg += "<br>"
            msg += "".join("<li>%s</li>" % prop for prop in error_properties)
            messages.error(request, msg, extra_tags='html')
        else:
            app.translations = dict(trans_dict)
            app.save()
            succeeded = True
    except Exception:
        notify_exception(request, 'Bulk Upload Translations Error')
        messages.error(request, _("Something went wrong! Update failed. We're looking into it"))

    if succeeded:
        messages.success(request, _("UI Translations Updated!"))
    return HttpResponseRedirect(reverse('app_languages', args=[domain, app_id]))
def get_report_content(self):
    """
    Get the report's HTML content as rendered by the static view format.
    """
    try:
        if self.report is None:
            return _("The report used to create this scheduled report is no"
                     " longer available on CommCare HQ. Please delete this"
                     " scheduled report and create a new one using an available"
                     " report.")
    except Exception:
        pass

    from django.http import HttpRequest, QueryDict

    # build a synthetic request that looks like the owner viewing the report
    mock_request = HttpRequest()
    mock_request.couch_user = self.owner
    mock_request.user = self.owner.get_django_user()
    mock_request.domain = self.domain
    mock_request.couch_user.current_domain = self.domain
    mock_request.GET = QueryDict(self.query_string + '&filterSet=true')

    try:
        response = self._dispatcher.dispatch(mock_request, render_as='email', **self.view_kwargs)
        return json.loads(response.content)['report']
    except Exception:
        notify_exception(None, "Error generating report")
        return _("An error occurred while generating this report.")
def send_html_email_async(self, subject, recipient, html_content,
                          text_content=None, cc=None,
                          email_from=settings.DEFAULT_FROM_EMAIL,
                          file_attachments=None, bcc=None,
                          smtp_exception_skip_list=None):
    """
    Call with send_HTML_email_async.delay(*args, **kwargs)

    - sends emails in the main celery queue
    - if sending fails, retry in 15 min
    - retry a maximum of 10 times
    """
    try:
        send_HTML_email(subject, recipient, html_content,
                        text_content=text_content,
                        cc=cc,
                        email_from=email_from,
                        file_attachments=file_attachments,
                        bcc=bcc,
                        smtp_exception_skip_list=smtp_exception_skip_list)
    except Exception as e:
        from corehq.util.python_compatibility import soft_assert_type_text
        # normalize recipient into a list for the error report
        if isinstance(recipient, six.string_types):
            soft_assert_type_text(recipient)
            recipients = [recipient]
        else:
            recipients = list(recipient)
        notify_exception(
            None,
            message="Encountered error while sending email",
            details={
                'subject': subject,
                'recipients': ', '.join(recipients),
                'error': e,
            }
        )
        self.retry(exc=e)
def debug_notify(request):
    """Deliberately trigger and report an exception, to exercise notify_exception."""
    try:
        0 / 0
    except ZeroDivisionError:
        notify_exception(
            request,
            "If you want to achieve a 500-style email-out but don't want the user to see a 500, use notify_exception(request[, message])")
    return HttpResponse("Email should have been sent")
def process_incoming_message(*args, **kwargs):
    """
    Handle a Telerivet webhook payload (passed as kwargs): log it, validate the
    webhook secret, and dispatch incoming SMS / IVR events.
    """
    try:
        from corehq.apps.telerivet.views import TELERIVET_INBOUND_FIELD_MAP
        # only the field name (first element of each map entry) is needed here
        fields = {field: kwargs[field] for field, _ in TELERIVET_INBOUND_FIELD_MAP}
        log = IncomingRequest(**fields)
        log.save()
    except Exception:
        # logging is best-effort; don't let a failure block message processing
        notify_exception(None, "Could not save Telerivet log entry")

    backend = TelerivetBackend.by_webhook_secret(kwargs["secret"])
    if backend is None:
        # Ignore the message if the webhook secret is not recognized
        return

    # prefer the E.164-formatted number when present
    if kwargs["from_number_e164"]:
        from_number = strip_plus(kwargs["from_number_e164"])
    else:
        from_number = strip_plus(kwargs["from_number"])

    if kwargs["event"] == EVENT_INCOMING:
        if kwargs["message_type"] == MESSAGE_TYPE_SMS:
            incoming_sms(from_number, kwargs["content"], TelerivetBackend.get_api_id())
        elif kwargs["message_type"] == MESSAGE_TYPE_CALL:
            incoming_ivr(from_number, None, "TELERIVET-%s" % kwargs["message_id"], None)
def upload_sms_translations(request, domain):
    """Rebuild the domain's SMS translations from an uploaded workbook."""
    try:
        workbook = WorkbookJSONReader(request.file)
        translations = workbook.get_worksheet(title='translations')

        with StandaloneTranslationDoc.get_locked_obj(domain, "sms") as tdoc:
            valid_ids = sorted(_MESSAGES.keys())
            # start each configured language with an empty mapping
            updated = {lang: {} for lang in tdoc.langs}
            for row in translations:
                for lang in tdoc.langs:
                    if row.get(lang):
                        msg_id = row["property"]
                        if msg_id in valid_ids:
                            updated[lang][msg_id] = str(row[lang]).strip()
            tdoc.translations = updated
            tdoc.save()
            messages.success(request, _("SMS Translations Updated."))
    except Exception:
        notify_exception(request, 'SMS Upload Translations Error')
        messages.error(request, _("Update failed. We're looking into it."))

    return HttpResponseRedirect(reverse('sms_languages', args=[domain]))
def post(self, request, *args, **kwargs):
    """Commit (save) the export; respond with JSON when async, redirect otherwise."""
    try:
        # commit() performs the save; the returned id is currently unused
        export_id = self.commit(request)
    except Exception as e:
        if self.is_async:
            # todo: this can probably be removed as soon as
            # http://manage.dimagi.com/default.asp?157713 is resolved
            notify_exception(request, 'problem saving an export! {}'.format(str(e)))
            response = json_response({
                'error': str(e) or type(e).__name__
            })
            response.status_code = 500
            return response
        elif isinstance(e, ExportAppException):
            # known app-level error: bounce back to where the user came from
            return HttpResponseRedirect(request.META['HTTP_REFERER'])
        else:
            raise
    else:
        try:
            post_data = json.loads(self.request.body)
            url = self.export_home_url
            # short circuit to check if the submit is from a create or edit feed
            # to redirect it to the list view
            from corehq.apps.export.views.list import DashboardFeedListView, DailySavedExportListView
            if isinstance(self, DashboardFeedMixin):
                url = reverse(DashboardFeedListView.urlname, args=[self.domain])
            elif post_data['is_daily_saved_export']:
                url = reverse(DailySavedExportListView.urlname, args=[self.domain])
        except ValueError:
            # request body wasn't valid JSON; fall back to the default home url
            url = self.export_home_url
        if self.is_async:
            return json_response({
                'redirect': url,
            })
        return HttpResponseRedirect(url)
def get(self, request, domain):
    """Return {'results': ..., 'total': ...} as JSON; empty results on ES errors."""
    self.domain = domain
    self.q = self.request.GET.get('q', None)
    try:
        count, options = self.get_options()
        payload = {
            'results': options,
            'total': count,
        }
        return self.render_json_response(payload)
    except ESError as e:
        if self.q:
            # Likely caused by an invalid user query
            # A query that causes this error immediately follows a very
            # similar query that should be caught by the else clause if it
            # errors. If that error didn't happen, the error was probably
            # introduced by the addition of the query_string query, which
            # contains the user's input.
            logger.info('ElasticSearch error caused by query "%s": %s',
                        self.q, e)
        else:
            # The error was our fault
            notify_exception(request, e)
    return self.render_json_response({
        'results': [],
        'total': 0,
    })
def check_es_index():
    """
    Verify that the Case and soon to be added XForm Elastic indices are up
    to date with what's in couch.

    This code is also called in the HQ admin page as well.
    """
    es_status = {}
    for check in (check_cluster_health,
                  check_case_index,
                  check_xform_index,
                  check_exchange_index):
        es_status.update(check())

    message = []
    if es_status[CLUSTER_HEALTH] == 'red':
        message.append("Cluster health is red - something is up with the ES machine")

    for prefix in ('hqcases', 'xforms', 'cc_exchange'):
        if es_status.get('%s_status' % prefix, False) == False:
            message.append("Elasticsearch %s Index Issue: %s" %
                           (prefix, es_status['%s_message' % prefix]))

    # a non-empty message list means something needs reporting
    if message:
        notify_exception(None, message='\n'.join(message))
def domains_for_user(request, selected_domain=None):
    """Render the project-switcher dropdown html for the current user."""
    parts = ['<ul class="dropdown-menu nav-list dropdown-orange">']
    new_domain_url = reverse("registration_domain")

    if selected_domain == 'public':
        # viewing the public domain with a different db, so the user's domains can't readily be accessed.
        parts.append('<li><a href="%s">Back to My Projects...</a></li>' % reverse("domain_select"))
        parts.append('<li class="divider"></li>')
    else:
        try:
            domain_list = Domain.active_for_user(request.couch_user)
        except Exception:
            if settings.DEBUG:
                raise
            domain_list = Domain.active_for_user(request.user)
            notify_exception(request)

        if len(domain_list) > 0:
            parts.append('<li class="nav-header">My Projects</li>')
            for domain in domain_list:
                homepage_url = reverse("domain_homepage", args=[domain.name])
                parts.append('<li><a href="%s">%s</a></li>' % (homepage_url, domain.long_display_name()))
        else:
            parts.append('<li class="nav-header">No Projects</li>')
        parts.append('<li class="divider"></li>')
        parts.append('<li><a href="%s">New Project...</a></li>' % new_domain_url)
        parts.append('<li><a href="%s">CommCare Exchange...</a></li>' % reverse("appstore"))

    parts.append("</ul>")
    return "".join(parts)
def validate_app(self, existing_errors=None):
    """Run every app validation; return the accumulated list of error dicts."""
    # intentionally reuse the caller's list (extended in place) when provided
    errors = existing_errors or []
    for check in (self._check_password_charset,
                  self._validate_fixtures,
                  self._validate_intents,
                  self._validate_practice_users):
        errors.extend(check())
    try:
        # only attempt the (expensive) full build when nothing failed so far
        if not errors:
            self.app.create_all_files()
    except CaseXPathValidationError as cve:
        errors.append({
            'type': 'invalid case xpath reference',
            'module': cve.module,
            'form': cve.form,
        })
    except UserCaseXPathValidationError as ucve:
        errors.append({
            'type': 'invalid user property xpath reference',
            'module': ucve.module,
            'form': ucve.form,
        })
    except (AppEditingError, XFormValidationError, XFormException,
            PermissionDenied, SuiteValidationError) as e:
        errors.append({'type': 'error', 'message': six.text_type(e)})
    except Exception as e:
        if settings.DEBUG:
            raise
        # this is much less useful/actionable without a URL
        # so make sure to include the request
        notify_exception(view_utils.get_request(), "Unexpected error building app")
        errors.append({'type': 'error', 'message': 'unexpected error: %s' % e})
    return errors
def get_by_name(cls, name, strict=False):
    """Look up a Domain doc by name; retries with a fresh (strict) view on miss."""
    if not name:
        # get_by_name should never be called with name as None (or '', etc)
        # I fixed the code in such a way that if I raise a ValueError
        # all tests pass and basic pages load,
        # but in order not to break anything in the wild,
        # I'm opting to notify by email if/when this happens
        # but fall back to the previous behavior of returning None
        try:
            raise ValueError('%r is not a valid domain name' % name)
        except ValueError:
            if settings.DEBUG:
                raise
            notify_exception(None, '%r is not a valid domain name' % name)
            return None

    # non-strict queries tolerate a stale view for speed
    extra_args = {} if strict else {'stale': settings.COUCH_STALE_QUERY}
    result = cls.view(
        "domain/domains",
        key=name,
        reduce=False,
        include_docs=True,
        **extra_args
    ).first()
    if result is None and not strict:
        # on the off chance this is a brand new domain, try with strict
        return cls.get_by_name(name, strict=True)
    return result
def run_query(self, es_query, es_type=None): """ Run a more advanced POST based ES query Returns the raw query json back, or None if there's an error """ #todo: backend audit logging of all these types of queries if 'fields' in es_query or 'script_fields' in es_query: #nasty hack to add domain field to query that does specific fields. #do nothing if there's no field query because we get everything fields = es_query.get('fields', []) fields.append('domain') es_query['fields'] = fields es_base = self.es[self.index] if es_type is None else self.es[self.index][es_type] es_results = es_base.get('_search', data=es_query) if 'error' in es_results: msg = "Error in elasticsearch query [%s]: %s\nquery: %s" % (self.index, es_results['error'], es_query) notify_exception(None, message=msg) return None for res in es_results['hits']['hits']: res_domain = None if '_source' in res: #check source res_domain = res['_source'].get('domain', None) elif 'fields' in res: res_domain = res['fields'].get('domain', None) #check fields assert res_domain == self.domain, "Security check failed, search result domain did not match requester domain: %s != %s" % (res_domain, self.domain) return es_results
def _dispatcher(self):
    """Resolve the dispatcher for this saved report; archive & raise if unknown."""
    from corehq.apps.userreports.models import CUSTOM_REPORT_PREFIX
    from corehq.apps.userreports.reports.view import (
        ConfigurableReport,
        CustomConfigurableReportDispatcher,
    )

    for dispatcher in (ProjectReportDispatcher, CustomProjectReportDispatcher):
        if dispatcher.prefix == self.report_type:
            return dispatcher()

    if self.report_type == 'configurable':
        if self.subreport_slug.startswith(CUSTOM_REPORT_PREFIX):
            return CustomConfigurableReportDispatcher()
        return ConfigurableReport()

    # unknown report type: mark the doc deleted so it stops being dispatched
    if self.doc_type != 'ReportConfig-Deleted':
        self.doc_type += '-Deleted'
        self.save()
        notify_exception(
            None,
            "This saved-report (id: %s) is unknown (report_type: %s) and so we have archived it" % (
                self._id,
                self.report_type
            )
        )
    raise UnsupportedSavedReportError("Unknown dispatcher: %s" % self.report_type)
def post(self, request):
    """View's dispatch method automatically calls this"""
    def _back_to_upload():
        return HttpResponseRedirect(reverse('upload_fixtures', args=[self.domain]))

    try:
        workbook = WorkbookJSONReader(request.file)
    except AttributeError:
        messages.error(request, _("Error processing your Excel (.xlsx) file"))
        return _back_to_upload()
    except Exception as e:
        messages.error(request, _("Invalid file-format. Please upload a valid xlsx file."))
        return _back_to_upload()

    try:
        upload_result = run_upload(request, self.domain, workbook)
        # surface every unknown group/user to the uploader
        for group_name in upload_result["unknown_groups"]:
            messages.error(request, _("Unknown group: '%(name)s'") % {'name': group_name})
        for user_name in upload_result["unknown_users"]:
            messages.error(request, _("Unknown user: '******'") % {'name': user_name})
    except WorksheetNotFound as e:
        messages.error(request, _("Workbook does not contain a sheet called '%(title)s'") % {'title': e.title})
        return _back_to_upload()
    except Exception as e:
        notify_exception(request)
        messages.error(request, _("Fixture upload could not complete due to the following error: '%(e)s'") % {'e': e})
        return _back_to_upload()

    return HttpResponseRedirect(reverse('fixture_view', args=[self.domain]))
def upload_fixture_api(request, domain, **kwargs):
    """API endpoint: upload a fixture workbook; returns a JSON code/message."""
    response_codes = {"fail": 405, "warning": 402, "success": 200}
    error_messages = {
        "invalid_post_req": "Invalid post request. Submit the form with field 'file-to-upload' to upload a fixture",
        "has_no_permission": "User {attr} doesn't have permission to upload fixtures",
        "invalid_file": "Error processing your file. Submit a valid (.xlsx) file",
        "has_no_sheet": "Workbook does not have a sheet called {attr}",
        "has_no_column": "Fixture upload couldn't succeed due to the following error: {attr}",
    }

    def _return_response(code, message):
        return HttpResponse(json.dumps({"code": code, "message": message}),
                            mimetype="application/json")

    try:
        upload_file = request.FILES["file-to-upload"]
    except Exception:
        return _return_response(response_codes["fail"], error_messages["invalid_post_req"])

    if not request.couch_user.has_permission(domain, Permissions.edit_data.name):
        return _return_response(
            response_codes["fail"],
            error_messages["has_no_permission"].format(attr=request.couch_user.username))

    try:
        workbook = WorkbookJSONReader(upload_file)
    except Exception:
        return _return_response(response_codes["fail"], error_messages["invalid_file"])

    try:
        upload_resp = run_upload(request, domain, workbook)  # error handle for other files
    except WorksheetNotFound as e:
        return _return_response(response_codes["fail"],
                                error_messages["has_no_sheet"].format(attr=e.title))
    except Exception as e:
        notify_exception(request)
        return _return_response(response_codes["fail"],
                                error_messages["has_no_column"].format(attr=e))

    num_unknown_groups = len(upload_resp["unknown_groups"])
    num_unknown_users = len(upload_resp["unknown_users"])

    if not num_unknown_users and not num_unknown_groups:
        num_uploads = upload_resp["number_of_fixtures"]
        success_message = "Successfully uploaded %d fixture%s." % (num_uploads, 's' if num_uploads > 1 else '')
        return _return_response(response_codes["success"], success_message)

    # partial success: report which groups/users were unknown
    resp_json = {"code": response_codes["warning"]}
    warn_groups = "%d group%s unknown" % (num_unknown_groups, 's are' if num_unknown_groups > 1 else ' is')
    warn_users = "%d user%s unknown" % (num_unknown_users, 's are' if num_unknown_users > 1 else ' is')
    resp_json["message"] = "Fixtures have been uploaded. But following "
    if num_unknown_groups:
        resp_json["message"] += "%s %s" % (warn_groups, upload_resp["unknown_groups"])
    if num_unknown_users:
        resp_json["message"] += "%s%s%s" % (("and following " if num_unknown_groups else ""),
                                            warn_users, upload_resp["unknown_users"])
    return HttpResponse(json.dumps(resp_json), mimetype="application/json")
def get_by_name(cls, name, strict=False):
    """Cached Domain lookup by name; caches misses (None) too, for 30 minutes."""
    if not name:
        # get_by_name should never be called with name as None (or '', etc)
        # I fixed the code in such a way that if I raise a ValueError
        # all tests pass and basic pages load,
        # but in order not to break anything in the wild,
        # I'm opting to notify by email if/when this happens
        # but fall back to the previous behavior of returning None
        try:
            raise ValueError('%r is not a valid domain name' % name)
        except ValueError:
            if settings.DEBUG:
                raise
            else:
                notify_exception(None, '%r is not a valid domain name' % name)
                return None

    cache_key = _domain_cache_key(name)
    MISSING = object()
    res = cache.get(cache_key, MISSING)
    # identity check, not `!=`: equality would call __ne__ on the cached
    # Domain object, which is not a reliable sentinel comparison
    if res is not MISSING:
        return res
    domain = cls._get_by_name(name, strict)
    # 30 mins, so any unforeseen invalidation bugs aren't too bad.
    cache.set(cache_key, domain, 30 * 60)
    return domain
def handle(self, **options):
    """Poll forever, creating due survey-action tasks every 10 seconds."""
    while True:
        try:
            self.create_tasks()
        # `except Exception` rather than a bare except, so KeyboardInterrupt /
        # SystemExit can still stop the management command
        except Exception:
            notify_exception(None, message="Could not fetch due survey actions")
        sleep(10)
def get_by_name(cls, name, strict=False):
    """Fetch a Domain by name, preferring a stale view unless strict=True."""
    if not name:
        # get_by_name should never be called with name as None (or '', etc)
        # I fixed the code in such a way that if I raise a ValueError
        # all tests pass and basic pages load,
        # but in order not to break anything in the wild,
        # I'm opting to notify by email if/when this happens
        # but fall back to the previous behavior of returning None
        try:
            raise ValueError('%r is not a valid domain name' % name)
        except ValueError:
            if settings.DEBUG:
                raise
            notify_exception(None, '%r is not a valid domain name' % name)
            return None

    def _fetch(stale=False):
        view_kwargs = {'stale': settings.COUCH_STALE_QUERY} if stale else {}
        hit = cls.view("domain/domains",
                       key=name,
                       reduce=False,
                       include_docs=True,
                       **view_kwargs).first()
        # A stale view may return a result with no doc if the doc has just been deleted.
        # In this case couchdbkit just returns the raw view result as a dict
        return hit if isinstance(hit, Domain) else None

    domain = _fetch(stale=(not strict))
    if domain is None and not strict:
        # on the off chance this is a brand new domain, try with strict
        domain = _fetch(stale=False)
    return domain
def wrap(cls, data):
    """Wrap an ExportSchema doc, normalizing its couch `seq` and sanity-checking it."""
    # coerce numeric seqs to text before wrapping (py2: int or long)
    if isinstance(data.get('seq'), (int, long)):
        data['seq'] = unicode(data['seq'])
    ret = super(ExportSchema, cls).wrap(data)
    if not ret.timestamp:
        # these won't work on bigcouch so we want to know if this happens
        notify_exception(
            None,
            'an export without a timestamp was accessed! %s (%s)' % (ret.index, ret._id)
        )
        # this isn't the cleanest nor is it perfect but in the event
        # this doc traversed databases somehow and now has a bad seq
        # id, make sure to just reset it to 0.
        # This won't catch if the seq is bad but not greater than the
        # current one).
        current_seq = cls.get_db().info()["update_seq"]
        try:
            if int(current_seq) < int(ret.seq):
                ret.seq = "0"
                ret.save()
        except ValueError:
            # seqs likely weren't ints (e.g. bigcouch)
            # this should never be possible (anything on bigcouch should
            # have a timestamp) so let's fail hard
            raise Exception('export %s is in a bad state (no timestamp or integer seq)' % ret._id)
    # TODO? handle seq -> datetime migration
    return ret
def process_incoming(msg, delay=True):
    """
    Attach the verified sender (if any) to msg, enforce domain scope, and run
    the configured SMS handlers until one handles the message.

    `delay` is currently unused in this code path.
    """
    v = VerifiedNumber.by_phone(msg.phone_number, include_pending=True)
    if v is not None and v.verified:
        msg.couch_recipient_doc_type = v.owner_doc_type
        msg.couch_recipient = v.owner_id
        msg.domain = v.domain
        msg.save()

    if msg.domain_scope:
        # only process messages for phones known to be associated with this domain
        if v is None or v.domain != msg.domain_scope:
            raise DomainScopeValidationError(
                'Attempted to simulate incoming sms from phone number not '
                'verified with this domain'
            )

    if v is not None and v.verified:
        for h in settings.SMS_HANDLERS:
            try:
                handler = to_function(h)
            # narrowed from a bare `except:` so system-exiting exceptions propagate
            except Exception:
                notify_exception(None, message=('error loading sms handler: %s' % h))
                continue
            try:
                was_handled = handler(v, msg.text, msg=msg)
            # modern `as` syntax instead of the py2-only `except Exception, e`
            except Exception:
                log_sms_exception(msg)
                was_handled = False
            if was_handled:
                break
def case_changed_receiver(sender, case, **kwargs):
    """Queue a phone-number sync task for the changed case; never raise."""
    try:
        from corehq.apps.sms.tasks import sync_case_phone_number
        sync_case_phone_number.delay(case)
    except Exception:
        notify_exception(
            None,
            message="Could not create sync_case_phone_number task for case %s" % case.case_id)
def process_cases_with_casedb(xforms, case_db, config=None):
    """
    Run case processing for `xforms` against `case_db`.

    Updates/creates cases, tags them with the form's domain, updates the
    relevant sync log, fires the cases_received signal (best-effort), and
    reconciles each case's actions. Returns the case_processing_result with
    its cases set.
    """
    config = config or CaseProcessingConfig()
    case_processing_result = _get_or_update_cases(xforms, case_db)
    cases = case_processing_result.cases
    xform = xforms[0]

    # attach domain and export tag
    domain = xform.domain

    def attach_extras(case):
        case.domain = domain
        if domain:
            assert hasattr(case, 'type')
            case['#export_tag'] = ["domain", "type"]
        return case

    cases = [attach_extras(case) for case in cases]

    # handle updating the sync records for apps that use sync mode
    try:
        relevant_log = xform.get_sync_token()
    except ResourceNotFound:
        # missing token is tolerated only when loose validation is toggled on
        if LOOSE_SYNC_TOKEN_VALIDATION.enabled(xform.domain):
            relevant_log = None
        else:
            raise

    if relevant_log:
        # in reconciliation mode, things can be unexpected
        relevant_log.strict = config.strict_asserts
        from casexml.apps.case.util import update_sync_log_with_checks
        update_sync_log_with_checks(relevant_log, xform, cases, case_db,
                                    case_id_blacklist=config.case_id_blacklist)

    try:
        cases_received.send(sender=None, xform=xform, cases=cases)
    except Exception as e:
        # don't let the exceptions in signals prevent standard case processing
        notify_exception(
            None,
            'something went wrong sending the cases_received signal '
            'for form %s: %s' % (xform._id, e)
        )

    for case in cases:
        ActionsUpdateStrategy(case).reconcile_actions_if_necessary(xform)
        case_db.mark_changed(case)
        # warn when the set of forms referenced by actions diverges from the
        # case's recorded xform_ids
        action_xforms = {action.xform_id for action in case.actions if action.xform_id}
        mismatched_forms = action_xforms ^ set(case.xform_ids)
        if mismatched_forms:
            logging.warning(
                "CASE XFORM MISMATCH /a/{},{}".format(
                    domain,
                    case.case_id
                )
            )

    case_processing_result.set_cases(cases)
    return case_processing_result
def custom_content_handler(reminder, handler, recipient, catch_up=False):
    """
    This method is invoked from the reminder event-handling thread to
    retrieve the next message to send. Returns the message text or None.
    """
    case = reminder.case
    if catch_up:
        order = reminder.current_event_sequence_num
    else:
        message_offset = get_message_offset(case)
        # explicit None check instead of the old `assert` + bare except
        # (asserts are stripped under `python -O`)
        if message_offset is None:
            notify_exception(None, message=("Couldn't calculate the message offset. Check that "
                                            "the right case properties are set."))
            return None
        order = get_message_number(reminder) - message_offset

    num_missed_windows = get_num_missed_windows(case)
    # normal mode only sends messages past the missed windows;
    # catch-up mode only sends the missed ones
    if (((not catch_up) and (order < num_missed_windows)) or
            (catch_up and (order >= num_missed_windows))):
        return None

    randomized_message = get_randomized_message(case, order)
    if randomized_message:
        message = FRIMessageBankMessage.get(randomized_message.message_bank_message_id)
        return message.message
    else:
        return None
def get_action_response(self, action):
    """Apply an activate/deactivate/delete action to the reminder definition."""
    try:
        assert self.reminder.domain == self.domain
        assert self.reminder.doc_type == "CaseReminderHandler"

        if self.reminder.locked:
            return {
                'success': False,
                'locked': True,
            }

        if action in (ACTION_ACTIVATE, ACTION_DEACTIVATE):
            self.reminder.active = (action == ACTION_ACTIVATE)
            self.reminder.save()
        elif action == ACTION_DELETE:
            self.reminder.retire()

        return {
            'success': True,
        }
    except Exception:
        notify_exception(
            None,
            message="Couldn't process action '%s' for reminder definition" % action,
            details={
                'domain': self.domain,
                'handler_id': self.reminder_id,
            })
        return {
            'success': False,
        }
error_occurred = True error_msg = "" if sse.xformsresponse and sse.xformsresponse.event: xpath_arg = None if survey_keyword_action.use_named_args: xpath_arg = \ {v: k for k, v in survey_keyword_action.named_args.items()} field_name = get_question_id(sse.xformsresponse, xpath_arg) error_msg = get_message(MSG_FIELD_DESCRIPTOR, verified_number, (field_name, )) error_msg = "%s%s" % (error_msg, sse.response_text) log_error(MessagingEvent.ERROR_COULD_NOT_PROCESS_STRUCTURED_SMS, logged_subevent) except Exception: notify_exception(None, message=("Could not process structured sms for" "contact %s, domain %s, keyword %s" % (contact_id, domain, keyword))) error_occurred = True error_msg = get_message(MSG_TOUCHFORMS_ERROR, verified_number) log_error(MessagingEvent.ERROR_TOUCHFORMS_ERROR, logged_subevent) clean_up_and_send_response(msg, contact, session, error_occurred, error_msg, verified_number, send_response, logged_event, logged_subevent) return not error_occurred def add_keyword_metadata(msg, session): metadata = MessageMetadata( workflow=WORKFLOW_KEYWORD,
from __future__ import absolute_import
from casexml.apps.case.models import CommCareCase
from casexml.apps.case.signals import case_post_save
from corehq.messaging.tasks import sync_case_for_messaging
from corehq.form_processor.models import CommCareCaseSQL
from corehq.form_processor.signals import sql_case_post_save
from dimagi.utils.logging import notify_exception


def messaging_case_changed_receiver(sender, case, **kwargs):
    """Queue a messaging sync for the changed case; swallow queueing failures."""
    try:
        sync_case_for_messaging.delay(case.domain, case.case_id)
    except Exception:
        notify_exception(
            None,
            message="Could not create messaging case changed task. Is RabbitMQ running?"
        )


def connect_signals():
    """Hook the receiver up to both the couch and sql case-save signals."""
    case_post_save.connect(
        messaging_case_changed_receiver,
        CommCareCase,
        dispatch_uid='messaging_couch_case_receiver')
    sql_case_post_save.connect(
        messaging_case_changed_receiver,
        CommCareCaseSQL,
        dispatch_uid='messaging_sql_case_receiver')
def log_celery_task_exception(task_id, exception, traceback, einfo, *args, **kwargs):
    """Celery task-failure hook: report the failure via notify_exception."""
    # Every other notify_exception call site in this codebase passes the
    # request (or None) first and the message second; previously the message
    # string was passed in the request position.
    notify_exception(None, message='Celery task failure', exec_info=einfo.exc_info)
class CreateScheduleInstanceActionDefinition(CaseRuleActionDefinition):
    """Case rule action that creates schedule instances from a schedule.

    Exactly one of ``alert_schedule`` / ``timed_schedule`` is expected to be
    set; the ``schedule`` property enforces this at access time.
    """
    alert_schedule = models.ForeignKey('scheduling.AlertSchedule', null=True, on_delete=models.PROTECT)
    timed_schedule = models.ForeignKey('scheduling.TimedSchedule', null=True, on_delete=models.PROTECT)

    # A List of [recipient_type, recipient_id]
    recipients = jsonfield.JSONField(default=list)

    # (Optional, ignored if None) The name of a case property whose value will be tracked
    # over time on the schedule instance as last_reset_case_property_value.
    # Every time the case property's value changes, the schedule's start date is
    # reset to the current date.
    reset_case_property_name = models.CharField(max_length=126, null=True)

    # A dict with the structure represented by SchedulerModuleInfo;
    # when enabled=True in this dict, the framework uses info related to the
    # specified visit number to set the start date for any schedule instances
    # created from this CreateScheduleInstanceActionDefinition.
    scheduler_module_info = jsonfield.JSONField(default=dict)

    class SchedulerModuleInfo(JsonObject):
        # Set to True to enable setting the start date of any schedule instances
        # based on the visit scheduler info details below
        enabled = BooleanProperty(default=False)

        # The app that contains the visit scheduler form being referenced
        app_id = StringProperty()

        # The unique_id of the visit scheduler form in the above app
        form_unique_id = StringProperty()

        # The visit number from which to pull the start date for any schedule
        # instances; this should be the 0-based index in the FormSchedule.visits list
        visit_number = IntegerProperty()

        # VISIT_WINDOW_START - the start date used will be the first date in the window
        # VISIT_WINDOW_END - the start date used will be the last date in the window
        # VISIT_WINDOW_DUE_DATE - the start date used will be the due date of the visit
        window_position = StringProperty(choices=[
            VISIT_WINDOW_START, VISIT_WINDOW_END, VISIT_WINDOW_DUE_DATE
        ])

    @property
    def schedule(self):
        """Return whichever schedule is configured; raise ValueError if neither is."""
        # Check the *_id attributes to avoid a DB fetch just to test presence.
        if self.alert_schedule_id:
            return self.alert_schedule
        elif self.timed_schedule_id:
            return self.timed_schedule

        raise ValueError("Expected a schedule")

    @schedule.setter
    def schedule(self, value):
        """Set the schedule, clearing the other type so only one is ever active."""
        from corehq.messaging.scheduling.models import AlertSchedule, TimedSchedule

        self.alert_schedule = None
        self.timed_schedule = None

        if isinstance(value, AlertSchedule):
            self.alert_schedule = value
        elif isinstance(value, TimedSchedule):
            self.timed_schedule = value
        else:
            raise TypeError(
                "Expected an instance of AlertSchedule or TimedSchedule")

    def notify_scheduler_integration_exception(self, case, scheduler_module_info):
        """Report a visit-scheduler integration error, tagged with domain and case id."""
        details = scheduler_module_info.to_json()
        details.update({
            'domain': case.domain,
            'case_id': case.case_id,
        })
        notify_exception(
            None,
            message="Error in messaging / visit scheduler integration",
            details=details)
def update_calculated_properties():
    """Recompute calculated properties for every domain and push them to ES.

    Iterates all domains found in the domains index; a failure for one
    domain is reported via notify_exception and does not abort the loop.
    """
    results = DomainES().fields(["name", "_id"]).run().hits
    all_stats = _all_domain_stats()
    for r in results:
        dom = r["name"]
        try:
            calced_props = {
                "_id": r["_id"],
                "cp_n_web_users": int(all_stats["web_users"].get(dom, 0)),
                "cp_n_active_cc_users": int(CALC_FNS["mobile_users"](dom)),
                "cp_n_cc_users": int(all_stats["commcare_users"].get(dom, 0)),
                "cp_n_active_cases": int(CALC_FNS["cases_in_last"](dom, 120)),
                "cp_n_users_submitted_form": total_distinct_users([dom]),
                "cp_n_inactive_cases": int(CALC_FNS["inactive_cases_in_last"](dom, 120)),
                "cp_n_30_day_cases": int(CALC_FNS["cases_in_last"](dom, 30)),
                "cp_n_60_day_cases": int(CALC_FNS["cases_in_last"](dom, 60)),
                "cp_n_90_day_cases": int(CALC_FNS["cases_in_last"](dom, 90)),
                "cp_n_cases": int(all_stats["cases"].get(dom, 0)),
                "cp_n_forms": int(all_stats["forms"].get(dom, 0)),
                "cp_n_forms_30_d": int(CALC_FNS["forms_in_last"](dom, 30)),
                "cp_n_forms_60_d": int(CALC_FNS["forms_in_last"](dom, 60)),
                "cp_n_forms_90_d": int(CALC_FNS["forms_in_last"](dom, 90)),
                "cp_first_form": CALC_FNS["first_form_submission"](dom, False),
                "cp_last_form": CALC_FNS["last_form_submission"](dom, False),
                "cp_is_active": CALC_FNS["active"](dom),
                "cp_has_app": CALC_FNS["has_app"](dom),
                "cp_last_updated": json_format_datetime(datetime.utcnow()),
                "cp_n_in_sms": int(CALC_FNS["sms"](dom, "I")),
                "cp_n_out_sms": int(CALC_FNS["sms"](dom, "O")),
                "cp_n_sms_ever": int(CALC_FNS["sms_in_last"](dom)),
                "cp_n_sms_30_d": int(CALC_FNS["sms_in_last"](dom, 30)),
                "cp_n_sms_60_d": int(CALC_FNS["sms_in_last"](dom, 60)),
                "cp_n_sms_90_d": int(CALC_FNS["sms_in_last"](dom, 90)),
                "cp_sms_ever": int(CALC_FNS["sms_in_last_bool"](dom)),
                "cp_sms_30_d": int(CALC_FNS["sms_in_last_bool"](dom, 30)),
                "cp_n_sms_in_30_d": int(CALC_FNS["sms_in_in_last"](dom, 30)),
                "cp_n_sms_in_60_d": int(CALC_FNS["sms_in_in_last"](dom, 60)),
                "cp_n_sms_in_90_d": int(CALC_FNS["sms_in_in_last"](dom, 90)),
                "cp_n_sms_out_30_d": int(CALC_FNS["sms_out_in_last"](dom, 30)),
                "cp_n_sms_out_60_d": int(CALC_FNS["sms_out_in_last"](dom, 60)),
                "cp_n_sms_out_90_d": int(CALC_FNS["sms_out_in_last"](dom, 90)),
                "cp_n_j2me_30_d": int(CALC_FNS["j2me_forms_in_last"](dom, 30)),
                "cp_n_j2me_60_d": int(CALC_FNS["j2me_forms_in_last"](dom, 60)),
                "cp_n_j2me_90_d": int(CALC_FNS["j2me_forms_in_last"](dom, 90)),
                "cp_j2me_90_d_bool": int(CALC_FNS["j2me_forms_in_last_bool"](dom, 90)),
                "cp_300th_form": CALC_FNS["300th_form_submission"](dom)
            }
            # Drop date-valued props that could not be computed rather than
            # indexing explicit nulls.
            for prop in ('cp_first_form', 'cp_last_form', 'cp_300th_form'):
                if calced_props[prop] is None:
                    del calced_props[prop]
            send_to_elasticsearch("domains", calced_props)
        # `except Exception as e` replaces the Python-2-only `except Exception, e`
        # for consistency with the rest of this file.
        except Exception as e:
            notify_exception(
                None,
                message='Domain {} failed on stats calculations with {}'.format(dom, e))
def get_apps_base_context(request, domain, app):
    """Build the template context shared by app-manager views.

    Includes language selection, user timezone, the app itself, and a set of
    feature-flag-driven display options for non-remote v2 apps.
    """
    lang, langs = get_langs(request, app)

    # Anonymous/system requests may have no couch_user; fall back to no timezone.
    if getattr(request, 'couch_user', None):
        timezone = get_timezone_for_user(request.couch_user, domain)
    else:
        timezone = None

    context = {
        'lang': lang,
        'langs': langs,
        'domain': domain,
        'app': app,
        'app_subset': {
            'commcare_minor_release': app.commcare_minor_release,
            'doc_type': app.get_doc_type(),
            # Remote apps don't expose modules, so report no form counts for them.
            'form_counts_by_module': [len(m.forms) for m in app.modules] if not app.is_remote_app() else [],
            'version': app.version,
        } if app else {},
        'timezone': timezone,
    }

    if app and not app.is_remote_app():
        app.assert_app_v2()
        show_advanced = (
            toggles.APP_BUILDER_ADVANCED.enabled(domain)
            or getattr(app, 'commtrack_enabled', False)
        )
        show_biometric = (
            toggles.BIOMETRIC_INTEGRATION.enabled(domain)
            and app.is_biometric_enabled
        )
        disable_report_modules = (
            is_master_linked_domain(domain)
            and not toggles.MOBILE_UCR_LINKED_DOMAIN.enabled(domain)
        )

        # ideally this should be loaded on demand
        practice_users = []
        if app.enable_practice_users:
            try:
                practice_users = get_practice_mode_mobile_workers(request.domain)
            except ESError:
                # Best-effort: show no practice users rather than failing the page.
                notify_exception(request, 'Error getting practice mode mobile workers')

        latest_version_for_build_profiles = {}
        if toggles.RELEASE_BUILDS_PER_PROFILE.enabled(domain):
            latest_version_for_build_profiles = get_latest_enabled_versions_per_profile(app.get_id)

        context.update({
            'show_advanced': show_advanced,
            'show_biometric': show_biometric,
            'show_report_modules': toggles.MOBILE_UCR.enabled(domain),
            'disable_report_modules': disable_report_modules,
            'show_shadow_modules': toggles.APP_BUILDER_SHADOW_MODULES.enabled(domain),
            'show_shadow_forms': show_advanced,
            'show_training_modules': toggles.TRAINING_MODULE.enabled(domain) and app.enable_training_modules,
            'practice_users': [{"id": u['_id'], "text": u["username"]} for u in practice_users],
            'latest_version_for_build_profiles': latest_version_for_build_profiles,
        })

    return context
def get_app_view_context(request, app):
    """
    This provides the context to render commcare settings on Edit Application Settings page
    This is where additional app or domain specific context can be added to any individual
    commcare-setting defined in commcare-app-settings.yaml or commcare-profile-settings.yaml
    """
    context = {}

    # Deep-copy so per-request mutations below don't leak into the cached layout.
    settings_layout = copy.deepcopy(
        get_commcare_settings_layout(app)
    )

    # Filter each section's settings by toggle, privilege, and disable_if_true,
    # and mark linked-app settings that are inherited from the master.
    for section in settings_layout:
        new_settings = []
        for setting in section['settings']:
            toggle_name = setting.get('toggle')
            if toggle_name and not toggle_enabled(request, toggle_name):
                continue
            privilege_name = setting.get('privilege')
            if privilege_name and not has_privilege(request, privilege_name):
                continue
            disable_if_true = setting.get('disable_if_true')
            if disable_if_true and getattr(app, setting['id']):
                continue
            if is_linked_app(app):
                if setting['id'] in app.SUPPORTED_SETTINGS:
                    if setting['id'] not in app.linked_app_attrs:
                        setting['is_inherited'] = True
            new_settings.append(setting)
        section['settings'] = new_settings

    app_view_options = {
        'permissions': {
            'cloudcare': has_privilege(request, privileges.CLOUDCARE),
            'case_sharing_groups': has_privilege(request, privileges.CASE_SHARING_GROUPS),
        },
        'sections': settings_layout,
        'urls': {
            'save': reverse("edit_commcare_settings", args=(app.domain, app.id)),
        },
        'user': {
            'is_previewer': request.couch_user.is_previewer(),
        },
        'values': get_settings_values(app),
        'warning': _("This is not an allowed value for this field"),
    }
    if (app.get_doc_type() == 'Application'
            and toggles.CUSTOM_PROPERTIES.enabled(request.domain)
            and 'custom_properties' in getattr(app, 'profile', {})):
        custom_properties_array = [{'key': p[0], 'value': p[1]} for p in
                                   app.profile.get('custom_properties').items()]
        app_view_options.update({'customProperties': custom_properties_array})
    context.update({
        'app_view_options': app_view_options,
    })

    # Build the CommCare version menu, hiding superuser-only builds from
    # regular users.
    build_config = CommCareBuildConfig.fetch()
    options = build_config.get_menu()
    if not request.user.is_superuser and not toggles.IS_CONTRACTOR.enabled(request.user.username):
        options = [option for option in options if not option.superuser_only]
    options_map = defaultdict(lambda: {"values": [], "value_names": []})
    for option in options:
        builds = options_map[option.build.major_release()]
        builds["values"].append(option.build.to_string())
        builds["value_names"].append(option.get_label())
        if "default" not in builds:
            app_ver = MAJOR_RELEASE_TO_VERSION[option.build.major_release()]
            builds["default"] = build_config.get_default(app_ver).to_string()

    def _get_setting(setting_type, setting_id):
        # get setting dict from settings_layout
        if not settings_layout:
            return None
        matched = [x for x in [
            setting for section in settings_layout
            for setting in section['settings']
        ] if x['type'] == setting_type and x['id'] == setting_id]
        if matched:
            return matched[0]
        else:
            return None

    build_spec_setting = _get_setting('hq', 'build_spec')
    if build_spec_setting:
        build_spec_setting['options_map'] = options_map
        build_spec_setting['default_app_version'] = app.application_version

    practice_user_setting = _get_setting('hq', 'practice_mobile_worker_id')
    if practice_user_setting and has_privilege(request, privileges.PRACTICE_MOBILE_WORKERS):
        try:
            practice_users = get_practice_mode_mobile_workers(request.domain)
        except ESError:
            # Best-effort: render an empty picker rather than failing the page.
            notify_exception(request, 'Error getting practice mode mobile workers')
            practice_users = []
        practice_user_setting['values'] = [''] + [u['_id'] for u in practice_users]
        practice_user_setting['value_names'] = [_('Not set')] + [u['username'] for u in practice_users]

    context.update({
        'bulk_ui_translation_upload': {
            'action': reverse('upload_bulk_ui_translations', args=(app.domain, app.get_id)),
            'download_url': reverse('download_bulk_ui_translations', args=(app.domain, app.get_id)),
            'adjective': _("U\u200BI translation"),
            'plural_noun': _("U\u200BI translations"),
        },
        'bulk_app_translation_upload': {
            'action': reverse('upload_bulk_app_translations', args=(app.domain, app.get_id)),
            'download_url': reverse('download_bulk_app_translations', args=(app.domain, app.get_id)),
            'adjective': _("app translation"),
            'plural_noun': _("app translations"),
            'can_select_language': toggles.BULK_UPDATE_MULTIMEDIA_PATHS.enabled_for_request(request),
            'can_validate_app_translations': toggles.VALIDATE_APP_TRANSLATIONS.enabled_for_request(request),
        },
    })
    # The upload forms read the *_upload dicts just added to context, so this
    # update must come after the one above.
    context.update({
        'bulk_ui_translation_form': get_bulk_upload_form(
            context,
            context_key="bulk_ui_translation_upload",
        ),
        'bulk_app_translation_form': get_bulk_upload_form(
            context,
            context_key="bulk_app_translation_upload",
            form_class=AppTranslationsBulkUploadForm,
        ),
    })
    context.update({
        'smart_lang_display_enabled': getattr(app, 'smart_lang_display', False)
    })
    context.update({
        'is_linked_app': is_linked_app(app),
        'is_remote_app': is_remote_app(app),
    })
    if is_linked_app(app):
        try:
            master_versions_by_id = app.get_latest_master_releases_versions()
            master_briefs = [brief for brief in app.get_master_app_briefs() if brief.id in master_versions_by_id]
        except RemoteRequestError:
            messages.error(request, "Unable to reach remote master server. Please try again later.")
            master_versions_by_id = {}
            master_briefs = []
        upstream_brief = {}
        for b in master_briefs:
            if b.id == app.upstream_app_id:
                upstream_brief = b
        context.update({
            'master_briefs': master_briefs,
            'master_versions_by_id': master_versions_by_id,
            'multiple_masters': app.enable_multi_master and len(master_briefs) > 1,
            'upstream_version': app.upstream_version,
            'upstream_brief': upstream_brief,
            'upstream_url': _get_upstream_url(app, request.couch_user),
            'upstream_url_template': _get_upstream_url(app, request.couch_user, master_app_id='---'),
        })
    return context
def _get_and_send_report(self, language, emails):
    """Render this scheduled report in the given language and email it.

    On failure the error is reported; if excel attachments were produced they
    are sent anyway, and oversized-body / ES errors fall back to exporting
    each report config as an excel attachment via a mocked request.
    """
    from corehq.apps.reports.views import get_scheduled_report_response, render_full_report_notification
    from corehq.apps.reports.standard.deployments import ApplicationStatusReport

    with localize(language):
        title = (
            _(DEFAULT_REPORT_NOTIF_SUBJECT)
            if self.email_subject == DEFAULT_REPORT_NOTIF_SUBJECT
            else self.email_subject
        )
        attach_excel = getattr(self, 'attach_excel', False)
        excel_files = None
        try:
            report_text, excel_files = get_scheduled_report_response(
                self.owner, self.domain, self._id, attach_excel=attach_excel,
                send_only_active=True)

            # Both are empty if ALL the ReportConfigs in the ReportNotification
            # have a start_date in the future.
            if not report_text and not excel_files:
                return

            for email in emails:
                body = render_full_report_notification(None, report_text, email, self).content
                send_html_email_async(
                    title, email, body,
                    email_from=settings.DEFAULT_FROM_EMAIL,
                    file_attachments=excel_files,
                    smtp_exception_skip_list=LARGE_FILE_SIZE_ERROR_CODES)
        except Exception as er:
            notify_exception(
                None,
                message="Encountered error while generating report or sending email",
                details={
                    'subject': title,
                    'recipients': str(emails),
                    'error': er,
                })
            if excel_files:
                # NOTE(review): 'email' here is whatever the loop above last
                # bound; if the exception fired before the loop started, this
                # raises NameError — confirm intended recipient handling.
                message = _(
                    "Unable to generate email report. Excel files are attached."
                )
                send_html_email_async(
                    title, email, message,
                    email_from=settings.DEFAULT_FROM_EMAIL,
                    file_attachments=excel_files,
                    smtp_exception_skip_list=LARGE_FILE_SIZE_ERROR_CODES)
            elif getattr(er, 'smtp_code', None) in LARGE_FILE_SIZE_ERROR_CODES or type(
                    er) == ESError:
                # If the email doesn't work because it is too large to fit in the HTML body,
                # send it as an excel attachment, by creating a mock request with the right data.
                for report_config in self.configs:
                    mock_request = HttpRequest()
                    mock_request.couch_user = self.owner
                    mock_request.user = self.owner.get_django_user()
                    mock_request.domain = self.domain
                    mock_request.couch_user.current_domain = self.domain
                    mock_request.couch_user.language = self.language
                    mock_request.method = 'GET'
                    mock_request.bypass_two_factor = True

                    mock_query_string_parts = [
                        report_config.query_string, 'filterSet=true'
                    ]
                    mock_request.GET = QueryDict(
                        '&'.join(mock_query_string_parts))
                    request_data = vars(mock_request)
                    request_data[
                        'couch_user'] = mock_request.couch_user.userID
                    if report_config.report_slug != ApplicationStatusReport.slug:
                        # ApplicationStatusReport doesn't have date filter
                        date_range = report_config.get_date_range()
                        start_date = datetime.strptime(
                            date_range['startdate'], '%Y-%m-%d')
                        end_date = datetime.strptime(
                            date_range['enddate'], '%Y-%m-%d')
                        datespan = DateSpan(start_date, end_date)
                        request_data['datespan'] = datespan

                    full_request = {
                        'request': request_data,
                        'domain': request_data['domain'],
                        'context': {},
                        'request_params': json_request(request_data['GET'])
                    }
                    export_all_rows_task(report_config.report, full_request, emails, title)
def get_form_view_context_and_template(request, domain, form, langs, current_lang, messages=messages):
    """Build the (template name, context) pair for the app-manager form view.

    Parses and validates the form's XForm, collecting user-visible errors
    rather than raising, then assembles case-config and workflow options.
    """
    xform_questions = []
    xform = None
    form_errors = []
    xform_validation_errored = False
    xform_validation_missing = False

    try:
        xform = form.wrapped_xform()
    except XFormException as e:
        form_errors.append("Error in form: %s" % e)
    except Exception as e:
        logging.exception(e)
        form_errors.append("Unexpected error in form: %s" % e)

    if xform and xform.exists():
        if xform.already_has_meta():
            messages.warning(
                request,
                "This form has a meta block already! "
                "It may be replaced by CommCare HQ's standard meta block."
            )

        try:
            xform_questions = xform.get_questions(langs, include_triggers=True)
            form.validate_form()
        except etree.XMLSyntaxError as e:
            form_errors.append("Syntax Error: %s" % e)
        except AppEditingError as e:
            form_errors.append("Error in application: %s" % e)
        except XFormValidationError:
            xform_validation_errored = True
            # showing these messages is handled by validate_form_for_build ajax
            pass
        except XFormValidationFailed:
            xform_validation_missing = True
            messages.warning(request, _("Unable to validate form due to server error."))
        except XFormException as e:
            form_errors.append("Error in form: %s" % e)
        # any other kind of error should fail hard,
        # but for now there are too many for that to be practical
        except Exception as e:
            if settings.DEBUG:
                raise
            notify_exception(request, 'Unexpected Build Error')
            form_errors.append("Unexpected System Error: %s" % e)
        else:
            # remove upload questions (attachments) until MM Case Properties
            # are released to general public
            is_previewer = toggles.MM_CASE_PROPERTIES.enabled_for_request(request)
            xform_questions = [q for q in xform_questions
                               if q["tag"] != "upload" or is_previewer]

        if not form_errors and not xform_validation_missing and not xform_validation_errored:
            try:
                form_action_errors = form.validate_for_build()
                if not form_action_errors:
                    form.add_stuff_to_xform(xform)
            except CaseError as e:
                messages.error(request, "Error in Case Management: %s" % e)
            except XFormException as e:
                messages.error(request, six.text_type(e))
            except Exception as e:
                if settings.DEBUG:
                    raise
                logging.exception(six.text_type(e))
                messages.error(request, "Unexpected Error: %s" % e)

    # xform may be None here, in which case getting languages fails and we
    # fall back to an empty list.
    try:
        languages = xform.get_languages()
    except Exception:
        languages = []

    for err in form_errors:
        messages.error(request, err)

    module_case_types = []
    app = form.get_app()
    all_modules = list(app.get_modules())
    for module in all_modules:
        for case_type in module.get_case_types():
            module_case_types.append({
                'id': module.unique_id,
                'module_name': trans(module.name, langs),
                'case_type': case_type,
                'module_type': module.doc_type
            })
    module = form.get_module()

    if not form.unique_id:
        form.get_unique_id()
        app.save()

    allow_usercase = domain_has_privilege(request.domain, privileges.USER_CASE)
    valid_index_names = list(DEFAULT_CASE_INDEX_IDENTIFIERS.values())
    if allow_usercase:
        valid_index_names.append(USERCASE_PREFIX[0:-1])     # strip trailing slash

    form_has_schedule = isinstance(form, AdvancedForm) and module.has_schedule
    case_config_options = {
        'caseType': form.get_case_type(),
        'moduleCaseTypes': module_case_types,
        'propertiesMap': get_all_case_properties(app),
        'propertyDescriptions': get_case_property_description_dict(domain),
        'questions': xform_questions,
        'reserved_words': load_case_reserved_words(),
        'usercasePropertiesMap': get_usercase_properties(app),
    }
    context = {
        'nav_form': form,
        'xform_languages': languages,
        'form_errors': form_errors,
        'xform_validation_errored': xform_validation_errored,
        'xform_validation_missing': xform_validation_missing,
        'allow_form_copy': isinstance(form, (Form, AdvancedForm)),
        'allow_form_filtering': not form_has_schedule,
        'allow_form_workflow': True,
        'uses_form_workflow': form.post_form_workflow == WORKFLOW_FORM,
        'allow_usercase': allow_usercase,
        'is_usercase_in_use': is_usercase_in_use(request.domain),
        'is_module_filter_enabled': app.enable_module_filtering,
        'is_training_module': module.is_training_module,
        'is_allowed_to_be_release_notes_form': form.is_allowed_to_be_release_notes_form,
        'root_requires_same_case': module.root_requires_same_case(),
        'is_case_list_form': form.is_case_list_form,
        'edit_name_url': reverse('edit_form_attr', args=[app.domain, app.id, form.unique_id, 'name']),
        'form_filter_patterns': {
            'case_substring': CASE_XPATH_SUBSTRING_MATCHES,
            'usercase_substring': USER_CASE_XPATH_SUBSTRING_MATCHES,
        },
        'custom_instances': [
            {'instanceId': instance.instance_id, 'instancePath': instance.instance_path}
            for instance in form.custom_instances
        ],
        'custom_assertions': [
            {'test': assertion.test, 'text': assertion.text.get(current_lang)}
            for assertion in form.custom_assertions
        ],
        'can_preview_form': request.couch_user.has_permission(domain, 'edit_data'),
        'form_icon': None,
        'legacy_select2': False,
    }

    if toggles.CUSTOM_ICON_BADGES.enabled(domain):
        context['form_icon'] = form.custom_icon if form.custom_icon else CustomIcon()

    if toggles.COPY_FORM_TO_APP.enabled_for_request(request):
        context['apps_modules'] = get_apps_modules(domain, app.id, module.unique_id)

    if context['allow_form_workflow'] and toggles.FORM_LINK_WORKFLOW.enabled(domain):
        def qualified_form_name(form, auto_link):
            # "* Module -> Form" when the form can be auto-linked.
            module_name = trans(module.name, langs)
            form_name = trans(form.name, langs)
            star = '* ' if auto_link else ' '
            return "{}{} -> {}".format(star, module_name, form_name)

        modules = [m for m in all_modules if m.case_type == module.case_type]
        if getattr(module, 'root_module_id', None) and module.root_module not in modules:
            modules.append(module.root_module)
        auto_linkable_forms = list(itertools.chain.from_iterable(list(m.get_forms()) for m in modules))

        def linkable_form(candidate_form):
            auto_link = candidate_form in auto_linkable_forms
            return {
                'unique_id': candidate_form.unique_id,
                'name': qualified_form_name(candidate_form, auto_link),
                'auto_link': auto_link
            }

        context['linkable_forms'] = [
            linkable_form(candidate_form) for candidate_module in all_modules
            for candidate_form in candidate_module.get_forms()
        ]

    if isinstance(form, AdvancedForm):
        def commtrack_programs():
            if app.commtrack_enabled:
                programs = Program.by_domain(app.domain)
                return [{'value': program.get_id, 'label': program.name} for program in programs]
            else:
                return []

        all_programs = [{'value': '', 'label': _('All Programs')}]
        case_config_options.update({
            'commtrack_enabled': app.commtrack_enabled,
            'commtrack_programs': all_programs + commtrack_programs(),
            'module_id': module.unique_id,
            'save_url': reverse("edit_advanced_form_actions", args=[app.domain, app.id, form.unique_id]),
        })
        if form.form_type == "shadow_form":
            case_config_options.update({
                'actions': form.extra_actions,
                'isShadowForm': True,
            })
        else:
            case_config_options.update({
                'actions': form.actions,
                'isShadowForm': False,
            })
        if getattr(module, 'has_schedule', False):
            schedule_options = get_schedule_context(form)
            schedule_options.update({
                'phase': schedule_options['schedule_phase'],
                'questions': xform_questions,
                'save_url': reverse("edit_visit_schedule", args=[app.domain, app.id, form.unique_id]),
                'schedule': form.schedule,
            })
            context.update({
                'schedule_options': schedule_options,
            })
    else:
        context.update({
            'show_custom_ref': toggles.APP_BUILDER_CUSTOM_PARENT_REF.enabled_for_request(request),
        })
        case_config_options.update({
            'actions': form.actions,
            'allowUsercase': allow_usercase,
            'save_url': reverse("edit_form_actions", args=[app.domain, app.id, form.unique_id]),
            'valid_index_names': valid_index_names,
        })

    context.update({'case_config_options': case_config_options})
    return "app_manager/form_view.html", context
def _on_error(change_meta, exc_info):
    """Audit-log a failed async Kafka publish and report the exception."""
    _audit_log(CHANGE_ERROR, change_meta)
    error_details = change_meta.to_json()
    notify_exception(
        None,
        'Problem sending change to Kafka (async)',
        details=error_details,
        exec_info=exc_info,
    )
from casexml.apps.case.models import CommCareCase
from casexml.apps.case.signals import case_post_save
from corehq.apps.reminders.tasks import case_changed
from corehq.form_processor.models import CommCareCaseSQL
from corehq.form_processor.signals import sql_case_post_save
from dimagi.utils.logging import notify_exception


def case_changed_receiver(sender, case, **kwargs):
    """
    Spawns a task to update reminder instances tied to the given case.

    Best-effort: a failure to queue the task is reported but never raised,
    so the case save itself is unaffected.
    """
    try:
        case_changed.delay(case.domain, case.case_id)
    except Exception:
        notify_exception(
            None,
            message="Could not create reminders case_changed task for case %s" % case.case_id)


# Listen to both the couch and the SQL case post-save signals.
for _signal, _sender in ((case_post_save, CommCareCase),
                         (sql_case_post_save, CommCareCaseSQL)):
    _signal.connect(case_changed_receiver, _sender)
def get_report_content(self, lang, attach_excel=False):
    """
    Get the report's HTML content as rendered by the static view format.
    """
    # Accessing self.report can itself fail; treat any error the same as
    # "report still exists" and continue to the dispatch attempt below.
    try:
        if self.report is None:
            return ReportContent(
                _("The report used to create this scheduled report is no"
                  " longer available on CommCare HQ. Please delete this"
                  " scheduled report and create a new one using an available"
                  " report."),
                None,
            )
    except Exception:
        pass

    from django.http import HttpRequest, QueryDict
    # Build a fake GET request impersonating the report owner so the report
    # view can be dispatched outside a real request/response cycle.
    mock_request = HttpRequest()
    mock_request.couch_user = self.owner
    mock_request.user = self.owner.get_django_user()
    mock_request.domain = self.domain
    mock_request.couch_user.current_domain = self.domain
    mock_request.couch_user.language = lang
    mock_request.method = 'GET'

    mock_query_string_parts = [self.query_string, 'filterSet=true']
    if self.is_configurable_report:
        mock_query_string_parts.append(urlencode(self.filters, True))
        mock_query_string_parts.append(
            urlencode(self.get_date_range(), True))
    mock_request.GET = QueryDict('&'.join(mock_query_string_parts))

    # Make sure the request gets processed by PRBAC Middleware
    CCHQPRBACMiddleware.apply_prbac(mock_request)

    try:
        dispatch_func = functools.partial(
            self._dispatcher.__class__.as_view(),
            mock_request, **self.view_kwargs)
        email_response = dispatch_func(render_as='email')
        # A redirect here means the owner is no longer an active user.
        if email_response.status_code == 302:
            return ReportContent(
                _("We are sorry, but your saved report '%(config_name)s' "
                  "is no longer accessible because the owner %(username)s "
                  "is no longer active.") % {
                    'config_name': self.name,
                    'username': self.owner.username
                },
                None,
            )
        return ReportContent(
            json.loads(email_response.content)['report'],
            dispatch_func(render_as='excel') if attach_excel else None,
        )
    except PermissionDenied:
        return ReportContent(
            _("We are sorry, but your saved report '%(config_name)s' "
              "is no longer accessible because your subscription does "
              "not allow Custom Reporting. Please talk to your Project "
              "Administrator about enabling Custom Reports. If you "
              "want CommCare HQ to stop sending this message, please "
              "visit %(saved_reports_url)s to remove this "
              "Emailed Report.") % {
                'config_name': self.name,
                'saved_reports_url': absolute_reverse('saved_reports', args=[mock_request.domain]),
            },
            None,
        )
    except Http404:
        return ReportContent(
            _("We are sorry, but your saved report '%(config_name)s' "
              "can not be generated since you do not have the correct permissions. "
              "Please talk to your Project Administrator about getting permissions for this"
              "report.") % {
                'config_name': self.name,
            },
            None,
        )
    except UnsupportedSavedReportError:
        return ReportContent(
            _("We are sorry, but your saved report '%(config_name)s' "
              "is no longer available. If you think this is a mistake, please report an issue."
              ) % {
                'config_name': self.name,
            },
            None,
        )
    except Exception:
        # Unknown failure: report it with enough detail to debug, and return
        # a generic message for the email body.
        notify_exception(None, "Error generating report: {}".format(
            self.report_slug),
            details={
                'domain': self.domain,
                'user': self.owner.username,
                'report': self.report_slug,
                'report config': self.get_id
            })
        return ReportContent(
            _("An error occurred while generating this report."), None)
def location_restricted_exception(request):
    """Report a location-based access denial and return the standard
    no-permissions response for it."""
    # Imported locally, matching the original, to avoid a circular import.
    from corehq.apps.hqwebapp.views import no_permissions_exception

    notify_exception(request, NOTIFY_EXCEPTION_MSG)
    response = no_permissions_exception(request, message=LOCATION_ACCESS_DENIED)
    return response
def _edit_form_attr(request, domain, app_id, form_unique_id, attr):
    """
    Called to edit any (supported) form attribute, given by attr

    Each supported attribute is handled by its own should_edit() branch below;
    a single POST may edit several attributes at once. Responds with JSON when
    the request is ajax, otherwise redirects back to the app-manager page.
    """
    ajax = json.loads(request.POST.get('ajax', 'true'))
    resp = {}

    app = get_app(domain, app_id)
    try:
        form = app.get_form(form_unique_id)
    except FormNotFoundException as e:
        if ajax:
            return HttpResponseBadRequest(six.text_type(e))
        else:
            messages.error(request, _("There was an error saving, please try again!"))
            return back_to_main(request, domain, app_id=app_id)
    lang = request.COOKIES.get('lang', app.langs[0])

    def should_edit(attribute):
        # An attribute is edited only if it was posted.
        return attribute in request.POST

    # sha1 guard: reject the xform upload if it conflicts with a newer version.
    if 'sha1' in request.POST and (should_edit("xform") or "xform" in request.FILES):
        conflict = _get_xform_conflict_response(form, request.POST['sha1'])
        if conflict is not None:
            return conflict

    if should_edit("name"):
        name = request.POST['name']
        form.name[lang] = name
        if not form.form_type == "shadow_form":
            xform = form.wrapped_xform()
            if xform.exists():
                xform.set_name(name)
                save_xform(app, form, xform.render())
        resp['update'] = {'.variable-form_name': trans(form.name, [lang], use_delim=False)}

    if should_edit('comment'):
        form.comment = request.POST['comment']

    if should_edit("xform") or "xform" in request.FILES:
        try:
            # support FILES for upload and POST for ajax post from Vellum
            try:
                xform = request.FILES.get('xform').read()
            except Exception:
                xform = request.POST.get('xform')
            else:
                try:
                    xform = six.text_type(xform, encoding="utf-8")
                except Exception:
                    raise Exception("Error uploading form: Please make sure your form is encoded in UTF-8")
            if request.POST.get('cleanup', False):
                try:
                    # First, we strip all newlines and reformat the DOM.
                    px = parseString(xform.replace('\r\n', '')).toprettyxml()
                    # Then we remove excess newlines from the DOM output.
                    text_re = re.compile(r'>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
                    prettyXml = text_re.sub(r'>\g<1></', px)
                    xform = prettyXml
                except Exception:
                    # Cleanup is cosmetic; keep the original xform on failure.
                    pass
            if xform:
                if isinstance(xform, six.text_type):
                    xform = xform.encode('utf-8')
                save_xform(app, form, xform)
            else:
                raise Exception("You didn't select a form to upload")
        except Exception as e:
            notify_exception(request, six.text_type(e))
            if ajax:
                return HttpResponseBadRequest(six.text_type(e))
            else:
                messages.error(request, six.text_type(e))
    if should_edit("references") or should_edit("case_references"):
        form.case_references = _get_case_references(request.POST)
    if should_edit("show_count"):
        show_count = request.POST['show_count']
        form.show_count = True if show_count == "True" else False
    if should_edit("put_in_root"):
        put_in_root = request.POST['put_in_root']
        form.put_in_root = True if put_in_root == "True" else False
    if should_edit('form_filter'):
        form.form_filter = request.POST['form_filter']
    if should_edit('post_form_workflow'):
        form.post_form_workflow = request.POST['post_form_workflow']
    if should_edit('auto_gps_capture'):
        form.auto_gps_capture = request.POST['auto_gps_capture'] == 'true'
    if should_edit('is_release_notes_form'):
        form.is_release_notes_form = request.POST['is_release_notes_form'] == 'true'
    if should_edit('enable_release_notes'):
        form.enable_release_notes = request.POST['enable_release_notes'] == 'true'
        if not form.is_release_notes_form and form.enable_release_notes:
            return json_response(
                {'message': _("You can't enable a form as release notes without allowing it as "
                              "a release notes form <TODO messaging>")},
                status_code=400
            )
    if should_edit('no_vellum'):
        form.no_vellum = request.POST['no_vellum'] == 'true'
    if (should_edit("form_links_xpath_expressions")
            and should_edit("form_links_form_ids")
            and toggles.FORM_LINK_WORKFLOW.enabled(domain)):
        # Posted as parallel lists: one xpath, form id, and datum list per link.
        form_links = zip(
            request.POST.getlist('form_links_xpath_expressions'),
            request.POST.getlist('form_links_form_ids'),
            [
                json.loads(datum_json) if datum_json else []
                for datum_json in request.POST.getlist('datums_json')
            ],
        )
        form.form_links = [FormLink(
            xpath=link[0],
            form_id=link[1],
            datums=[
                FormDatum(name=datum['name'], xpath=datum['xpath'])
                for datum in link[2]
            ]
        ) for link in form_links]

    if should_edit('post_form_workflow_fallback'):
        form.post_form_workflow_fallback = request.POST.get('post_form_workflow_fallback')
    if should_edit('custom_instances'):
        instances = json.loads(request.POST.get('custom_instances'))
        try:    # validate that custom instances can be added into the XML
            for instance in instances:
                etree.fromstring(
                    "<instance id='{}' src='{}' />".format(
                        instance.get('instanceId'),
                        instance.get('instancePath')
                    )
                )
        except etree.XMLSyntaxError as error:
            return json_response(
                {'message': _("There was an issue with your custom instances: {}").format(error.message)},
                status_code=400
            )

        form.custom_instances = [
            CustomInstance(
                instance_id=instance.get("instanceId"),
                instance_path=instance.get("instancePath"),
            ) for instance in instances
        ]

    if should_edit('custom_assertions'):
        assertions = json.loads(request.POST.get('custom_assertions'))
        try:    # validate that custom assertions can be added into the XML
            for assertion in assertions:
                etree.fromstring(
                    '<assertion test="{test}"><text><locale id="abc.def"/>{text}</text></assertion>'.format(
                        **assertion
                    )
                )
        except etree.XMLSyntaxError as error:
            return json_response(
                {'message': _("There was an issue with your custom assertions: {}").format(error.message)},
                status_code=400
            )

        # Update existing assertions (keyed by test expression) in place, and
        # create new ones for tests not seen before.
        existing_assertions = {assertion.test: assertion for assertion in form.custom_assertions}
        new_assertions = []
        for assertion in assertions:
            try:
                new_assertion = existing_assertions[assertion.get('test')]
                new_assertion.text[lang] = assertion.get('text')
            except KeyError:
                new_assertion = CustomAssertion(
                    test=assertion.get('test'),
                    text={lang: assertion.get('text')}
                )
            new_assertions.append(new_assertion)
        form.custom_assertions = new_assertions

    if should_edit("shadow_parent"):
        form.shadow_parent_form_id = request.POST['shadow_parent']

    if should_edit("custom_icon_form"):
        error_message = handle_custom_icon_edits(request, form, lang)
        if error_message:
            return json_response(
                {'message': error_message},
                status_code=400
            )
    handle_media_edits(request, form, should_edit, resp, lang)

    app.save(resp)
    notify_form_changed(domain, request.couch_user, app_id, form_unique_id)
    if ajax:
        return HttpResponse(json.dumps(resp))
    else:
        return back_to_main(request, domain, app_id=app_id, form_unique_id=form_unique_id)
def _build_async_indicators(indicator_doc_ids):
    """Compute and bulk-save UCR indicator rows for the queued async indicators.

    For each queued ``AsyncIndicator`` doc id: fetch the source document,
    evaluate every configured data source against it, bulk-save the resulting
    rows per adapter, then delete fully-processed indicators and record a
    failure on the rest.

    :param indicator_doc_ids: iterable of source-document ids that have
        pending ``AsyncIndicator`` records.
    """
    def handle_exception(exception, config_id, doc, adapter):
        # Known infrastructure failures are only counted as metrics; anything
        # else is handed to the adapter (when we got far enough to build one).
        metric = None
        if isinstance(exception, (ProtocolError, ReadTimeout)):
            metric = 'commcare.async_indicator.riak_error'
        elif isinstance(exception, (ESError, ConnectionTimeout)):
            # a database had an issue so log it and go on to the next document
            metric = 'commcare.async_indicator.es_error'
        elif isinstance(exception, (DatabaseError, InternalError)):
            # a database had an issue so log it and go on to the next document
            metric = 'commcare.async_indicator.psql_error'
        else:
            # getting the config could fail before the adapter is set
            if adapter:
                adapter.handle_exception(doc, exception)
        if metric:
            datadog_counter(metric, 1, tags={
                'config_id': config_id,
                'doc_id': doc['_id']
            })

    def doc_ids_from_rows(rows):
        # Map saved rows back to the source doc ids they came from.
        formatted_rows = [{
            column.column.database_column_name.decode('utf-8'): column.value
            for column in row
        } for row in rows]
        return set(row['doc_id'] for row in formatted_rows)

    # tracks processed/deleted configs to be removed from each indicator
    configs_to_remove_by_indicator_id = defaultdict(list)

    def _mark_config_to_remove(config_id, indicator_ids):
        for _id in indicator_ids:
            configs_to_remove_by_indicator_id[_id].append(config_id)

    timer = TimingContext()
    lock_keys = [
        get_async_indicator_modify_lock_key(indicator_id)
        for indicator_id in indicator_doc_ids
    ]
    with CriticalSection(lock_keys):
        all_indicators = AsyncIndicator.objects.filter(
            doc_id__in=indicator_doc_ids)
        if not all_indicators:
            return

        doc_store = get_document_store_for_doc_type(
            all_indicators[0].domain, all_indicators[0].doc_type)
        failed_indicators = set()

        rows_to_save_by_adapter = defaultdict(list)
        indicator_by_doc_id = {i.doc_id: i for i in all_indicators}
        config_ids = set()
        with timer:
            for doc in doc_store.iter_documents(
                    list(indicator_by_doc_id.keys())):
                indicator = indicator_by_doc_id[doc['_id']]
                eval_context = EvaluationContext(doc)
                for config_id in indicator.indicator_config_ids:
                    config_ids.add(config_id)
                    try:
                        config = _get_config_by_id(config_id)
                    except (ResourceNotFound,
                            StaticDataSourceConfigurationNotFoundError):
                        celery_task_logger.info(
                            "{} no longer exists, skipping".format(config_id))
                        # remove because the config no longer exists
                        _mark_config_to_remove(config_id, [indicator.pk])
                        continue
                    except ESError:
                        celery_task_logger.info(
                            "ES errored when trying to retrieve config")
                        failed_indicators.add(indicator)
                        continue
                    adapter = None
                    try:
                        adapter = get_indicator_adapter(config)
                        rows_to_save_by_adapter[adapter].extend(
                            adapter.get_all_values(doc, eval_context))
                        eval_context.reset_iteration()
                    except Exception as e:
                        failed_indicators.add(indicator)
                        handle_exception(e, config_id, doc, adapter)

            for adapter, rows in six.iteritems(rows_to_save_by_adapter):
                doc_ids = doc_ids_from_rows(rows)
                indicators = [
                    indicator_by_doc_id[doc_id] for doc_id in doc_ids
                ]
                try:
                    adapter.save_rows(rows)
                except Exception as e:
                    # BUGFIX: set.union() returns a new set and discards it;
                    # update() mutates failed_indicators in place.
                    failed_indicators.update(indicators)
                    message = e.message
                    if isinstance(message, bytes):
                        # TODO - figure out where these are coming from and use unicode message from the start
                        message = repr(message)
                    notify_exception(
                        None,
                        "Exception bulk saving async indicators:{}".format(
                            message))
                else:
                    # remove because it's successfully processed.
                    # BUGFIX: use this adapter's own config id; the old code
                    # reused `config_id`, a stale leftover from the per-doc
                    # loop above, mis-attributing the processed config.
                    _mark_config_to_remove(adapter.config._id,
                                           [i.pk for i in indicators])

        # delete fully processed indicators
        processed_indicators = set(all_indicators) - failed_indicators
        AsyncIndicator.objects.filter(
            pk__in=[i.pk for i in processed_indicators]).delete()

        # update failure for failed indicators
        with transaction.atomic():
            for indicator in failed_indicators:
                indicator.update_failure(
                    configs_to_remove_by_indicator_id.get(indicator.pk, []))
                indicator.save()

        datadog_counter('commcare.async_indicator.processed_success',
                        len(processed_indicators))
        datadog_counter('commcare.async_indicator.processed_fail',
                        len(failed_indicators))
        datadog_histogram('commcare.async_indicator.processing_time',
                          timer.duration / len(indicator_doc_ids),
                          tags=[
                              'config_ids:{}'.format(config_ids),
                          ])
def _handle_es_exception(request, exception, query_addition_debug_details):
    """Report an Elasticsearch failure and answer with a bare HTTP 500.

    The exception type and any caller-supplied debug details are forwarded
    to ``notify_exception`` so the error surfaces in monitoring.
    """
    error_details = dict(exception_type=type(exception),
                         **query_addition_debug_details)
    notify_exception(request, str(exception), details=error_details)
    return HttpResponse(status=500)
def send_mail_async(self, subject, message, from_email, recipient_list,
                    messaging_event_id=None, domain=None):
    """ Call with send_mail_async.delay(*args, **kwargs)
    - sends emails in the main celery queue
    - if sending fails, retry in 15 min
    - retry a maximum of 10 times
    """
    from corehq.util.soft_assert import soft_assert
    # Alert (without failing) if any recipient address is blank.
    soft_assert('{}@dimagi.com'.format('skelly'))(
        all(recipient for recipient in recipient_list),
        'Blank email addresses', {
            'subject': subject,
            'message': message,
            'recipients': recipient_list
        }
    )
    # Drop falsy (blank/None) entries before sending.
    recipient_list = [_f for _f in recipient_list if _f]

    # todo deal with recipients marked as bounced
    from dimagi.utils.django.email import get_valid_recipients, mark_local_bounced_email
    filtered_recipient_list = get_valid_recipients(recipient_list, domain)
    bounced_recipients = list(set(recipient_list) - set(filtered_recipient_list))
    if bounced_recipients and messaging_event_id:
        mark_local_bounced_email(bounced_recipients, messaging_event_id)

    if not filtered_recipient_list:
        # every address was blank or bounced — nothing to send
        return

    headers = {}

    if settings.RETURN_PATH_EMAIL:
        headers['Return-Path'] = settings.RETURN_PATH_EMAIL

    if messaging_event_id is not None:
        headers[COMMCARE_MESSAGE_ID_HEADER] = messaging_event_id
    if settings.SES_CONFIGURATION_SET is not None:
        headers[SES_CONFIGURATION_SET_HEADER] = settings.SES_CONFIGURATION_SET

    try:
        # NOTE: rebinds `message` from the body text to the EmailMessage object;
        # the except-clauses below rely on the new binding.
        message = EmailMessage(
            subject=subject,
            body=message,
            from_email=from_email,
            to=filtered_recipient_list,
            headers=headers,
        )
        return message.send()
    except SMTPDataError as e:
        # If the SES configuration has not been properly set up, resend the message
        if (
            "Configuration Set does not exist" in repr(e.smtp_error)
            and SES_CONFIGURATION_SET_HEADER in message.extra_headers
        ):
            del message.extra_headers[SES_CONFIGURATION_SET_HEADER]
            message.send()
            notify_exception(None, message="SES Configuration Set missing",
                             details={'error': e})
        else:
            raise
    except Exception as e:
        notify_exception(
            None,
            message="Encountered error while sending email",
            details={
                'subject': subject,
                'recipients': ', '.join(filtered_recipient_list),
                'error': e,
                'messaging_event_id': messaging_event_id,
            }
        )
        if messaging_event_id is not None:
            # mark as retrying first; flipped to not-retrying below if we
            # exhaust the celery retry budget
            mark_subevent_gateway_error(messaging_event_id, e, retrying=True)
        try:
            self.retry(exc=e)
        except MaxRetriesExceededError:
            if messaging_event_id is not None:
                mark_subevent_gateway_error(messaging_event_id, e, retrying=False)
def create_files_for_ccz(build, build_profile_id,
                         include_multimedia_files=True,
                         include_index_files=True, download_id=None,
                         compress_zip=False, filename="commcare.zip",
                         download_targeted_version=False, task=None,
                         expose_link=False):
    """
    Build (or reuse) the CCZ archive for an app build and return its path.

    :param task: celery task whose progress needs to be set when being run asynchronously by celery
    :param expose_link: expose downloadable link for the file created
    :return: path to the ccz file
    """
    compression = zipfile.ZIP_DEFLATED if compress_zip else zipfile.ZIP_STORED
    current_progress = 10  # early on indicate something is happening
    file_progress = 50.0  # arbitrarily say building files takes half the total time

    DownloadBase.set_progress(task, current_progress, 100)

    fpath = _get_file_path(build, include_multimedia_files,
                           include_index_files, build_profile_id,
                           download_targeted_version)

    # Don't rebuild the file if it is already there
    if not (os.path.isfile(fpath) and settings.SHARED_DRIVE_CONF.transfer_enabled):
        files, errors, file_count = _build_ccz_files(
            build, build_profile_id, include_multimedia_files,
            include_index_files, download_id, compress_zip, filename,
            download_targeted_version)
        file_cache = _zip_files_for_ccz(fpath, files, current_progress,
                                        file_progress, file_count,
                                        compression, task)
        if toggles.LOCALE_ID_INTEGRITY.enabled(build.domain):
            # sanity-check that every locale id referenced is defined
            locale_errors = find_missing_locale_ids_in_ccz(file_cache)
            if locale_errors:
                errors.extend(locale_errors)
                notify_exception(
                    None,
                    message=
                    "CCZ missing locale ids from default/app_strings.txt",
                    details={
                        'domain': build.domain,
                        'app_id': build.id,
                        'errors': locale_errors
                    })
        if include_index_files and include_multimedia_files:
            multimedia_errors = check_ccz_multimedia_integrity(
                build.domain, fpath)
            errors.extend(multimedia_errors)
            if multimedia_errors:
                notify_exception(None,
                                 message="CCZ missing multimedia files",
                                 details={
                                     'domain': build.domain,
                                     'app_id': build.id,
                                     'errors': multimedia_errors
                                 })
        if errors:
            # don't leave a broken archive behind for the reuse check above
            os.remove(fpath)
            raise Exception('\t' + '\t'.join(errors))
    else:
        # archive already exists on the shared drive — skip straight ahead
        DownloadBase.set_progress(task, current_progress + file_progress, 100)
    if expose_link:
        _expose_download_link(fpath, filename, compress_zip, download_id)
    DownloadBase.set_progress(task, 100, 100)
    return fpath
def notify_exception(self, message=None, details=None):
    """Record the error on this object, then report it globally.

    Delegates to ``self.notify_error`` for local bookkeeping, then calls the
    module-level ``notify_exception`` (with no request context) so the error
    is also surfaced to monitoring.
    """
    self.notify_error(message, details)
    notify_exception(None, message, details)
def get_app_view_context(request, app):
    """
    This provides the context to render commcare settings on Edit Application Settings page
    This is where additional app or domain specific context can be added to any individual
    commcare-setting defined in commcare-app-settings.yaml or commcare-profile-settings.yaml
    """
    context = {}

    settings_layout = copy.deepcopy(
        get_commcare_settings_layout(request.user)[app.get_doc_type()])

    # Filter each section down to the settings this user/app may see:
    # drop settings behind a disabled toggle, a missing privilege, or an
    # app attribute that disables them.
    for section in settings_layout:
        new_settings = []
        for setting in section['settings']:
            toggle_name = setting.get('toggle')
            if toggle_name and not toggle_enabled(request, toggle_name):
                continue
            privilege_name = setting.get('privilege')
            if privilege_name and not has_privilege(request, privilege_name):
                continue
            disable_if_true = setting.get('disable_if_true')
            if disable_if_true and getattr(app, setting['id']):
                continue
            new_settings.append(setting)
        section['settings'] = new_settings

    app_view_options = {
        'permissions': {
            'cloudcare': has_privilege(request, privileges.CLOUDCARE),
        },
        'sections': settings_layout,
        'urls': {
            'save': reverse("edit_commcare_settings",
                            args=(app.domain, app.id)),
        },
        'user': {
            'is_previewer': request.couch_user.is_previewer(),
        },
        'values': get_settings_values(app),
        'warning': _("This is not an allowed value for this field"),
    }
    if toggles.CUSTOM_PROPERTIES.enabled(
            request.domain) and 'custom_properties' in getattr(
                app, 'profile', {}):
        custom_properties_array = [{
            'key': p[0],
            'value': p[1]
        } for p in app.profile.get('custom_properties').items()]
        app_view_options.update({'customProperties': custom_properties_array})
    context.update({
        'app_view_options': app_view_options,
    })

    # Build the CommCare version menu (superuser-only builds hidden for
    # regular users), grouped by major release.
    build_config = CommCareBuildConfig.fetch()
    options = build_config.get_menu()
    if not request.user.is_superuser:
        options = [option for option in options if not option.superuser_only]
    options_map = defaultdict(lambda: {"values": [], "value_names": []})
    for option in options:
        builds = options_map[option.build.major_release()]
        builds["values"].append(option.build.to_string())
        builds["value_names"].append(option.get_label())
        if "default" not in builds:
            app_ver = MAJOR_RELEASE_TO_VERSION[option.build.major_release()]
            builds["default"] = build_config.get_default(app_ver).to_string()

    def _get_setting(setting_type, setting_id):
        # get setting dict from settings_layout
        if not settings_layout:
            return None
        matched = [
            x for x in [
                setting for section in settings_layout
                for setting in section['settings']
            ] if x['type'] == setting_type and x['id'] == setting_id
        ]
        if matched:
            return matched[0]
        else:
            return None

    build_spec_setting = _get_setting('hq', 'build_spec')
    if build_spec_setting:
        build_spec_setting['options_map'] = options_map
        build_spec_setting['default_app_version'] = app.application_version

    practice_user_setting = _get_setting('hq', 'practice_mobile_worker_id')
    if practice_user_setting and has_privilege(
            request, privileges.PRACTICE_MOBILE_WORKERS):
        try:
            practice_users = get_practice_mode_mobile_workers(request.domain)
        except ESError:
            # fall back to an empty list rather than failing the page
            notify_exception(request,
                             'Error getting practice mode mobile workers')
            practice_users = []
        practice_user_setting['values'] = [''] + [
            u['_id'] for u in practice_users
        ]
        practice_user_setting['value_names'] = [_('Not set')] + [
            u['username'] for u in practice_users
        ]

    context.update({
        'bulk_ui_translation_upload': {
            'action':
            reverse('upload_bulk_ui_translations',
                    args=(app.domain, app.get_id)),
            'download_url':
            reverse('download_bulk_ui_translations',
                    args=(app.domain, app.get_id)),
            'adjective': _(u"U\u200BI translation"),
            'plural_noun': _(u"U\u200BI translations"),
        },
        'bulk_app_translation_upload': {
            'action':
            reverse('upload_bulk_app_translations',
                    args=(app.domain, app.get_id)),
            'download_url':
            reverse('download_bulk_app_translations',
                    args=(app.domain, app.get_id)),
            'adjective': _("app translation"),
            'plural_noun': _("app translations"),
        },
    })
    # the upload forms read their URLs out of the context entries added above
    context.update({
        'bulk_ui_translation_form':
        get_bulk_upload_form(context, context_key="bulk_ui_translation_upload"),
        'bulk_app_translation_form':
        get_bulk_upload_form(context, context_key="bulk_app_translation_upload")
    })
    # Not used in APP_MANAGER_V2
    context['is_app_view'] = True
    try:
        context['fetchLimit'] = int(
            request.GET.get('limit', DEFAULT_FETCH_LIMIT))
    except ValueError:
        context['fetchLimit'] = DEFAULT_FETCH_LIMIT

    if app.get_doc_type() == 'LinkedApplication':
        try:
            context['master_version'] = app.get_master_version()
        except RemoteRequestError:
            # remote master unreachable — omit the key rather than erroring
            pass
    return context
def send_first_message(domain, recipient, phone_entry_or_number, session,
                       responses, logged_subevent, workflow):
    """Send the opening SMS of a survey session to the recipient.

    Claims the phone channel when the one-phone-number-multiple-contacts
    toggle is on (retrying via celery if it can't), then formats and sends
    the first batch of responses and marks the subevent completed.
    """
    # This try/except section is just here (temporarily) to support future refactors
    # If any of these notify, they should be replaced with a comment as to why the two are different
    # so that someone refactoring in the future will know that this or that param is necessary.
    try:
        if session.workflow != workflow:
            # see if we can eliminate the workflow arg
            notify_error('Exploratory: session.workflow != workflow',
                         details={
                             'session.workflow': session.workflow,
                             'workflow': workflow
                         })
        if session.connection_id != recipient.get_id:
            # see if we can eliminate the recipient arg
            notify_error(
                'Exploratory: session.connection_id != recipient.get_id',
                details={
                    'session.connection_id': session.connection_id,
                    'recipient.get_id': recipient.get_id,
                    'recipient': recipient
                })
        if session.related_subevent != logged_subevent:
            # see if we can eliminate the logged_subevent arg
            notify_error(
                'Exploratory: session.related_subevent != logged_subevent',
                details={
                    'session.connection_id': session.connection_id,
                    'logged_subevent': logged_subevent
                })
    except Exception:
        # The above running is not mission critical, so if it errors just leave a message in the log
        # for us to follow up on.
        # Absence of the message below and messages above ever notifying
        # will indicate that we can remove these args.
        notify_exception(
            None,
            "Error in section of code that's just supposed help inform future refactors"
        )

    if toggles.ONE_PHONE_NUMBER_MULTIPLE_CONTACTS.enabled(domain):
        if not XFormsSessionSynchronization.claim_channel_for_session(session):
            # channel busy — requeue this whole send for a minute from now
            send_first_message.apply_async(
                args=(domain, recipient, phone_entry_or_number, session,
                      responses, logged_subevent, workflow),
                countdown=60)
            return

    metrics_counter('commcare.smsforms.session_started', 1,
                    tags={
                        'domain': domain,
                        'workflow': workflow
                    })

    if len(responses) > 0:
        text_responses = _responses_to_text(responses)
        message = format_message_list(text_responses)
        events = get_events_from_responses(responses)
        metadata = MessageMetadata(workflow=workflow,
                                   xforms_session_couch_id=session.couch_id,
                                   messaging_subevent_id=logged_subevent.pk)
        if isinstance(phone_entry_or_number, PhoneNumber):
            # verified numbers go through the two-way SMS path
            send_sms_to_verified_number(phone_entry_or_number,
                                        message,
                                        metadata,
                                        logged_subevent=logged_subevent,
                                        events=events)
        else:
            send_sms(domain, recipient, phone_entry_or_number, message,
                     metadata)
    logged_subevent.completed()
def build_async_indicators(indicator_doc_ids):
    """Compute and bulk-save UCR rows for a chunk of queued async indicators.

    Memoizes config and adapter lookups per call, saves/deletes rows per
    adapter in bulk, deletes fully-processed ``AsyncIndicator`` records,
    and records a failure on the rest.

    :param indicator_doc_ids: source-document ids with pending
        ``AsyncIndicator`` records; at most ``ASYNC_INDICATOR_CHUNK_SIZE``.
    """
    # written to be used with _queue_indicators, indicator_doc_ids must
    # be a chunk of 100
    memoizers = {'configs': {}, 'adapters': {}}
    assert (len(indicator_doc_ids)) <= ASYNC_INDICATOR_CHUNK_SIZE

    def handle_exception(exception, config_id, doc, adapter):
        # Known infrastructure failures are only counted as metrics; anything
        # else is handed to the adapter (when we got far enough to build one).
        metric = None
        if isinstance(exception, (ProtocolError, ReadTimeout)):
            metric = 'commcare.async_indicator.riak_error'
        elif isinstance(exception, (ESError, ConnectionTimeout)):
            # a database had an issue so log it and go on to the next document
            metric = 'commcare.async_indicator.es_error'
        elif isinstance(exception, (DatabaseError, InternalError)):
            # a database had an issue so log it and go on to the next document
            metric = 'commcare.async_indicator.psql_error'
        else:
            # getting the config could fail before the adapter is set
            if adapter:
                adapter.handle_exception(doc, exception)
        if metric:
            metrics_counter(metric, tags={
                'config_id': config_id,
                'doc_id': doc['_id']
            })

    def doc_ids_from_rows(rows):
        # Map saved rows back to the source doc ids they came from.
        formatted_rows = [{
            column.column.database_column_name.decode('utf-8'): column.value
            for column in row
        } for row in rows]
        return set(row['doc_id'] for row in formatted_rows)

    def _get_config(config_id):
        # per-call memoization of config lookups
        config_by_id = memoizers['configs']
        if config_id in config_by_id:
            return config_by_id[config_id]
        else:
            config = _get_config_by_id(config_id)
            config_by_id[config_id] = config
            return config

    def _get_adapter(config):
        # per-call memoization of adapter construction
        adapter_by_config = memoizers['adapters']
        if config._id in adapter_by_config:
            return adapter_by_config[config._id]
        else:
            adapter = get_indicator_adapter(
                config, load_source='build_async_indicators')
            adapter_by_config[config._id] = adapter
            return adapter

    def _metrics_timer(step, config_id=None):
        tags = {
            'action': step,
        }
        if config_id and settings.ENTERPRISE_MODE:
            tags['config_id'] = config_id
        else:
            # Prometheus requires consistent tags even if not available
            tags['config_id'] = None
        return metrics_histogram_timer('commcare.async_indicator.timing',
                                       timing_buckets=(.03, .1, .3, 1, 3, 10),
                                       tags=tags)

    # tracks processed/deleted configs to be removed from each indicator
    configs_to_remove_by_indicator_id = defaultdict(list)

    def _mark_config_to_remove(config_id, indicator_ids):
        for _id in indicator_ids:
            configs_to_remove_by_indicator_id[_id].append(config_id)

    timer = TimingContext()
    lock_keys = [
        get_async_indicator_modify_lock_key(indicator_doc_id)
        for indicator_doc_id in indicator_doc_ids
    ]
    with CriticalSection(lock_keys):
        all_indicators = AsyncIndicator.objects.filter(
            doc_id__in=indicator_doc_ids)
        if not all_indicators:
            return

        doc_store = get_document_store_for_doc_type(
            all_indicators[0].domain,
            all_indicators[0].doc_type,
            load_source="build_async_indicators",
        )
        failed_indicators = set()

        rows_to_save_by_adapter = defaultdict(list)
        docs_to_delete_by_adapter = defaultdict(list)
        # there will always be one AsyncIndicator per doc id
        indicator_by_doc_id = {i.doc_id: i for i in all_indicators}
        config_ids = set()
        with timer:
            for doc in doc_store.iter_documents(
                    list(indicator_by_doc_id.keys())):
                indicator = indicator_by_doc_id[doc['_id']]
                eval_context = EvaluationContext(doc)
                for config_id in indicator.indicator_config_ids:
                    with _metrics_timer('transform', config_id):
                        config_ids.add(config_id)
                        try:
                            config = _get_config(config_id)
                        except (ResourceNotFound,
                                StaticDataSourceConfigurationNotFoundError):
                            celery_task_logger.info(
                                "{} no longer exists, skipping".format(
                                    config_id))
                            # remove because the config no longer exists
                            _mark_config_to_remove(config_id, [indicator.pk])
                            continue
                        except ESError:
                            celery_task_logger.info(
                                "ES errored when trying to retrieve config")
                            failed_indicators.add(indicator)
                            continue
                        adapter = None
                        try:
                            adapter = _get_adapter(config)
                            rows_to_save = adapter.get_all_values(
                                doc, eval_context)
                            if rows_to_save:
                                rows_to_save_by_adapter[adapter].extend(
                                    rows_to_save)
                            else:
                                # no rows means the doc no longer matches the
                                # config's filter — delete any stale rows
                                docs_to_delete_by_adapter[adapter].append(doc)
                            eval_context.reset_iteration()
                        except Exception as e:
                            failed_indicators.add(indicator)
                            handle_exception(e, config_id, doc, adapter)

            with _metrics_timer('single_batch_update'):
                for adapter, rows in rows_to_save_by_adapter.items():
                    doc_ids = doc_ids_from_rows(rows)
                    indicators = [
                        indicator_by_doc_id[doc_id] for doc_id in doc_ids
                    ]
                    try:
                        with _metrics_timer('update', adapter.config._id):
                            adapter.save_rows(rows, use_shard_col=True)
                    except Exception as e:
                        # BUGFIX: set.union() returns a new set and discards
                        # it; update() mutates failed_indicators in place.
                        failed_indicators.update(indicators)
                        message = str(e)
                        notify_exception(
                            None,
                            "Exception bulk saving async indicators:{}".format(
                                message))
                    else:
                        # remove because it's successfully processed.
                        # BUGFIX: use this adapter's own config id; the old
                        # code reused `config_id`, a stale leftover from the
                        # per-doc loop above.
                        _mark_config_to_remove(adapter.config._id,
                                               [i.pk for i in indicators])

            with _metrics_timer('single_batch_delete'):
                for adapter, docs in docs_to_delete_by_adapter.items():
                    with _metrics_timer('delete', adapter.config._id):
                        adapter.bulk_delete(docs)

        # delete fully processed indicators
        processed_indicators = set(all_indicators) - failed_indicators
        AsyncIndicator.objects.filter(
            pk__in=[i.pk for i in processed_indicators]).delete()

        # update failure for failed indicators
        with transaction.atomic():
            for indicator in failed_indicators:
                indicator.update_failure(
                    configs_to_remove_by_indicator_id.get(indicator.pk, []))
                indicator.save()

        metrics_counter('commcare.async_indicator.processed_success',
                        len(processed_indicators))
        metrics_counter('commcare.async_indicator.processed_fail',
                        len(failed_indicators))
        metrics_counter('commcare.async_indicator.processing_time',
                        timer.duration,
                        tags={'config_ids': config_ids})
        metrics_counter('commcare.async_indicator.processed_total',
                        len(indicator_doc_ids),
                        tags={'config_ids': config_ids})
def _process_form(request, domain, app_id, user_id, authenticated,
                  auth_cls=AuthContext):
    """Process an incoming XForm submission and return the HTTP response.

    Short-circuits for ignorable or blacklisted submissions, turns multimedia
    parsing bugs into a 400, runs SubmissionPost, and records datadog metrics
    keyed by backend and response status.
    """
    if should_ignore_submission(request):
        # silently ignore submission if it meets ignore-criteria
        return SubmissionPost.submission_ignored_response()

    if toggles.FORM_SUBMISSION_BLACKLIST.enabled(domain):
        return SubmissionPost.get_blacklisted_response()

    try:
        instance, attachments = couchforms.get_instance_and_attachment(request)
    except MultimediaBug as e:
        # best-effort extraction of form meta purely for the error report
        try:
            instance = request.FILES[MAGIC_PROPERTY].read()
            xform = convert_xform_to_json(instance)
            meta = xform.get("meta", {})
        except:
            meta = {}

        details = {
            "domain": domain,
            "app_id": app_id,
            "user_id": user_id,
            "authenticated": authenticated,
            "form_meta": meta,
        }
        log_counter(MULTIMEDIA_SUBMISSION_ERROR_COUNT, details)
        notify_exception(None, "Received a submission with POST.keys()",
                         details)
        return HttpResponseBadRequest(e.message)

    app_id, build_id = get_app_and_build_ids(domain, app_id)
    submission_post = SubmissionPost(
        instance=instance,
        attachments=attachments,
        domain=domain,
        app_id=app_id,
        build_id=build_id,
        auth_context=auth_cls(
            domain=domain,
            user_id=user_id,
            authenticated=authenticated,
        ),
        location=couchforms.get_location(request),
        received_on=couchforms.get_received_on(request),
        date_header=couchforms.get_date_header(request),
        path=couchforms.get_path(request),
        submit_ip=couchforms.get_submit_ip(request),
        last_sync_token=couchforms.get_last_sync_token(request),
        openrosa_headers=couchforms.get_openrosa_headers(request),
    )
    with TimingContext() as timer:
        result = submission_post.run()

    response = result.response

    tags = [
        'backend:sql' if should_use_sql_backend(domain) else 'backend:couch',
        u'domain:{}'.format(domain)
    ]
    datadog_counter('commcare.xform_submissions.count',
                    tags=tags +
                    ['status_code:{}'.format(response.status_code)])
    if response.status_code == 400:
        # NOTE(review): the '{0}' placeholder is never interpolated — there
        # is no .format() call, so the literal braces are logged. Presumably
        # the response body was meant to be included; confirm and fix.
        logging.error(
            'Status code 400 for a form submission. '
            'Response is: \n{0}\n'
        )
    elif response.status_code == 201:
        datadog_gauge('commcare.xform_submissions.timings',
                      timer.duration,
                      tags=tags)
        # normalize over number of items (form or case) saved
        normalized_time = timer.duration / (1 + len(result.cases))
        datadog_gauge('commcare.xform_submissions.normalized_timings',
                      normalized_time,
                      tags=tags)
        datadog_gauge('commcare.xform_submissions.case_count',
                      len(result.cases),
                      tags=tags)
        datadog_gauge('commcare.xform_submissions.ledger_count',
                      len(result.ledgers),
                      tags=tags)

    return response
def write_export_instance(writer, export_instance, documents,
                          progress_tracker=None):
    """
    Write rows to the given open _Writer.
    Rows will be written to each table in the export instance for each of
    the given documents.
    :param writer: An open _Writer
    :param export_instance: An ExportInstance
    :param documents: A ScanResult, or if progress_tracker is None, any
        iterable yielding documents
    :param progress_tracker: A task for soil to track progress against
    :return: None
    """
    if progress_tracker:
        DownloadBase.set_progress(progress_tracker, 0, documents.count)

    start = _time_in_milliseconds()
    total_bytes = 0
    total_rows = 0
    compute_total = 0  # ms spent computing rows across all docs/tables
    write_total = 0  # ms spent writing rows across all docs/tables
    for row_number, doc in enumerate(documents):
        # NOTE: sys.getsizeof is shallow — this only approximates doc size
        total_bytes += sys.getsizeof(doc)
        for table in export_instance.selected_tables:
            compute_start = _time_in_milliseconds()
            try:
                rows = table.get_rows(
                    doc,
                    row_number,
                    split_columns=export_instance.split_multiselects,
                    transform_dates=export_instance.transform_dates,
                )
            except Exception as e:
                notify_exception(None,
                                 "Error exporting doc",
                                 details={
                                     'domain': export_instance.domain,
                                     'export_instance_id':
                                     export_instance.get_id,
                                     'export_table': table.label,
                                     'doc_id': doc.get('_id'),
                                 })
                # already reported above; tell sentry not to double-capture
                e.sentry_capture = False
                raise
            compute_total += _time_in_milliseconds() - compute_start

            write_start = _time_in_milliseconds()
            for row in rows:
                # It might be bad to write one row at a time when you can do more (from a performance perspective)
                # Regardless, we should handle the batching of rows in the _Writer class, not here.
                writer.write(table, row)
            write_total += _time_in_milliseconds() - write_start

            total_rows += len(rows)

        if progress_tracker:
            DownloadBase.set_progress(progress_tracker, row_number + 1,
                                      documents.count)

    end = _time_in_milliseconds()
    tags = ['format:{}'.format(writer.format)]
    _record_datadog_export_write_rows(write_total, total_bytes, total_rows,
                                      tags)
    _record_datadog_export_compute_rows(compute_total, total_bytes,
                                        total_rows, tags)
    _record_datadog_export_duration(end - start, total_bytes, total_rows,
                                    tags)
def _process_form(request, domain, app_id, user_id, authenticated,
                  auth_cls=AuthContext):
    """Process an incoming XForm submission and return the HTTP response.

    Older variant: turns multimedia parsing bugs into a 400, runs
    SubmissionPost, and logs 400 responses to the couchlog database.
    """
    if should_ignore_submission(request):
        # silently ignore submission if it meets ignore-criteria
        return SubmissionPost.submission_ignored_response()

    try:
        instance, attachments = couchforms.get_instance_and_attachment(request)
    except MultimediaBug as e:
        # best-effort extraction of form meta purely for the error report
        try:
            instance = request.FILES[MAGIC_PROPERTY].read()
            xform = convert_xform_to_json(instance)
            meta = xform.get("meta", {})
        except:
            meta = {}

        details = {
            "domain": domain,
            "app_id": app_id,
            "user_id": user_id,
            "authenticated": authenticated,
            "form_meta": meta,
        }
        log_counter(MULTIMEDIA_SUBMISSION_ERROR_COUNT, details)
        notify_exception(None, "Received a submission with POST.keys()",
                         details)
        return HttpResponseBadRequest(e.message)

    app_id, build_id = get_app_and_build_ids(domain, app_id)
    response = SubmissionPost(
        instance=instance,
        attachments=attachments,
        domain=domain,
        app_id=app_id,
        build_id=build_id,
        auth_context=auth_cls(
            domain=domain,
            user_id=user_id,
            authenticated=authenticated,
        ),
        location=couchforms.get_location(request),
        received_on=couchforms.get_received_on(request),
        date_header=couchforms.get_date_header(request),
        path=couchforms.get_path(request),
        submit_ip=couchforms.get_submit_ip(request),
        last_sync_token=couchforms.get_last_sync_token(request),
        openrosa_headers=couchforms.get_openrosa_headers(request),
    ).get_response()
    if response.status_code == 400:
        # persist the bad request/response pair to couchlog for later triage
        db_response = get_db('couchlog').save_doc({
            'request': unicode(request),
            'response': unicode(response),
        })
        logging.error(
            'Status code 400 for a form submission. '
            'Response is: \n{0}\n'
            'See couchlog db for more info: {1}'.format(
                unicode(response),
                db_response['id'],
            )
        )
    return response
def filter_cases(request, domain, app_id, module_id, parent_id=None):
    """Return the user's open cases for a module, optionally filtered.

    When the module defines a filter xpath (or the domain is on the SQL
    backend) the filtering is delegated to touchforms; otherwise the
    built-in case API is used. Optionally includes parent cases.
    """
    app = Application.get(app_id)
    module = app.get_module(module_id)
    auth_cookie = request.COOKIES.get('sessionid')
    requires_parent_cases = string_to_boolean(
        request.GET.get('requires_parent_cases', 'false'))

    xpath = EntriesHelper.get_filter_xpath(module)
    instances = get_instances_for_module(app, module,
                                         additional_xpaths=[xpath])
    extra_instances = [{
        'id': inst.id,
        'src': inst.src
    } for inst in instances]
    accessor = CaseAccessors(domain)

    # touchforms doesn't like this to be escaped
    xpath = HTMLParser.HTMLParser().unescape(xpath)
    case_type = module.case_type

    if xpath or should_use_sql_backend(domain):
        # if we need to do a custom filter, send it to touchforms for processing
        additional_filters = {
            "properties/case_type": case_type,
            "footprint": True
        }

        helper = BaseSessionDataHelper(domain, request.couch_user)
        result = helper.filter_cases(xpath,
                                     additional_filters,
                                     DjangoAuth(auth_cookie),
                                     extra_instances=extra_instances)
        if result.get('status', None) == 'error':
            code = result.get('code', 500)
            message = result.get(
                'message', _("Something went wrong filtering your cases."))
            if code == 500:
                notify_exception(None, message=message)
            return json_response(message, status_code=code)

        case_ids = result.get("cases", [])
    else:
        # otherwise just use our built in api with the defaults
        case_ids = [
            res.id for res in get_filtered_cases(
                domain,
                status=CASE_STATUS_OPEN,
                case_type=case_type,
                user_id=request.couch_user._id,
                footprint=True,
                ids_only=True,
            )
        ]

    cases = accessor.get_cases(case_ids)

    if parent_id:
        cases = filter(lambda c: c.parent and c.parent.case_id == parent_id,
                       cases)

    # refilter these because we might have accidentally included footprint cases
    # in the results from touchforms. this is a little hacky but the easiest
    # (quick) workaround. should be revisted when we optimize the case list.
    cases = filter(lambda c: c.type == case_type, cases)
    cases = [c.to_api_json(lite=True) for c in cases if c]

    response = {'cases': cases}
    if requires_parent_cases:
        # Subtract already fetched cases from parent list
        parent_ids = set(map(lambda c: c['indices']['parent']['case_id'],
                             cases)) - \
            set(map(lambda c: c['case_id'], cases))
        parents = accessor.get_cases(list(parent_ids))
        parents = [c.to_api_json(lite=True) for c in parents]
        response.update({'parents': parents})

    return json_response(response)
def _send_emergency_order_request(order, attempt):
    """
    Sends the emergency order request to Zipline.
    :param order: the EmergencyOrder that should be sent
    :param attempt: the current attempt number; in the event of errors, a
    total of MAX_ATTEMPTS will be made, separated by a wait time of
    RETRY_INTERVAL minutes
    :return: the new status to be set on the order
    """
    order.zipline_request_attempts += 1

    json_payload = get_json_payload_from_order(order)
    json_payload = json.dumps(json_payload)

    response = requests.post(settings.ZIPLINE_API_URL,
                             auth=HTTPBasicAuth(settings.ZIPLINE_API_USER,
                                                settings.ZIPLINE_API_PASSWORD),
                             data=json_payload,
                             headers={'Content-Type': 'application/json'},
                             timeout=REQUEST_TIMEOUT)

    if response.status_code != 200:
        # schedule a retry, record the error, and mark the order errored
        handle_emergency_order_request_retry(order, attempt)
        create_error_record(
            order, 'Received HTTP Response {} from Zipline'.format(
                response.status_code))
        return EmergencyOrderStatusUpdate.STATUS_ERROR

    response_text = response.text
    try:
        response_json = json.loads(response_text)
    except (TypeError, ValueError):
        # body wasn't valid JSON — report, record, and retry
        notify_exception(None,
                         message='[ZIPLINE] Invalid JSON response received',
                         details={
                             'order_id': order.pk,
                             'attempt': attempt,
                         })
        create_error_record(
            order, 'Could not parse JSON response from Zipline: {}'.format(
                response_text))
        handle_emergency_order_request_retry(order, attempt)
        return EmergencyOrderStatusUpdate.STATUS_ERROR

    status = response_json.get('status')
    if status == ZIPLINE_STATUS_RECEIVED:
        handle_request_received(order)
        return EmergencyOrderStatusUpdate.STATUS_RECEIVED
    elif status == ZIPLINE_STATUS_REJECTED:
        reason = response_json.get('reason')
        handle_request_rejected(order, reason)
        return EmergencyOrderStatusUpdate.STATUS_REJECTED
    elif status == ZIPLINE_STATUS_ERROR:
        description = response_json.get('description')
        create_error_record(
            order, 'Error received from Zipline: {}'.format(description))
        return EmergencyOrderStatusUpdate.STATUS_ERROR
    else:
        # unknown protocol status — record it and retry
        create_error_record(
            order,
            'Unrecognized status received from Zipline: {}'.format(status))
        handle_emergency_order_request_retry(order, attempt)
        return EmergencyOrderStatusUpdate.STATUS_ERROR
def get_form_view_context_and_template(request, domain, form, langs, messages=messages):
    """
    Build the template name and context dict for the app-manager form view.

    Validates the form's XForm, collects its questions and any validation
    errors, assembles permission/feature flags for the template, and picks one
    of three templates depending on the form class (CareplanForm,
    AdvancedForm, or plain Form).

    :param request: the Django request (used for messages, toggles, tours)
    :param domain: the domain name
    :param form: the app-manager form object being viewed
    :param langs: list of language codes used to translate module/form names
    :param messages: the django messages module (parameterized for testing)
    :return: a (template_name, context_dict) tuple
    """
    xform_questions = []
    xform = None
    form_errors = []
    xform_validation_errored = False

    # Wrap the raw XForm; any failure here is collected rather than raised so
    # the page can still render with the errors shown.
    try:
        xform = form.wrapped_xform()
    except XFormException as e:
        form_errors.append(u"Error in form: %s" % e)
    except Exception as e:
        logging.exception(e)
        form_errors.append(u"Unexpected error in form: %s" % e)

    if xform and xform.exists():
        if xform.already_has_meta():
            messages.warning(
                request,
                "This form has a meta block already! "
                "It may be replaced by CommCare HQ's standard meta block."
            )

        # Validate and extract questions; each known failure mode maps to a
        # user-visible error string.
        try:
            form.validate_form()
            xform_questions = xform.get_questions(langs, include_triggers=True, form=form)
        except etree.XMLSyntaxError as e:
            form_errors.append(u"Syntax Error: %s" % e)
        except AppEditingError as e:
            form_errors.append(u"Error in application: %s" % e)
        except XFormValidationError:
            xform_validation_errored = True
            # showing these messages is handled by validate_form_for_build ajax
            pass
        except XFormException as e:
            form_errors.append(u"Error in form: %s" % e)
        # any other kind of error should fail hard,
        # but for now there are too many for that to be practical
        except Exception as e:
            if settings.DEBUG:
                raise
            notify_exception(request, 'Unexpected Build Error')
            form_errors.append(u"Unexpected System Error: %s" % e)
        else:
            # remove upload questions (attachemnts) until MM Case Properties
            # are released to general public
            is_previewer = toggles.MM_CASE_PROPERTIES.enabled(request.user.username)
            xform_questions = [q for q in xform_questions
                               if q["tag"] != "upload" or is_previewer]

        # Build-time validation; errors go through the messages framework
        # instead of form_errors.
        try:
            form_action_errors = form.validate_for_build()
            if not form_action_errors:
                form.add_stuff_to_xform(xform)
        except CaseError as e:
            messages.error(request, u"Error in Case Management: %s" % e)
        except XFormException as e:
            messages.error(request, unicode(e))
        except Exception as e:
            if settings.DEBUG:
                raise
            logging.exception(unicode(e))
            messages.error(request, u"Unexpected Error: %s" % e)

    # xform may be None (or otherwise broken) here, hence the broad except.
    try:
        languages = xform.get_languages()
    except Exception:
        languages = []

    for err in form_errors:
        messages.error(request, err)

    # One entry per (module, case type) pair, used by the case config UI.
    module_case_types = []
    app = form.get_app()
    all_modules = list(app.get_modules())
    for module in all_modules:
        for case_type in module.get_case_types():
            module_case_types.append({
                'id': module.unique_id,
                'module_name': trans(module.name, langs),
                'case_type': case_type,
                'module_type': module.doc_type
            })

    # Lazily assign (and persist) a unique id for forms that don't have one.
    if not form.unique_id:
        form.get_unique_id()
        app.save()

    form_has_schedule = isinstance(form, AdvancedForm) and form.get_module().has_schedule
    module_filter_preview = feature_previews.MODULE_FILTER.enabled(request.domain)
    context = {
        'nav_form': form,
        'xform_languages': languages,
        "xform_questions": xform_questions,
        'case_reserved_words_json': load_case_reserved_words(),
        'module_case_types': module_case_types,
        'form_errors': form_errors,
        'xform_validation_errored': xform_validation_errored,
        'allow_cloudcare': app.application_version == APP_V2 and isinstance(form, Form),
        'allow_form_copy': isinstance(form, Form),
        'allow_form_filtering': (module_filter_preview or
                                 (not isinstance(form, CareplanForm) and not form_has_schedule)),
        'allow_form_workflow': not isinstance(form, CareplanForm),
        'allow_usercase': domain_has_privilege(request.domain, privileges.USER_CASE),
        'is_usercase_in_use': is_usercase_in_use(request.domain),
        'is_module_filter_enabled': (feature_previews.MODULE_FILTER.enabled(request.domain) and
                                     app.enable_module_filtering),
        'edit_name_url': reverse('edit_form_attr', args=[app.domain, app.id, form.unique_id, 'name']),
        'case_xpath_pattern_matches': CASE_XPATH_PATTERN_MATCHES,
        'case_xpath_substring_matches': CASE_XPATH_SUBSTRING_MATCHES,
        'user_case_xpath_pattern_matches': USER_CASE_XPATH_PATTERN_MATCHES,
        'user_case_xpath_substring_matches': USER_CASE_XPATH_SUBSTRING_MATCHES,
    }

    if tours.NEW_APP.is_enabled(request.user):
        request.guided_tour = tours.NEW_APP.get_tour_data()

    # Form-linking workflow: compute the list of forms this form may link to.
    if context['allow_form_workflow'] and toggles.FORM_LINK_WORKFLOW.enabled(domain):
        module = form.get_module()

        def qualified_form_name(form, auto_link):
            # "module -> form" display name; a leading star marks
            # auto-linkable forms.
            module_name = trans(form.get_module().name, langs)
            form_name = trans(form.name, app.langs)
            star = '* ' if auto_link else ' '
            return u"{}{} -> {}".format(star, module_name, form_name)

        # Modules sharing this module's case type, plus its root module and
        # child modules, are auto-linkable.
        # NOTE(review): filter() returning a list (then .append) implies this
        # code runs on Python 2.
        modules = filter(lambda m: m.case_type == module.case_type, all_modules)
        if getattr(module, 'root_module_id', None) and module.root_module not in modules:
            modules.append(module.root_module)
        modules.extend([mod for mod in module.get_child_modules() if mod not in modules])
        auto_linkable_forms = list(itertools.chain.from_iterable(list(m.get_forms()) for m in modules))

        def linkable_form(candidate_form):
            auto_link = candidate_form in auto_linkable_forms
            return {
                'unique_id': candidate_form.unique_id,
                'name': qualified_form_name(candidate_form, auto_link),
                'auto_link': auto_link
            }

        context['linkable_forms'] = [
            linkable_form(candidate_form)
            for candidate_module in all_modules
            for candidate_form in candidate_module.get_forms()
        ]

    # Pick the template (and extra context) by form class.
    if isinstance(form, CareplanForm):
        context.update({
            'mode': form.mode,
            'fixed_questions': form.get_fixed_questions(),
            'custom_case_properties': [
                {'key': key, 'path': path} for key, path in form.custom_case_updates.items()
            ],
            'case_preload': [
                {'key': key, 'path': path} for key, path in form.case_preload.items()
            ],
        })
        return "app_manager/form_view_careplan.html", context
    elif isinstance(form, AdvancedForm):
        def commtrack_programs():
            # CommTrack program choices, only when commtrack is enabled.
            if app.commtrack_enabled:
                programs = Program.by_domain(app.domain)
                return [{'value': program.get_id, 'label': program.name} for program in programs]
            else:
                return []

        all_programs = [{'value': '', 'label': _('All Programs')}]
        context.update({
            'show_custom_ref': toggles.APP_BUILDER_CUSTOM_PARENT_REF.enabled(request.user.username),
            'commtrack_programs': all_programs + commtrack_programs(),
        })
        context.update(get_schedule_context(form))
        return "app_manager/form_view_advanced.html", context
    else:
        context.update({
            'show_custom_ref': toggles.APP_BUILDER_CUSTOM_PARENT_REF.enabled(request.user.username),
        })
        return "app_manager/form_view.html", context
def send_HTML_email(subject, recipient, html_content, text_content=None,
                    cc=None, email_from=settings.DEFAULT_FROM_EMAIL,
                    file_attachments=None, bcc=None,
                    smtp_exception_skip_list=None, messaging_event_id=None):
    """
    Send a multipart email with an HTML body and a plain-text alternative.

    :param subject: subject line of the email
    :param recipient: a single address (str) or an iterable of addresses
    :param html_content: HTML body; decoded from UTF-8 when passed as bytes
    :param text_content: plain-text alternative; when absent, falls back to
        settings.NO_HTML_EMAIL_MESSAGE (or the module-level default)
    :param cc: CC addresses
    :param email_from: From address
    :param file_attachments: iterable of dicts with 'title', 'file_obj'
        (a file-like object supporting getvalue()) and 'mimetype' keys
    :param smtp_exception_skip_list: SMTP error codes for which
        SMTPSenderRefused should be re-raised instead of handled here
    :param messaging_event_id: when given, used in outgoing headers and to
        mark subevents for bounced addresses
    """
    # Normalize to a list of addresses; a bare string must not be iterated
    # character by character.
    recipients = list(recipient) if not isinstance(recipient, str) else [
        recipient
    ]
    # Drop addresses that are not valid targets (e.g. previously bounced).
    filtered_recipients = get_valid_recipients(recipients)
    bounced_addresses = list(set(recipients) - set(filtered_recipients))
    if bounced_addresses and messaging_event_id:
        mark_subevent_bounced(bounced_addresses, messaging_event_id)

    if not filtered_recipients:
        # todo address root issues by throwing a real error to catch upstream
        # fail silently for now to fix time-sensitive SES issue
        return

    if not isinstance(html_content, str):
        html_content = html_content.decode('utf-8')

    if not text_content:
        text_content = getattr(settings, 'NO_HTML_EMAIL_MESSAGE',
                               NO_HTML_EMAIL_MESSAGE)
    elif not isinstance(text_content, str):
        text_content = text_content.decode('utf-8')

    headers = {'From': email_from}  # From-header

    if settings.RETURN_PATH_EMAIL:
        headers['Return-Path'] = settings.RETURN_PATH_EMAIL

    if messaging_event_id is not None:
        headers[COMMCARE_MESSAGE_ID_HEADER] = messaging_event_id
    if settings.SES_CONFIGURATION_SET is not None:
        headers[SES_CONFIGURATION_SET_HEADER] = settings.SES_CONFIGURATION_SET

    connection = get_connection()
    msg = EmailMultiAlternatives(subject, text_content, email_from,
                                 filtered_recipients, headers=headers,
                                 connection=connection, cc=cc, bcc=bcc)
    for file in (file_attachments or []):
        if file:
            msg.attach(file["title"], file["file_obj"].getvalue(),
                       file["mimetype"])
    msg.attach_alternative(html_content, "text/html")

    try:
        msg.send()
    except SMTPDataError as e:
        # If the SES configuration has not been properly set up, resend the message
        # without the SES configuration-set header, then report the problem.
        if ("Configuration Set does not exist" in e.smtp_error
                and SES_CONFIGURATION_SET_HEADER in msg.extra_headers):
            del msg.extra_headers[SES_CONFIGURATION_SET_HEADER]
            msg.send()
            notify_exception(None, message="SES Configuration Set missing",
                             details={'error': e})
        else:
            raise
    except SMTPSenderRefused as e:
        if smtp_exception_skip_list and e.smtp_code in smtp_exception_skip_list:
            # Caller opted to handle these SMTP codes itself.
            raise e
        else:
            # Replace the refused message with a short error notice to the
            # same recipients (e.g. when attachments were too large).
            error_subject = _('ERROR: Could not send "%(subject)s"') % {
                'subject': subject,
            }

            if e.smtp_code in LARGE_FILE_SIZE_ERROR_CODES:
                error_text = _('Could not send email: file size is too large.')
            else:
                error_text = e.smtp_error
            error_text = '%s\n\n%s' % (
                error_text,
                _('Please contact %(support_email)s for assistance.') % {
                    'support_email': settings.SUPPORT_EMAIL,
                },
            )

            error_msg = EmailMultiAlternatives(
                error_subject,
                error_text,
                email_from,
                filtered_recipients,
                headers=headers,
                connection=connection,
                cc=cc,
                bcc=bcc,
            )
            error_msg.send()
def recipient_mother_person_case_from_ccs_record_case(case_schedule_instance):
    """
    Resolve the mother person case for the given ccs_record schedule instance.

    Reports via notify_exception and returns None when the case relationship
    cannot be resolved.
    """
    ccs_record = case_schedule_instance.case
    try:
        mother_case = mother_person_case_from_ccs_record_case(ccs_record)
    except CaseRelationshipError:
        notify_exception(None, message="ICDS ccs_record relationship error")
        return None
    return mother_case