Example #1
0
class FormDataInCaseIndicatorDefinition(CaseIndicatorDefinition,
                                        FormDataIndicatorDefinitionMixin):
    """
    Use this when you want to grab all forms with the relevant xmlns from a case's xform_ids property and
    include a property from those forms as an indicator for this case.
    """
    question_id = StringProperty()
    _returns_multiple = True

    _admin_crud_class = FormDataInCaseAdminCRUDManager

    def get_related_forms(self, case):
        if not isinstance(case, CommCareCase):
            raise ValueError("case is not an instance of CommCareCase.")
        all_forms = case.get_forms()
        all_forms.reverse()
        related_forms = list()
        for form in all_forms:
            if form.xmlns == self.xmlns:
                related_forms.append(form)
        return related_forms
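
A minimal usage sketch, assuming the FormDataIndicatorDefinitionMixin supplies the xmlns property that get_related_forms reads and that `case` is a wrapped CommCareCase; the xmlns and question_id values below are hypothetical:

# Hypothetical definition; real ones are stored in Couch and managed via the admin CRUD class.
indicator = FormDataInCaseIndicatorDefinition(
    xmlns="http://openrosa.org/formdesigner/example",
    question_id="dob",
)
# Forms on the case whose xmlns matches, in reverse of the case.get_forms() order.
related_forms = indicator.get_related_forms(case)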
Example #2
0
class CaseIndicatorDefinition(BaseDocumentIndicatorDefinition):
    """
        This Indicator Definition defines an indicator that will live in the computed_ property of a CommCareCase
        document. The 'doc' passed through get_value and get_clean_value should be a CommCareCase.
    """
    case_type = StringProperty()
    base_doc = "CaseIndicatorDefinition"

    def get_clean_value(self, doc):
        if not isinstance(doc, CommCareCase):
            raise ValueError(
                "The document provided must be an instance of CommCareCase.")
        if not doc.type == self.case_type:
            raise DocumentMismatchError(
                "The case provided should be a '%s' type case." %
                self.case_type)
        return super(CaseIndicatorDefinition, self).get_clean_value(doc)

    @classmethod
    def key_properties(cls):
        return ["namespace", "domain", "case_type", "slug"]
Example #3
0
class Toggle(Document):
    """
    A very simple implementation of a feature toggle. Just a list of items
    attached to a slug.
    """
    slug = StringProperty()
    enabled_users = ListProperty()
    last_modified = DateTimeProperty()

    def save(self, **params):
        if '_id' not in self._doc:
            self._doc['_id'] = generate_toggle_id(self.slug)
        self.last_modified = datetime.utcnow()
        super(Toggle, self).save(**params)

    @classmethod
    def get(cls, docid, rev=None, db=None, dynamic_properties=True):
        if not docid.startswith(TOGGLE_ID_PREFIX):
            docid = generate_toggle_id(docid)
        return super(Toggle, cls).get(docid,
                                      rev=rev,
                                      db=db,
                                      dynamic_properties=dynamic_properties)

    def add(self, item):
        """
        Adds an item to the toggle. Only saves if necessary.
        """
        if item not in self.enabled_users:
            self.enabled_users.append(item)
            self.save()

    def remove(self, item):
        """
        Removes an item from the toggle. Only saves if necessary.
        """
        if item in self.enabled_users:
            self.enabled_users.remove(item)
            self.save()
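
A brief usage sketch; the slug and user below are hypothetical:

toggle = Toggle(slug="new_reports", enabled_users=[])
toggle.save()                      # first save fills in _id and last_modified
toggle.add("user@example.com")     # appends and saves only if not already present
toggle.remove("user@example.com")  # removes and saves only if present

# get() accepts either the bare slug or the full document id.
same_toggle = Toggle.get("new_reports")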
Example #4
0
class SubmissionErrorLog(XFormError):
    """
    When a hard submission error (typically bad XML) is received we save it
    here.
    """
    md5 = StringProperty()

    def __str__(self):
        return "Doc id: %s, Error %s" % (self.get_id, self.problem)

    def get_xml(self):
        return self.fetch_attachment(ATTACHMENT_NAME)

    def save(self, *args, **kwargs):
        # we have to override this because XFormError does too
        self["doc_type"] = "SubmissionErrorLog"
        # and we can't use super for the same reasons XFormError
        XFormInstance.save(self, *args, **kwargs)

    @property
    def is_submission_error_log(self):
        return True

    @classmethod
    def from_instance(cls, instance, message):
        """
        Create an instance of this record from a submission body
        """
        log = SubmissionErrorLog(
            received_on=datetime.datetime.utcnow(),
            md5=hashlib.md5(instance).hexdigest(),
            problem=message,
        )
        log.deferred_put_attachment(instance,
                                    ATTACHMENT_NAME,
                                    content_type="text/xml")
        return log
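
A sketch of recording a hard submission error, assuming `body` holds the raw submission bytes (content and message are hypothetical):

body = b"<data>not well-formed"
log = SubmissionErrorLog.from_instance(body, message="Invalid XML")
log.save()               # stored with doc_type "SubmissionErrorLog"
raw_xml = log.get_xml()  # reads the attached submission body back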
Example #5
0
class Dhis2Instance(Document):

    dhis2_version = StringProperty(default=None)
    dhis2_version_last_modified = DateTimeProperty(default=None)

    def get_api_version(self) -> int:
        if (self.dhis2_version is None
                or self.dhis2_version_last_modified + timedelta(days=365) <
                datetime.now()):
            # Fetching DHIS2 metadata is expensive. Only do it if we
            # don't know the version of DHIS2, or if we haven't checked
            # for over a year.
            self.update_dhis2_version()
        return get_api_version(self.dhis2_version)

    def update_dhis2_version(self):
        """
        Fetches metadata from DHIS2 instance and saves DHIS2 version.

        Notifies administrators if the version of DHIS2 exceeds the
        maximum supported version, but still saves and continues.
        """
        requests = self.connection_settings.get_requests(self)
        metadata = fetch_metadata(requests)
        dhis2_version = metadata["system"]["version"]
        try:
            get_api_version(dhis2_version)
        except Dhis2Exception as err:
            requests.notify_exception(str(err))
            raise
        if LooseVersion(dhis2_version) > DHIS2_MAX_VERSION:
            requests.notify_error(
                "Integration has not yet been tested for DHIS2 version "
                f"{dhis2_version}. Its API may not be supported.")
        self.dhis2_version = dhis2_version
        self.dhis2_version_last_modified = datetime.now()
        self.save()
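
A sketch of the caching behaviour described in the comment above, assuming `instance` is a saved Dhis2Instance with connection settings configured:

api_version = instance.get_api_version()
# update_dhis2_version() (and its metadata fetch) only runs when dhis2_version
# is unknown or was last checked more than a year ago; otherwise the stored
# value is reused.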
Example #6
0
class ShortFormRepeater(Repeater):
    """
    Record that form id & case ids should be repeated to a new url

    """

    version = StringProperty(default=V2, choices=LEGAL_VERSIONS)
    friendly_name = _("Forward Form Stubs")

    payload_generator_classes = (ShortFormRepeaterJsonPayloadGenerator,)

    @memoized
    def payload_doc(self, repeat_record):
        return FormAccessors(repeat_record.domain).get_form(repeat_record.payload_id)

    def allowed_to_forward(self, payload):
        return payload.xmlns != DEVICE_LOG_XMLNS

    def get_headers(self, repeat_record):
        headers = super(ShortFormRepeater, self).get_headers(repeat_record)
        headers.update({
            "received-on": self.payload_doc(repeat_record).received_on.isoformat()+"Z"
        })
        return headers
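
A sketch of the extra header this repeater adds, assuming `repeater` is a ShortFormRepeater and `record` is one of its repeat records (timestamp hypothetical):

headers = repeater.get_headers(record)
# In addition to the base Repeater headers, this includes something like:
#   headers["received-on"] == "2020-01-01T12:00:00Z"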
Example #7
0
class ConstantString(ValueSource):
    """
    A constant value.

    Use the model's data types for the `serialize()` method to convert
    the value for the external system, if necessary.
    """
    # Example "person_property" value::
    #
    #     {
    #       "birthdate": {
    #         "doc_type": "ConstantString",
    #         "value": "Sep 7, 3761 BC"
    #       }
    #     }
    #
    value = StringProperty()

    def deserialize(self, external_value):
        # ConstantString doesn't have a corresponding case or form value
        return None

    def _get_commcare_value(self, case_trigger_info):
        return self.value
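
A sketch of the asymmetry between the two directions, using the hypothetical document shown in the comment above:

constant = ConstantString(value="Sep 7, 3761 BC")
constant._get_commcare_value(case_trigger_info=None)  # -> "Sep 7, 3761 BC"
constant.deserialize("anything")                      # -> None; constants never map back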
Example #8
0
class RegistrationRequest(Document):
    tos_confirmed = BooleanProperty(default=False)
    request_time = DateTimeProperty()
    request_ip = StringProperty()
    activation_guid = StringProperty()
    confirm_time = DateTimeProperty()
    confirm_ip = StringProperty()
    domain = StringProperty()
    new_user_username = StringProperty()
    requesting_user_username = StringProperty()

    @property
    @memoized
    def project(self):
        return Domain.get_by_name(self.domain)

    @classmethod
    def get_by_guid(cls, guid):
        result = cls.view("registration/requests_by_guid",
                          key=guid,
                          reduce=False,
                          include_docs=True).first()
        return result

    @classmethod
    def get_requests_today(cls):
        today = datetime.datetime.utcnow()
        yesterday = today - datetime.timedelta(1)
        result = cls.view("registration/requests_by_time",
                          startkey=yesterday.isoformat(),
                          endkey=today.isoformat(),
                          reduce=True).all()
        if not result:
            return 0
        return result[0]['value']

    @classmethod
    def get_request_for_username(cls, username):
        result = cls.view("registration/requests_by_username",
                          key=username,
                          reduce=False,
                          include_docs=True).first()
        return result
Example #9
0
class CommCareCaseAttachment(LooselyEqualDocumentSchema, IsImageMixin):
    identifier = StringProperty()
    attachment_src = StringProperty()
    attachment_from = StringProperty()
    attachment_name = StringProperty()
    server_mime = StringProperty()  # Server detected MIME
    server_md5 = StringProperty()  # Couch detected hash

    attachment_size = IntegerProperty()  # file size
    attachment_properties = DictProperty(
    )  # width, height, other relevant metadata

    @property
    def content_type(self):
        return self.server_mime

    @property
    def is_present(self):
        """
        Helper method to see if this is a delete vs. update

        NOTE this is related to but reversed logic from
        `casexml.apps.case.xml.parser.CaseAttachment.is_delete`.
        """
        return self.attachment_src or self.attachment_from

    @classmethod
    def from_case_index_update(cls, attachment):
        if attachment.attachment_src or attachment.attachment_from:
            # guess_type returns (type, encoding); type is None when unknown
            mime_type, _ = mimetypes.guess_type(attachment.attachment_src)

            ret = cls(identifier=attachment.identifier,
                      attachment_src=attachment.attachment_src,
                      attachment_from=attachment.attachment_from,
                      attachment_name=attachment.attachment_name,
                      server_mime=mime_type)
        else:
            ret = cls(identifier=attachment.identifier)
        return ret
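
A sketch of how is_present distinguishes updates from deletes; the identifiers and file names are hypothetical:

updated = CommCareCaseAttachment(
    identifier="photo",
    attachment_src="photo.jpg",
    server_mime="image/jpeg",
)
deleted = CommCareCaseAttachment(identifier="photo")

bool(updated.is_present)  # True: it has a source, so this is an add/update
bool(deleted.is_present)  # False: no src or "from", so this represents a delete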
Example #10
0
class Metadata(DocumentSchema):
    """
    Metadata of an xform, from a meta block structured like:

        <Meta>
            <timeStart />
            <timeEnd />
            <instanceID />
            <userID />
            <deviceID />
            <deprecatedID />
            <username />

            <!-- CommCare extension -->
            <appVersion />
            <location />
        </Meta>

    See spec: https://bitbucket.org/javarosa/javarosa/wiki/OpenRosaMetaDataSchema

    username is not part of the spec but included for convenience
    """
    timeStart = DateTimeProperty()
    timeEnd = DateTimeProperty()
    instanceID = StringProperty()
    userID = StringProperty()
    deviceID = StringProperty()
    deprecatedID = StringProperty()
    username = StringProperty()
    appVersion = StringProperty()
    location = GeoPointProperty()

    @property
    def commcare_version(self):
        from corehq.apps.receiverwrapper.util import get_commcare_version_from_appversion_text
        from distutils.version import LooseVersion
        version_text = get_commcare_version_from_appversion_text(
            self.appVersion)
        if version_text:
            return LooseVersion(version_text)
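
A sketch of commcare_version with a hypothetical appVersion string; the exact formats accepted are defined by get_commcare_version_from_appversion_text:

meta = Metadata(appVersion='CommCare ODK, version "2.47.4"')
version = meta.commcare_version  # a LooseVersion if a version could be extracted, else None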
Example #11
0
class PropertyWeight(DocumentSchema):
    case_property = StringProperty()
    weight = DecimalProperty()
Example #12
0
class SimplifiedSyncLog(AbstractSyncLog):
    """
    New, simplified sync log class that is used by ownership cleanliness restore.

    Just maintains a flat list of case IDs on the phone rather than the case/dependent state
    lists from the SyncLog class.
    """
    log_format = StringProperty(default=LOG_FORMAT_SIMPLIFIED)
    case_ids_on_phone = SetProperty(six.text_type)
    # this is a subset of case_ids_on_phone used to flag that a case is only around because it has dependencies
    # this allows us to purge it if possible from other actions
    dependent_case_ids_on_phone = SetProperty(six.text_type)
    owner_ids_on_phone = SetProperty(six.text_type)
    index_tree = SchemaProperty(IndexTree)  # index tree of subcases / children
    extension_index_tree = SchemaProperty(
        IndexTree)  # index tree of extensions
    closed_cases = SetProperty(six.text_type)
    extensions_checked = BooleanProperty(default=False)
    device_id = StringProperty()

    _purged_cases = None

    @property
    def purged_cases(self):
        if self._purged_cases is None:
            self._purged_cases = set()
        return self._purged_cases

    def case_count(self):
        return len(self.case_ids_on_phone)

    def phone_is_holding_case(self, case_id):
        """
        Whether the phone currently has a case, according to this sync log
        """
        return case_id in self.case_ids_on_phone

    def get_footprint_of_cases_on_phone(self):
        return list(self.case_ids_on_phone)

    @property
    def primary_case_ids(self):
        return self.case_ids_on_phone - self.dependent_case_ids_on_phone

    def purge(self, case_id, xform_id=None):
        """
        This happens in 3 phases, and recursively tries to purge outgoing indices of purged cases.
        Definitions:
        -----------
        A case is *relevant* if:
        - it is open and owned or,
        - it has a relevant child or,
        - it has a relevant extension or,
        - it is the extension of a relevant case.

        A case is *available* if:
        - it is open and not an extension case or,
        - it is open and is the extension of an available case.

        A case is *live* if:
        - it is owned and available or,
        - it has a live child or,
        - it has a live extension or,
        - it is the extension of a live case.

        Algorithm:
        ----------
        1. Mark *relevant* cases
            Mark all open cases owned by the user relevant. Traversing all outgoing child
            and extension indexes, as well as all incoming extension indexes, mark all
            touched cases relevant.

        2. Mark *available* cases
            Mark all relevant cases that are open and have no outgoing extension indexes
            as available. Traverse incoming extension indexes which don't lead to closed
            cases, mark all touched cases as available.

        3. Mark *live* cases
            Mark all relevant, owned, available cases as live. Traverse incoming
            extension indexes which don't lead to closed cases, mark all touched
            cases as live.
        """
        _get_logger().debug("purging: {}".format(case_id))
        self.dependent_case_ids_on_phone.add(case_id)
        relevant = self._get_relevant_cases(case_id)
        available = self._get_available_cases(relevant)
        live = self._get_live_cases(available)
        to_remove = (relevant - self.purged_cases) - live
        self._remove_cases_purge_indices(to_remove, case_id, xform_id)
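
    # Worked sketch with hypothetical data: the user owns open case h, e is an
    # open extension of h (extension index e --> h), and c is a closed child of
    # h (child index c --> h). Calling purge(c):
    #   1. relevant: c's outgoing child index reaches h, and h's incoming
    #      extension index reaches e, so {c, h, e} are all relevant.
    #   2. available: h is open with no outgoing extension index, so it is
    #      available; e is open and reached through h's incoming extension
    #      index, so it is available too; c is closed, so it is not.
    #   3. live: h is owned and available, so it is live, and e extends a live
    #      case, so it is live as well. Only c ends up in to_remove and is
    #      handed to _remove_cases_purge_indices.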

    def _get_relevant_cases(self, case_id):
        """
        Mark all open cases owned by the user relevant. Traversing all outgoing child
        and extension indexes, as well as all incoming extension indexes,
        mark all touched cases relevant.
        """
        relevant = IndexTree.get_all_dependencies(
            case_id,
            child_index_tree=self.index_tree,
            extension_index_tree=self.extension_index_tree,
        )
        _get_logger().debug("Relevant cases of {}: {}".format(
            case_id, relevant))
        return relevant

    def _get_available_cases(self, relevant):
        """
        Mark all relevant cases that are open and have no outgoing extension indexes
        as available. Traverse incoming extension indexes which don't lead to closed
        cases, mark all touched cases as available
        """
        incoming_extensions = self.extension_index_tree.reverse_indices
        available = {
            case
            for case in relevant if case not in self.closed_cases and (
                not self.extension_index_tree.indices.get(case)
                or self.index_tree.indices.get(case))
        }
        new_available = set() | available
        while new_available:
            case_to_check = new_available.pop()
            for incoming_extension in incoming_extensions.get(
                    case_to_check, []):
                closed = incoming_extension in self.closed_cases
                purged = incoming_extension in self.purged_cases
                if not closed and not purged:
                    new_available.add(incoming_extension)
            available = available | new_available
        _get_logger().debug("Available cases: {}".format(available))

        return available

    def _get_live_cases(self, available):
        """
        Mark all relevant, owned, available cases as live. Traverse incoming
        extension indexes which don't lead to closed cases, mark all touched
        cases as live.
        """
        primary_case_ids = self.primary_case_ids
        live = available & primary_case_ids
        new_live = set() | live
        checked = set()
        while new_live:
            case_to_check = new_live.pop()
            checked.add(case_to_check)
            new_live = new_live | IndexTree.get_all_outgoing_cases(
                case_to_check, self.index_tree,
                self.extension_index_tree) - self.purged_cases
            new_live = new_live | IndexTree.traverse_incoming_extensions(
                case_to_check,
                self.extension_index_tree,
                frozenset(self.closed_cases),
            ) - self.purged_cases
            new_live = new_live - checked
            live = live | new_live

        _get_logger().debug("live cases: {}".format(live))

        return live

    def _remove_cases_purge_indices(self, all_to_remove, checked_case_id,
                                    xform_id):
        """Remove all cases marked for removal. Traverse child cases and try to purge those too."""

        _get_logger().debug("cases to to_remove: {}".format(all_to_remove))
        for to_remove in all_to_remove:
            indices = self.index_tree.indices.get(to_remove, {})
            self._remove_case(to_remove, all_to_remove, checked_case_id,
                              xform_id)
            for referenced_case in indices.values():
                is_dependent_case = referenced_case in self.dependent_case_ids_on_phone
                already_primed_for_removal = referenced_case in all_to_remove
                if is_dependent_case and not already_primed_for_removal and referenced_case != checked_case_id:
                    self.purge(referenced_case, xform_id)

    def _remove_case(self, to_remove, all_to_remove, checked_case_id,
                     xform_id):
        """Removes case from index trees, case_ids_on_phone and dependent_case_ids_on_phone if pertinent"""
        _get_logger().debug('removing: {}'.format(to_remove))

        deleted_indices = self.index_tree.indices.pop(to_remove, {})
        deleted_indices.update(
            self.extension_index_tree.indices.pop(to_remove, {}))

        self._validate_case_removal(to_remove, all_to_remove, deleted_indices,
                                    checked_case_id, xform_id)

        try:
            self.case_ids_on_phone.remove(to_remove)
        except KeyError:
            should_fail_softly = not xform_id or _domain_has_legacy_toggle_set(
            )
            if should_fail_softly:
                pass
            else:
                # this is only a soft assert for now because of http://manage.dimagi.com/default.asp?181443
                # we should convert back to a real Exception when we stop getting any of these
                _assert = soft_assert(notify_admins=True,
                                      exponential_backoff=False)
                _assert(
                        False, 'case already removed from synclog', {
                        'case_id': to_remove,
                        'synclog_id': self._id,
                        'form_id': xform_id
                    })
        else:
            self.purged_cases.add(to_remove)

        if to_remove in self.dependent_case_ids_on_phone:
            self.dependent_case_ids_on_phone.remove(to_remove)

    def _validate_case_removal(self, case_to_remove, all_to_remove,
                               deleted_indices, checked_case_id, xform_id):
        """Traverse immediate outgoing indices. Validate that these are also candidates for removal."""
        if case_to_remove == checked_case_id:
            return

        # Logging removed temporarily: https://github.com/dimagi/commcare-hq/pull/16259#issuecomment-303176217
        # for index in deleted_indices.values():
        #     if xform_id and not _domain_has_legacy_toggle_set():
        #         # unblocking http://manage.dimagi.com/default.asp?185850
        #         _assert = soft_assert(send_to_ops=False, log_to_file=True, exponential_backoff=True,
        #                               fail_if_debug=True)
        #         _assert(index in (all_to_remove | set([checked_case_id])),
        #                 "expected {} in {} but wasn't".format(index, all_to_remove))

    def _add_primary_case(self, case_id):
        self.case_ids_on_phone.add(case_id)
        if case_id in self.dependent_case_ids_on_phone:
            self.dependent_case_ids_on_phone.remove(case_id)

    def _add_index(self, index, case_update):
        _get_logger().debug('adding index {} --<{}>--> {} ({}).'.format(
            index.case_id, index.relationship, index.referenced_id,
            index.identifier))
        if index.relationship == const.CASE_INDEX_EXTENSION:
            self._add_extension_index(index, case_update)
        else:
            self._add_child_index(index)

    def _add_extension_index(self, index, case_update):
        assert index.relationship == const.CASE_INDEX_EXTENSION
        self.extension_index_tree.set_index(index.case_id, index.identifier,
                                            index.referenced_id)

        if index.referenced_id not in self.case_ids_on_phone:
            self.case_ids_on_phone.add(index.referenced_id)
            self.dependent_case_ids_on_phone.add(index.referenced_id)

        case_child_indices = [
            idx for idx in case_update.indices_to_add
            if idx.relationship == const.CASE_INDEX_CHILD
            and idx.referenced_id == index.referenced_id
        ]
        if not case_child_indices and not case_update.is_live:
            # this case doesn't also have child indices, and it is not owned, so it is dependent
            self.dependent_case_ids_on_phone.add(index.case_id)

    def _add_child_index(self, index):
        assert index.relationship == const.CASE_INDEX_CHILD
        self.index_tree.set_index(index.case_id, index.identifier,
                                  index.referenced_id)
        if index.referenced_id not in self.case_ids_on_phone:
            self.case_ids_on_phone.add(index.referenced_id)
            self.dependent_case_ids_on_phone.add(index.referenced_id)

    def _delete_index(self, index):
        self.index_tree.delete_index(index.case_id, index.identifier)
        self.extension_index_tree.delete_index(index.case_id, index.identifier)

    def update_phone_lists(self, xform, case_list):
        made_changes = False
        _get_logger().debug('updating sync log for {}'.format(self.user_id))
        _get_logger().debug('case ids before update: {}'.format(', '.join(
            self.case_ids_on_phone)))
        _get_logger().debug('dependent case ids before update: {}'.format(
            ', '.join(self.dependent_case_ids_on_phone)))
        _get_logger().debug('index tree before update: {}'.format(
            self.index_tree))
        _get_logger().debug('extension index tree before update: {}'.format(
            self.extension_index_tree))

        class CaseUpdate(object):
            def __init__(self, case_id, owner_ids_on_phone):
                self.case_id = case_id
                self.owner_ids_on_phone = owner_ids_on_phone
                self.was_live_previously = True
                self.final_owner_id = None
                self.is_closed = None
                self.indices_to_add = []
                self.indices_to_delete = []

            @property
            def extension_indices_to_add(self):
                return [
                    index for index in self.indices_to_add
                    if index.relationship == const.CASE_INDEX_EXTENSION
                ]

            def has_extension_indices_to_add(self):
                return len(self.extension_indices_to_add) > 0

            @property
            def is_live(self):
                """returns whether an update is live for a specifc set of owner_ids"""
                if self.is_closed:
                    return False
                elif self.final_owner_id is None:
                    # we likely didn't touch owner_id so just default to whatever it was previously
                    return self.was_live_previously
                else:
                    return self.final_owner_id in self.owner_ids_on_phone

        ShortIndex = namedtuple(
            'ShortIndex',
            ['case_id', 'identifier', 'referenced_id', 'relationship'])

        # this is a variable used via closures in the function below
        owner_id_map = {}

        def get_latest_owner_id(case_id, action=None):
            # "latest" just means as this forms actions are played through
            if action is not None:
                owner_id_from_action = action.updated_known_properties.get(
                    "owner_id")
                if owner_id_from_action is not None:
                    owner_id_map[case_id] = owner_id_from_action
            return owner_id_map.get(case_id, None)

        all_updates = {}
        for case in case_list:
            if case.case_id not in all_updates:
                _get_logger().debug('initializing update for case {}'.format(
                    case.case_id))
                all_updates[case.case_id] = CaseUpdate(
                    case_id=case.case_id,
                    owner_ids_on_phone=self.owner_ids_on_phone)

            case_update = all_updates[case.case_id]
            case_update.was_live_previously = case.case_id in self.primary_case_ids
            actions = case.get_actions_for_form(xform)
            for action in actions:
                _get_logger().debug('{}: {}'.format(case.case_id,
                                                    action.action_type))
                owner_id = get_latest_owner_id(case.case_id, action)
                if owner_id is not None:
                    case_update.final_owner_id = owner_id
                if action.action_type == const.CASE_ACTION_INDEX:
                    for index in action.indices:
                        if index.referenced_id:
                            case_update.indices_to_add.append(
                                ShortIndex(case.case_id, index.identifier,
                                           index.referenced_id,
                                           index.relationship))
                        else:
                            case_update.indices_to_delete.append(
                                ShortIndex(case.case_id, index.identifier,
                                           None, None))
                elif action.action_type == const.CASE_ACTION_CLOSE:
                    case_update.is_closed = True

        non_live_updates = []
        for case in case_list:
            case_update = all_updates[case.case_id]
            if case_update.is_live:
                _get_logger().debug('case {} is live.'.format(
                    case_update.case_id))
                if case.case_id not in self.case_ids_on_phone:
                    self._add_primary_case(case.case_id)
                    made_changes = True
                elif case.case_id in self.dependent_case_ids_on_phone:
                    self.dependent_case_ids_on_phone.remove(case.case_id)
                    made_changes = True

                for index in case_update.indices_to_add:
                    self._add_index(index, case_update)
                    made_changes = True
                for index in case_update.indices_to_delete:
                    self._delete_index(index)
                    made_changes = True
Example #13
0
class AbstractSyncLog(SafeSaveDocument):
    date = DateTimeProperty()
    domain = StringProperty()
    user_id = StringProperty()
    build_id = StringProperty()  # only works with app-aware sync
    app_id = StringProperty()  # only works with app-aware sync

    previous_log_id = StringProperty()  # previous sync log, forming a chain
    duration = IntegerProperty()  # in seconds
    log_format = StringProperty()

    # owner_ids_on_phone stores the ids the phone thinks it's the owner of.
    # This typically includes the user id,
    # as well as all groups that that user is a member of.
    owner_ids_on_phone = StringListProperty()

    # for debugging / logging
    previous_log_rev = StringProperty(
    )  # rev of the previous log at the time of creation
    last_submitted = DateTimeProperty(
    )  # last time a submission caused this to be modified
    rev_before_last_submitted = StringProperty(
    )  # rev when the last submission was saved
    last_cached = DateTimeProperty(
    )  # last time this generated a cached response
    hash_at_last_cached = StringProperty(
    )  # the state hash of this when it was last cached

    # save state errors and hashes here
    had_state_error = BooleanProperty(default=False)
    error_date = DateTimeProperty()
    error_hash = StringProperty()
    cache_payload_paths = DictProperty()

    last_ucr_sync_times = SchemaListProperty(UCRSyncLog)

    strict = True  # for asserts

    @classmethod
    def wrap(cls, data):
        ret = super(AbstractSyncLog, cls).wrap(data)
        if hasattr(ret, 'has_assert_errors'):
            ret.strict = False
        return ret

    def save(self):
        self._synclog_sql = save_synclog_to_sql(self)

    def delete(self):
        if getattr(self, '_synclog_sql', None):
            self._synclog_sql.delete()

    def case_count(self):
        """
        How many cases are associated with this. Used in reports.
        """
        raise NotImplementedError()

    def phone_is_holding_case(self, case_id):
        raise NotImplementedError()

    def get_footprint_of_cases_on_phone(self):
        """
        Gets the phone's flat list of all case ids on the phone,
        owned or not owned but relevant.
        """
        raise NotImplementedError()

    def get_state_hash(self):
        return CaseStateHash(
            Checksum(self.get_footprint_of_cases_on_phone()).hexdigest())

    def update_phone_lists(self, xform, case_list):
        """
        Given a form and a list of touched cases, update this sync log to reflect the updated
        state on the phone.
        """
        raise NotImplementedError()

    @classmethod
    def from_other_format(cls, other_sync_log):
        """
        Convert to an instance of a subclass from another subclass. Subclasses can
        override this to provide conversion functions.
        """
        raise IncompatibleSyncLogType('Unable to convert from {} to {}'.format(
            type(other_sync_log),
            cls,
        ))

    # anything prefixed with 'tests_only' is only used in tests
    def tests_only_get_cases_on_phone(self):
        raise NotImplementedError()

    def test_only_clear_cases_on_phone(self):
        raise NotImplementedError()

    def test_only_get_dependent_cases_on_phone(self):
        raise NotImplementedError()
Example #14
0
class UCRSyncLog(Document):
    report_uuid = StringProperty()
    datetime = DateTimeProperty()
Example #15
0
class ConstantString(ValueSource):
    value = StringProperty()

    def get_value(self, case_trigger_info):
        return self.value
Example #16
0
class EWSGhanaConfig(Document):
    enabled = BooleanProperty(default=False)
    domain = StringProperty()
    url = StringProperty(default="http://ewsghana.com/api/v0_1")
    username = StringProperty()
    password = StringProperty()
    steady_sync = BooleanProperty(default=False)
    all_stock_data = BooleanProperty(default=False)

    @classmethod
    def for_domain(cls, name):
        try:
            mapping = DocDomainMapping.objects.get(domain_name=name,
                                                   doc_type='EWSGhanaConfig')
            return cls.get(docid=mapping.doc_id)
        except DocDomainMapping.DoesNotExist:
            return None

    @classmethod
    def get_all_configs(cls):
        mappings = DocDomainMapping.objects.filter(doc_type='EWSGhanaConfig')
        configs = [cls.get(docid=mapping.doc_id) for mapping in mappings]
        return configs

    @classmethod
    def get_all_steady_sync_configs(cls):
        return [
            config for config in cls.get_all_configs() if config.steady_sync
        ]

    @classmethod
    def get_all_enabled_domains(cls):
        configs = cls.get_all_configs()
        return [
            c.domain for c in filter(lambda config: config.enabled, configs)
        ]

    @property
    def is_configured(self):
        return bool(self.enabled and self.url and self.password and self.username)

    def save(self, **params):
        super(EWSGhanaConfig, self).save(**params)

        self.update_toggle()

        try:
            DocDomainMapping.objects.get(doc_id=self._id,
                                         domain_name=self.domain,
                                         doc_type="EWSGhanaConfig")
        except DocDomainMapping.DoesNotExist:
            DocDomainMapping.objects.create(doc_id=self._id,
                                            domain_name=self.domain,
                                            doc_type='EWSGhanaConfig')
            add_to_module_map(self.domain, 'custom.ewsghana')

    def update_toggle(self):
        """
        This turns on the special stock handler when EWS is enabled.
        """

        if self.enabled:
            STOCK_AND_RECEIPT_SMS_HANDLER.set(self.domain, True,
                                              NAMESPACE_DOMAIN)

    class Meta:
        app_label = 'ewsghana'
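
A brief usage sketch; the domain and credentials are hypothetical:

config = EWSGhanaConfig(
    domain="example-domain",
    enabled=True,
    url="http://ewsghana.com/api/v0_1",
    username="api-user",
    password="secret",
)
config.save()  # first save creates the DocDomainMapping and sets the SMS handler toggle

same_config = EWSGhanaConfig.for_domain("example-domain")  # None if no mapping exists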
Example #17
0
class RepeatRecord(Document):
    """
    A record of a particular instance of something that needs to be forwarded,
    with a link to the proper repeater object.
    """

    domain = StringProperty()
    repeater_id = StringProperty()
    repeater_type = StringProperty()
    payload_id = StringProperty()

    overall_tries = IntegerProperty(default=0)
    max_possible_tries = IntegerProperty(default=3)

    attempts = ListProperty(RepeatRecordAttempt)

    cancelled = BooleanProperty(default=False)
    registered_on = DateTimeProperty()
    last_checked = DateTimeProperty()
    failure_reason = StringProperty()
    next_check = DateTimeProperty()
    succeeded = BooleanProperty(default=False)

    @property
    def record_id(self):
        return self._id

    @classmethod
    def wrap(cls, data):
        should_bootstrap_attempts = ('attempts' not in data)

        self = super(RepeatRecord, cls).wrap(data)

        if should_bootstrap_attempts and self.last_checked:
            assert not self.attempts
            self.attempts = [RepeatRecordAttempt(
                cancelled=self.cancelled,
                datetime=self.last_checked,
                failure_reason=self.failure_reason,
                success_response=None,
                next_check=self.next_check,
                succeeded=self.succeeded,
            )]
        return self

    @property
    @memoized
    def repeater(self):
        try:
            return Repeater.get(self.repeater_id)
        except ResourceNotFound:
            return None

    @property
    def url(self):
        warnings.warn("RepeatRecord.url is deprecated. Use Repeater.get_url instead", DeprecationWarning)
        if self.repeater:
            return self.repeater.get_url(self)

    @property
    def state(self):
        state = RECORD_PENDING_STATE
        if self.succeeded:
            state = RECORD_SUCCESS_STATE
        elif self.cancelled:
            state = RECORD_CANCELLED_STATE
        elif self.failure_reason:
            state = RECORD_FAILURE_STATE
        return state

    @classmethod
    def all(cls, domain=None, due_before=None, limit=None):
        json_now = json_format_datetime(due_before or datetime.utcnow())
        repeat_records = RepeatRecord.view("repeaters/repeat_records_by_next_check",
            startkey=[domain],
            endkey=[domain, json_now, {}],
            include_docs=True,
            reduce=False,
            limit=limit,
        )
        return repeat_records

    @classmethod
    def count(cls, domain=None):
        results = RepeatRecord.view("repeaters/repeat_records_by_next_check",
            startkey=[domain],
            endkey=[domain, {}],
            reduce=True,
        ).one()
        return results['value'] if results else 0

    def add_attempt(self, attempt):
        self.attempts.append(attempt)
        self.last_checked = attempt.datetime
        self.next_check = attempt.next_check
        self.succeeded = attempt.succeeded
        self.cancelled = attempt.cancelled
        self.failure_reason = attempt.failure_reason

    def get_numbered_attempts(self):
        for i, attempt in enumerate(self.attempts):
            yield i + 1, attempt

    def postpone_by(self, duration):
        self.last_checked = datetime.utcnow()
        self.next_check = self.last_checked + duration
        self.save()

    def make_set_next_try_attempt(self, failure_reason):
        # we use an exponential back-off to avoid submitting to bad urls
        # too frequently.
        assert self.succeeded is False
        assert self.next_check is not None
        window = timedelta(minutes=0)
        if self.last_checked:
            window = self.next_check - self.last_checked
            window += (window // 2)  # window *= 1.5
        if window < MIN_RETRY_WAIT:
            window = MIN_RETRY_WAIT
        elif window > MAX_RETRY_WAIT:
            window = MAX_RETRY_WAIT

        now = datetime.utcnow()
        return RepeatRecordAttempt(
            cancelled=False,
            datetime=now,
            failure_reason=failure_reason,
            success_response=None,
            next_check=now + window,
            succeeded=False,
        )
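
    # Back-off sketch with hypothetical values: if the previous gap between
    # last_checked and next_check was 60 minutes, the new window is
    # 60 + 60 // 2 = 90 minutes. A record that has never been checked starts
    # from a zero window, which is then raised to MIN_RETRY_WAIT; windows are
    # always capped at MAX_RETRY_WAIT.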

    def try_now(self):
        # try when we haven't succeeded and either we've
        # never checked, or it's time to check again
        return not self.succeeded

    def get_payload(self):
        return self.repeater.get_payload(self)

    def get_attempt_info(self):
        return self.repeater.get_attempt_info(self)

    def handle_payload_exception(self, exception):
        now = datetime.utcnow()
        return RepeatRecordAttempt(
            cancelled=True,
            datetime=now,
            failure_reason=six.text_type(exception),
            success_response=None,
            next_check=None,
            succeeded=False,
        )

    def fire(self, force_send=False):
        if self.try_now() or force_send:
            self.overall_tries += 1
            try:
                attempt = self.repeater.fire_for_record(self)
            except Exception as e:
                log_repeater_error_in_datadog(self.domain, status_code=None,
                                              repeater_type=self.repeater_type)
                attempt = self.handle_payload_exception(e)
                raise
            finally:
                # pycharm warns attempt might not be defined.
                # that'll only happen if fire_for_record raises a non-Exception exception (e.g. SIGINT)
                # or handle_payload_exception raises an exception. I'm okay with that. -DMR
                self.add_attempt(attempt)
                self.save()

    @staticmethod
    def _format_response(response):
        return '{}: {}.\n{}'.format(
            response.status_code, response.reason, getattr(response, 'content', None))

    def handle_success(self, response):
        """Do something with the response if the repeater succeeds
        """
        now = datetime.utcnow()
        log_repeater_success_in_datadog(
            self.domain,
            response.status_code if response else None,
            self.repeater_type
        )
        return RepeatRecordAttempt(
            cancelled=False,
            datetime=now,
            failure_reason=None,
            success_response=self._format_response(response) if response else None,
            next_check=None,
            succeeded=True,
            info=self.get_attempt_info(),
        )

    def handle_failure(self, response):
        """Do something with the response if the repeater fails
        """
        return self._make_failure_attempt(self._format_response(response), response)

    def handle_exception(self, exception):
        """handle internal exceptions
        """
        return self._make_failure_attempt(six.text_type(exception), None)

    def _make_failure_attempt(self, reason, response):
        log_repeater_error_in_datadog(self.domain, response.status_code if response else None,
                                      self.repeater_type)

        if self.repeater.allow_retries(response) and self.overall_tries < self.max_possible_tries:
            return self.make_set_next_try_attempt(reason)
        else:
            now = datetime.utcnow()
            return RepeatRecordAttempt(
                cancelled=True,
                datetime=now,
                failure_reason=reason,
                success_response=None,
                next_check=None,
                succeeded=False,
                info=self.get_attempt_info(),
            )

    def cancel(self):
        self.next_check = None
        self.cancelled = True

    def requeue(self):
        self.cancelled = False
        self.succeeded = False
        self.failure_reason = ''
        self.overall_tries = 0
        self.next_check = datetime.utcnow()
Example #18
0
class FormQuestionSchema(Document):
    """
    Contains information about the questions for a specific form,
    specifically the options that are available (or have ever been available) for
    any multi-select questions.
    Calling `update_schema` will load the app and any saved versions of the app
    that have not already been processed and update the question schema with
    any new options.
    """
    domain = StringProperty(required=True)
    app_id = StringProperty(required=True)
    xmlns = StringProperty(required=True)

    last_processed_version = IntegerProperty(default=0)
    processed_apps = SetProperty(str)
    apps_with_errors = SetProperty(str)
    question_schema = SchemaDictProperty(QuestionMeta)

    class Meta(object):
        app_label = 'export'

    @classmethod
    def _get_id(cls, domain, app_id, xmlns):
        def _none_to_empty_string(value):
            return value if value is not None else ''

        key = list(map(_none_to_empty_string, [domain, app_id, xmlns]))
        return hashlib.sha1(':'.join(key).encode('utf-8')).hexdigest()

    @classmethod
    def get_by_key(cls, domain, app_id, xmlns):
        _id = cls._get_id(domain, app_id, xmlns)
        return cls.get(_id)

    @classmethod
    def get_or_create(cls, domain, app_id, xmlns):
        try:
            schema = cls.get_by_key(domain, app_id, xmlns)
        except ResourceNotFound:
            old_schemas = FormQuestionSchema.view(
                'form_question_schema/by_xmlns',
                key=[domain, app_id, xmlns],
                include_docs=True).all()

            if old_schemas:
                doc = old_schemas[0].to_json()
                del doc['_id']
                del doc['_rev']
                schema = FormQuestionSchema.wrap(doc)
                schema.save()

                for old in old_schemas:
                    old.delete()
            else:
                schema = FormQuestionSchema(domain=domain,
                                            app_id=app_id,
                                            xmlns=xmlns)
                schema.save()

        return schema

    def validate(self, required=True):
        # this isn't always set, so set to empty strings if not found
        if self.app_id is None:
            self.app_id = ''

        super(FormQuestionSchema, self).validate(required=required)
        if not self.get_id:
            self._id = self._get_id(self.domain, self.app_id, self.xmlns)

    def update_schema(self):
        all_app_ids = get_build_ids_after_version(self.domain, self.app_id,
                                                  self.last_processed_version)

        all_seen_apps = self.apps_with_errors | self.processed_apps
        to_process = [
            app_id for app_id in all_app_ids if app_id not in all_seen_apps
        ]
        if self.app_id not in all_seen_apps:
            to_process.append(self.app_id)

        for app_doc in iter_docs(Application.get_db(), to_process):
            if is_remote_app(app_doc):
                continue
            app = Application.wrap(app_doc)
            try:
                self.update_for_app(app)
            except AppManagerException:
                self.apps_with_errors.add(app.get_id)
                self.last_processed_version = app.version

        if to_process:
            self.save()

    def update_for_app(self, app):
        xform = app.get_xform_by_xmlns(self.xmlns, log_missing=False)
        if xform:
            prefix = '/{}/'.format(xform.data_node.tag_name)

            def to_json_path(xml_path):
                if not xml_path:
                    return

                if xml_path.startswith(prefix):
                    xml_path = xml_path[len(prefix):]
                return 'form.{}'.format(xml_path.replace('/', '.'))

            for question in xform.get_questions(app.langs):
                question_path = to_json_path(question['value'])
                if question['tag'] == 'select':
                    meta = self.question_schema.get(
                        question_path,
                        QuestionMeta(
                            repeat_context=to_json_path(question['repeat'])))
                    for opt in question['options']:
                        if opt['value'] not in meta.options:
                            meta.options.append(opt['value'])

                    self.question_schema[question_path] = meta
                else:
                    # In the event that a question was previously a multi-select and not one any longer,
                    # we need to clear the question schema
                    self.question_schema.pop(question_path, None)

        self.processed_apps.add(app.get_id)
        self.last_processed_version = app.version
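
A minimal usage sketch; the domain, app id, xmlns and question path are hypothetical:

schema = FormQuestionSchema.get_or_create(
    domain="example-domain",
    app_id="abc123",
    xmlns="http://openrosa.org/formdesigner/example",
)
schema.update_schema()  # processes builds not yet seen and records new multi-select options
meta = schema.question_schema.get("form.my_question")  # QuestionMeta, or None if not a multi-select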
Example #19
0
class RepeatRecord(Document):
    """
    A record of a particular instance of something that needs to be forwarded,
    with a link to the proper repeater object.
    """

    domain = StringProperty()
    repeater_id = StringProperty()
    repeater_type = StringProperty()
    payload_id = StringProperty()

    overall_tries = IntegerProperty(default=0)
    max_possible_tries = IntegerProperty(default=6)

    attempts = ListProperty(RepeatRecordAttempt)

    cancelled = BooleanProperty(default=False)
    registered_on = DateTimeProperty()
    last_checked = DateTimeProperty()
    failure_reason = StringProperty()
    next_check = DateTimeProperty()
    succeeded = BooleanProperty(default=False)

    @property
    def record_id(self):
        return self._id

    @classmethod
    def wrap(cls, data):
        should_bootstrap_attempts = ('attempts' not in data)

        self = super(RepeatRecord, cls).wrap(data)

        if should_bootstrap_attempts and self.last_checked:
            assert not self.attempts
            self.attempts = [
                RepeatRecordAttempt(
                    cancelled=self.cancelled,
                    datetime=self.last_checked,
                    failure_reason=self.failure_reason,
                    success_response=None,
                    next_check=self.next_check,
                    succeeded=self.succeeded,
                )
            ]
        return self

    @property
    @memoized
    def repeater(self):
        try:
            return Repeater.get(self.repeater_id)
        except ResourceNotFound:
            return None

    @property
    def url(self):
        warnings.warn(
            "RepeatRecord.url is deprecated. Use Repeater.get_url instead",
            DeprecationWarning)
        if self.repeater:
            return self.repeater.get_url(self)

    @property
    def state(self):
        state = RECORD_PENDING_STATE
        if self.succeeded:
            state = RECORD_SUCCESS_STATE
        elif self.cancelled:
            state = RECORD_CANCELLED_STATE
        elif self.failure_reason:
            state = RECORD_FAILURE_STATE
        return state

    @classmethod
    def all(cls, domain=None, due_before=None, limit=None):
        json_now = json_format_datetime(due_before or datetime.utcnow())
        repeat_records = RepeatRecord.view(
            "repeaters/repeat_records_by_next_check",
            startkey=[domain],
            endkey=[domain, json_now, {}],
            include_docs=True,
            reduce=False,
            limit=limit,
        )
        return repeat_records

    @classmethod
    def count(cls, domain=None):
        results = RepeatRecord.view(
            "repeaters/repeat_records_by_next_check",
            startkey=[domain],
            endkey=[domain, {}],
            reduce=True,
        ).one()
        return results['value'] if results else 0

    def add_attempt(self, attempt):
        self.attempts.append(attempt)
        self.last_checked = attempt.datetime
        self.next_check = attempt.next_check
        self.succeeded = attempt.succeeded
        self.cancelled = attempt.cancelled
        self.failure_reason = attempt.failure_reason

    def get_numbered_attempts(self):
        for i, attempt in enumerate(self.attempts):
            yield i + 1, attempt

    def postpone_by(self, duration):
        self.last_checked = datetime.utcnow()
        self.next_check = self.last_checked + duration
        self.save()

    def make_set_next_try_attempt(self, failure_reason):
        assert self.succeeded is False
        assert self.next_check is not None
        now = datetime.utcnow()
        return RepeatRecordAttempt(
            cancelled=False,
            datetime=now,
            failure_reason=failure_reason,
            success_response=None,
            next_check=now + _get_retry_interval(self.last_checked, now),
            succeeded=False,
        )

    def try_now(self):
        # try when we haven't succeeded and either we've
        # never checked, or it's time to check again
        return not self.succeeded

    def get_payload(self):
        return self.repeater.get_payload(self)

    def get_attempt_info(self):
        return self.repeater.get_attempt_info(self)

    def handle_payload_exception(self, exception):
        now = datetime.utcnow()
        return RepeatRecordAttempt(
            cancelled=True,
            datetime=now,
            failure_reason=str(exception),
            success_response=None,
            next_check=None,
            succeeded=False,
        )

    def fire(self, force_send=False):
        if self.try_now() or force_send:
            self.overall_tries += 1
            try:
                attempt = self.repeater.fire_for_record(self)
            except Exception as e:
                log_repeater_error_in_datadog(self.domain,
                                              status_code=None,
                                              repeater_type=self.repeater_type)
                attempt = self.handle_payload_exception(e)
                raise
            finally:
                # pycharm warns attempt might not be defined.
                # that'll only happen if fire_for_record raises a non-Exception exception (e.g. SIGINT)
                # or handle_payload_exception raises an exception. I'm okay with that. -DMR
                self.add_attempt(attempt)
                self.save()

    @staticmethod
    def _format_response(response):
        if not _is_response(response):
            return None
        response_body = getattr(response, "text", "")
        return '{}: {}.\n{}'.format(response.status_code, response.reason,
                                    response_body)

    def handle_success(self, response):
        """
        Log success in Datadog and return a success RepeatRecordAttempt.

        ``response`` can be a Requests response instance, or True if the
        payload did not result in an API call.
        """
        now = datetime.utcnow()
        if _is_response(response):
            # ^^^ Don't bother logging success in Datadog if the payload
            # did not need to be sent. (This can happen with DHIS2 if
            # the form that triggered the forwarder doesn't contain data
            # for a DHIS2 Event.)
            log_repeater_success_in_datadog(self.domain, response.status_code,
                                            self.repeater_type)
        return RepeatRecordAttempt(
            cancelled=False,
            datetime=now,
            failure_reason=None,
            success_response=self._format_response(response),
            next_check=None,
            succeeded=True,
            info=self.get_attempt_info(),
        )

    def handle_failure(self, response):
        """Do something with the response if the repeater fails
        """
        return self._make_failure_attempt(self._format_response(response),
                                          response)

    def handle_exception(self, exception):
        """handle internal exceptions
        """
        return self._make_failure_attempt(str(exception), None)

    def _make_failure_attempt(self, reason, response):
        log_repeater_error_in_datadog(
            self.domain, response.status_code if response else None,
            self.repeater_type)

        if self.repeater.allow_retries(
                response) and self.overall_tries < self.max_possible_tries:
            return self.make_set_next_try_attempt(reason)
        else:
            now = datetime.utcnow()
            return RepeatRecordAttempt(
                cancelled=True,
                datetime=now,
                failure_reason=reason,
                success_response=None,
                next_check=None,
                succeeded=False,
                info=self.get_attempt_info(),
            )

    def cancel(self):
        self.next_check = None
        self.cancelled = True

    def attempt_forward_now(self):
        from corehq.motech.repeaters.tasks import process_repeat_record

        def is_ready():
            return self.next_check < datetime.utcnow()

        def already_processed():
            return self.succeeded or self.cancelled or self.next_check is None

        if already_processed() or not is_ready():
            return

        # Set the next check to happen an arbitrarily long time from now so
        # if something goes horribly wrong with the delayed task it will not
        # be lost forever. A check at this time is expected to occur rarely,
        # if ever, because `process_repeat_record` will usually succeed or
        # reset the next check to sometime sooner.
        self.next_check = datetime.utcnow() + timedelta(hours=48)
        try:
            self.save()
        except ResourceConflict:
            # Another process beat us to the punch. This takes advantage
            # of Couch DB's optimistic locking, which prevents a process
            # with stale data from overwriting the work of another.
            return
        process_repeat_record.delay(self)

    def requeue(self):
        self.cancelled = False
        self.succeeded = False
        self.failure_reason = ''
        self.overall_tries = 0
        self.next_check = datetime.utcnow()
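
A note on the flow above: handle_success, handle_failure and handle_exception each wrap the outcome in a RepeatRecordAttempt, while attempt_forward_now "claims" the record by pushing next_check 48 hours out and saving before it enqueues process_repeat_record, relying on CouchDB's optimistic locking (ResourceConflict) so that only one process dispatches the task. The following is a minimal, self-contained sketch of that claim-before-dispatch pattern; FakeCouch, claim_and_dispatch and the revision numbers are illustrative stand-ins, not part of the repeaters module.

from datetime import datetime, timedelta


class ResourceConflict(Exception):
    """Stand-in for couchdbkit's ResourceConflict."""


class FakeCouch(object):
    """Tiny in-memory stand-in for a Couch database with optimistic locking."""

    def __init__(self):
        self._docs = {}  # doc_id -> (rev, doc)

    def save(self, doc_id, doc, rev):
        current_rev = self._docs.get(doc_id, (0, None))[0]
        if rev != current_rev:
            # A newer revision was saved by someone else first.
            raise ResourceConflict(doc_id)
        self._docs[doc_id] = (current_rev + 1, dict(doc))
        return current_rev + 1


def claim_and_dispatch(db, doc_id, doc, rev, dispatch):
    """Mirror attempt_forward_now: push next_check far into the future,
    save, and only dispatch the task if our save won the race."""
    doc['next_check'] = datetime.utcnow() + timedelta(hours=48)
    try:
        db.save(doc_id, doc, rev)
    except ResourceConflict:
        # Another process beat us to the punch; it will dispatch the task.
        return False
    dispatch(doc_id)
    return True


db = FakeCouch()
record = {'next_check': datetime.utcnow(), 'succeeded': False}
rev = db.save('record-1', record, 0)
assert claim_and_dispatch(db, 'record-1', record, rev, dispatch=print)      # wins the race
assert not claim_and_dispatch(db, 'record-1', record, rev, dispatch=print)  # stale rev, loses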
Example #20
class QuestionMeta(DocumentSchema):
    options = ListProperty()
    repeat_context = StringProperty()

    class Meta(object):
        app_label = 'export'
Example #21
class OpenmrsFormConfig(DocumentSchema):
    xmlns = StringProperty()
    openmrs_visit_type = StringProperty()
    openmrs_encounter_type = StringProperty()
    openmrs_form = StringProperty()
    openmrs_observations = ListProperty(ObservationMapping)
Example #22
class ObservationMapping(DocumentSchema):
    concept = StringProperty()
    value = SchemaProperty(ValueSource)
Example #23
class AtomFeedStatus(DocumentSchema):
    last_polled_at = DateTimeProperty(default=None)
    last_page = StringProperty(default=None)
Example #24
class DataSourceConfiguration(CachedCouchDocumentMixin, Document,
                              AbstractUCRDataSource):
    """
    A data source configuration. These map 1:1 with database tables that get created.
    Each data source can back an arbitrary number of reports.
    """
    domain = StringProperty(required=True)
    engine_id = StringProperty(default=UCR_ENGINE_ID)
    backend_id = StringProperty(default=UCR_SQL_BACKEND)  # no longer used
    referenced_doc_type = StringProperty(required=True)
    table_id = StringProperty(required=True)
    display_name = StringProperty()
    base_item_expression = DictProperty()
    configured_filter = DictProperty()
    configured_indicators = ListProperty()
    named_expressions = DictProperty()
    named_filters = DictProperty()
    meta = SchemaProperty(DataSourceMeta)
    is_deactivated = BooleanProperty(default=False)
    last_modified = DateTimeProperty()
    asynchronous = BooleanProperty(default=False)
    sql_column_indexes = SchemaListProperty(SQLColumnIndexes)
    disable_destructive_rebuild = BooleanProperty(default=False)
    sql_settings = SchemaProperty(SQLSettings)

    class Meta(object):
        # prevent JsonObject from auto-converting dates etc.
        string_conversions = ()

    def __str__(self):
        return '{} - {}'.format(self.domain, self.display_name)

    def save(self, **params):
        self.last_modified = datetime.utcnow()
        super(DataSourceConfiguration, self).save(**params)

    @property
    def data_source_id(self):
        return self._id

    def filter(self, document):
        filter_fn = self._get_main_filter()
        return filter_fn(document, EvaluationContext(document, 0))

    def deleted_filter(self, document):
        filter_fn = self._get_deleted_filter()
        return filter_fn and filter_fn(document, EvaluationContext(
            document, 0))

    @memoized
    def _get_main_filter(self):
        return self._get_filter([self.referenced_doc_type])

    @memoized
    def _get_deleted_filter(self):
        return self._get_filter(get_deleted_doc_types(
            self.referenced_doc_type),
                                include_configured=False)

    def _get_filter(self, doc_types, include_configured=True):
        if not doc_types:
            return None

        extras = ([self.configured_filter]
                  if include_configured and self.configured_filter else [])
        built_in_filters = [
            self._get_domain_filter_spec(),
            {
                'type':
                'or',
                'filters': [{
                    "type": "boolean_expression",
                    "expression": {
                        "type": "property_name",
                        "property_name": "doc_type",
                    },
                    "operator": "eq",
                    "property_value": doc_type,
                } for doc_type in doc_types],
            },
        ]
        return FilterFactory.from_spec(
            {
                'type': 'and',
                'filters': built_in_filters + extras,
            },
            context=self.get_factory_context(),
        )

    def _get_domain_filter_spec(self):
        return {
            "type": "boolean_expression",
            "expression": {
                "type": "property_name",
                "property_name": "domain",
            },
            "operator": "eq",
            "property_value": self.domain,
        }

    @property
    @memoized
    def named_expression_objects(self):
        named_expression_specs = deepcopy(self.named_expressions)
        named_expressions = {}
        spec_error = None
        while named_expression_specs:
            number_generated = 0
            for name, expression in list(named_expression_specs.items()):
                try:
                    named_expressions[name] = ExpressionFactory.from_spec(
                        expression,
                        FactoryContext(named_expressions=named_expressions,
                                       named_filters={}))
                    number_generated += 1
                    del named_expression_specs[name]
                except BadSpecError as bad_spec_error:
                    # maybe a nested name resolution issue, try again on the next pass
                    spec_error = bad_spec_error
            if number_generated == 0 and named_expression_specs:
                # we didn't generate anything on this pass and there are
                # still unresolved references, so we have to fail
                assert spec_error is not None
                raise spec_error
        return named_expressions

    @property
    @memoized
    def named_filter_objects(self):
        return {
            name: FilterFactory.from_spec(
                filter, FactoryContext(self.named_expression_objects, {}))
            for name, filter in self.named_filters.items()
        }

    def get_factory_context(self):
        return FactoryContext(self.named_expression_objects,
                              self.named_filter_objects)

    @property
    @memoized
    def default_indicators(self):
        default_indicators = [
            IndicatorFactory.from_spec(
                {
                    "column_id": "doc_id",
                    "type": "expression",
                    "display_name": "document id",
                    "datatype": "string",
                    "is_nullable": False,
                    "is_primary_key": True,
                    "expression": {
                        "type": "root_doc",
                        "expression": {
                            "type": "property_name",
                            "property_name": "_id"
                        }
                    }
                }, self.get_factory_context())
        ]

        default_indicators.append(
            IndicatorFactory.from_spec({
                "type": "inserted_at",
            }, self.get_factory_context()))

        if self.base_item_expression:
            default_indicators.append(
                IndicatorFactory.from_spec({
                    "type": "repeat_iteration",
                }, self.get_factory_context()))

        return default_indicators

    @property
    @memoized
    def indicators(self):
        return CompoundIndicator(
            self.display_name,
            self.default_indicators + [
                IndicatorFactory.from_spec(indicator,
                                           self.get_factory_context())
                for indicator in self.configured_indicators
            ],
            None,
        )

    @property
    @memoized
    def parsed_expression(self):
        if self.base_item_expression:
            return ExpressionFactory.from_spec(
                self.base_item_expression, context=self.get_factory_context())
        return None

    def get_columns(self):
        return self.indicators.get_columns()

    @property
    @memoized
    def columns_by_id(self):
        return {c.id: c for c in self.get_columns()}

    def get_column_by_id(self, column_id):
        return self.columns_by_id.get(column_id)

    def get_items(self, document, eval_context=None):
        if self.filter(document):
            if not self.base_item_expression:
                return [document]
            else:
                result = self.parsed_expression(document, eval_context)
                if result is None:
                    return []
                elif isinstance(result, list):
                    return result
                else:
                    return [result]
        else:
            return []

    def get_all_values(self, doc, eval_context=None):
        if not eval_context:
            eval_context = EvaluationContext(doc)

        rows = []
        for item in self.get_items(doc, eval_context):
            indicators = self.indicators.get_values(item, eval_context)
            rows.append(indicators)
            eval_context.increment_iteration()

        return rows

    def get_report_count(self):
        """
        Return the number of ReportConfigurations that reference this data source.
        """
        return ReportConfiguration.count_by_data_source(self.domain, self._id)

    def validate(self, required=True):
        super(DataSourceConfiguration, self).validate(required)
        # these two properties implicitly call other validation
        self._get_main_filter()
        self._get_deleted_filter()

        # validate indicators and column uniqueness
        columns = [c.id for c in self.indicators.get_columns()]
        unique_columns = set(columns)
        if len(columns) != len(unique_columns):
            for column in set(columns):
                columns.remove(column)
            raise DuplicateColumnIdError(columns=columns)

        if self.referenced_doc_type not in VALID_REFERENCED_DOC_TYPES:
            raise BadSpecError(
                _('Report contains invalid referenced_doc_type: {}').format(
                    self.referenced_doc_type))

        self.parsed_expression

    @classmethod
    def by_domain(cls, domain):
        return get_datasources_for_domain(domain)

    @classmethod
    def all_ids(cls):
        return [
            res['id'] for res in cls.get_db().view(
                'userreports/data_sources_by_build_info',
                reduce=False,
                include_docs=False)
        ]

    @classmethod
    def all(cls):
        for result in iter_docs(cls.get_db(), cls.all_ids()):
            yield cls.wrap(result)

    @property
    def is_static(self):
        return id_is_static(self._id)

    def deactivate(self):
        if not self.is_static:
            self.is_deactivated = True
            self.save()
            get_indicator_adapter(self).drop_table()

    def get_case_type_or_xmlns_filter(self):
        """Returns a list of case types or xmlns from the filter of this data source.

        If this can't figure out the case types or xmlns's that filter, then returns [None]
        Currently always returns a list because it is called by a loop in _iteratively_build_table
        Could be reworked to return [] to be more pythonic
        """
        if self.referenced_doc_type not in FILTER_INTERPOLATION_DOC_TYPES:
            return [None]

        property_name = FILTER_INTERPOLATION_DOC_TYPES[
            self.referenced_doc_type]
        prop_value = self._filter_interploation_helper(self.configured_filter,
                                                       property_name)

        return prop_value or [None]

    def _filter_interploation_helper(self, config_filter, property_name):
        filter_type = config_filter.get('type')
        if filter_type == 'and':
            sub_config_filters = [
                self._filter_interploation_helper(f, property_name)
                for f in config_filter.get('filters')
            ]
            for filter_ in sub_config_filters:
                if filter_[0]:
                    return filter_

        if filter_type != 'boolean_expression':
            return [None]

        if config_filter['operator'] not in ('eq', 'in'):
            return [None]

        expression = config_filter['expression']
        if expression['type'] == 'property_name' and expression[
                'property_name'] == property_name:
            prop_value = config_filter['property_value']
            if not isinstance(prop_value, list):
                prop_value = [prop_value]
            return prop_value
        return [None]
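
DataSourceConfiguration is driven entirely by JSON specs: configured_filter (AND-ed with the built-in domain and doc_type filters from _get_filter) decides which documents feed the table, configured_indicators define the columns, and get_all_values turns each matching document into one or more rows. As a hedged illustration only, a minimal case-backed spec might look like the dict below; the domain, table_id, case type and column names are made up, and real specs are validated by FilterFactory and IndicatorFactory as shown above.

# Illustrative only; field values are hypothetical.
minimal_data_source_spec = {
    "doc_type": "DataSourceConfiguration",
    "domain": "example-domain",
    "referenced_doc_type": "CommCareCase",
    "table_id": "example_cases",
    "display_name": "Example cases",
    # AND-ed with the built-in domain and doc_type filters in _get_filter().
    "configured_filter": {
        "type": "boolean_expression",
        "operator": "eq",
        "expression": {"type": "property_name", "property_name": "type"},
        "property_value": "example_case_type",
    },
    # Each entry becomes a column, alongside the default doc_id and
    # inserted_at columns from default_indicators.
    "configured_indicators": [
        {
            "type": "expression",
            "column_id": "case_name",
            "datatype": "string",
            "display_name": "case name",
            "expression": {"type": "property_name", "property_name": "name"},
        },
    ],
}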
Example #25
class OpenmrsRepeater(CaseRepeater):
    """
    ``OpenmrsRepeater`` is responsible for updating OpenMRS patients
    with changes made to cases in CommCare. It is also responsible for
    creating OpenMRS "visits", "encounters" and "observations" when a
    corresponding visit form is submitted in CommCare.

    The ``OpenmrsRepeater`` class is different from most repeater
    classes in three respects:

    1. It has a case type and it updates the OpenMRS equivalent of cases
       like the ``CaseRepeater`` class, but it reads forms like the
       ``FormRepeater`` class. So it subclasses ``CaseRepeater`` but its
       payload format is ``form_json``.

    2. It makes many API calls for each payload.

    3. It can have a location.

    """
    class Meta(object):
        app_label = 'repeaters'

    include_app_id_param = False
    friendly_name = _("Forward to OpenMRS")
    payload_generator_classes = (FormRepeaterJsonPayloadGenerator, )

    location_id = StringProperty(default='')
    openmrs_config = SchemaProperty(OpenmrsConfig)

    _has_config = True

    # self.white_listed_case_types must have exactly one case type set
    # for Atom feed integration to add cases for OpenMRS patients.
    # self.location_id must be set to determine their case owner. The
    # owner is set to the first CommCareUser instance found at that
    # location.
    atom_feed_enabled = BooleanProperty(default=False)
    atom_feed_status = SchemaDictProperty(AtomFeedStatus)

    def __init__(self, *args, **kwargs):
        super(OpenmrsRepeater, self).__init__(*args, **kwargs)

    def __eq__(self, other):
        return (isinstance(other, self.__class__)
                and self.get_id == other.get_id)

    def __str__(self):
        return Repeater.__str__(self)

    @classmethod
    def wrap(cls, data):
        if 'atom_feed_last_polled_at' in data:
            data['atom_feed_status'] = {
                ATOM_FEED_NAME_PATIENT: {
                    'last_polled_at': data.pop('atom_feed_last_polled_at'),
                    'last_page': data.pop('atom_feed_last_page', None),
                }
            }
        return super(OpenmrsRepeater, cls).wrap(data)

    @cached_property
    def requests(self):
        # Used by atom_feed module and views that don't have a payload
        # associated with the request
        return self.get_requests()

    def get_requests(self, payload_id=None):
        return Requests(
            self.domain,
            self.url,
            self.username,
            self.plaintext_password,
            verify=self.verify,
            notify_addresses=self.notify_addresses,
            payload_id=payload_id,
        )

    @cached_property
    def first_user(self):
        return get_one_commcare_user_at_location(self.domain, self.location_id)

    @memoized
    def payload_doc(self, repeat_record):
        return FormAccessors(repeat_record.domain).get_form(
            repeat_record.payload_id)

    @property
    def form_class_name(self):
        """
        The class name used to determine which edit form to use
        """
        return self.__class__.__name__

    @classmethod
    def available_for_domain(cls, domain):
        return OPENMRS_INTEGRATION.enabled(domain)

    def allowed_to_forward(self, payload):
        """
        Forward the payload if ...

        * it did not come from OpenMRS, and
        * CaseRepeater says it's OK for the case types and users of any
          of the payload's cases, and
        * this repeater forwards to the right OpenMRS server for any of
          the payload's cases.

        :param payload: An XFormInstance (not a case)

        """
        if payload.xmlns == XMLNS_OPENMRS:
            # payload came from OpenMRS. Don't send it back.
            return False

        case_blocks = extract_case_blocks(payload)
        case_ids = [case_block['@case_id'] for case_block in case_blocks]
        cases = CaseAccessors(payload.domain).get_cases(case_ids, ordered=True)
        if not any(
                CaseRepeater.allowed_to_forward(self, case) for case in cases):
            # If none of the case updates in the payload are allowed to
            # be forwarded, drop it.
            return False

        if not self.location_id:
            # If this repeater does not have a location, all payloads
            # should go to it.
            return True

        repeaters = [
            repeater for case in cases
            for repeater in get_case_location_ancestor_repeaters(case)
        ]
        # If this repeater points to the wrong OpenMRS server for this
        # payload then let the right repeater handle it.
        return self in repeaters

    def get_payload(self, repeat_record):
        payload = super(OpenmrsRepeater, self).get_payload(repeat_record)
        return json.loads(payload)

    def send_request(self, repeat_record, payload):
        value_source_configs: Iterable[JsonDict] = chain(
            self.openmrs_config.case_config.patient_identifiers.values(),
            self.openmrs_config.case_config.person_properties.values(),
            self.openmrs_config.case_config.person_preferred_name.values(),
            self.openmrs_config.case_config.person_preferred_address.values(),
            self.openmrs_config.case_config.person_attributes.values(),
        )
        case_trigger_infos = get_relevant_case_updates_from_form_json(
            self.domain,
            payload,
            case_types=self.white_listed_case_types,
            extra_fields=[
                conf["case_property"] for conf in value_source_configs
                if "case_property" in conf
            ],
            form_question_values=get_form_question_values(payload),
        )
        requests = self.get_requests(payload_id=repeat_record.payload_id)
        try:
            response = send_openmrs_data(
                requests,
                self.domain,
                payload,
                self.openmrs_config,
                case_trigger_infos,
            )
        except Exception as err:
            requests.notify_exception(str(err))
            return OpenmrsResponse(400, 'Bad Request', pformat_json(str(err)))
        return response
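
The wrap() override above is a lazy, read-time migration: documents that still carry the legacy atom_feed_last_polled_at field are rewritten into the newer atom_feed_status mapping the first time they are loaded, with no separate migration step. A small self-contained sketch of that pattern follows; only the field names mirror the code above, and the rest (the helper name, the sample document, the constant's value) is illustrative.

from datetime import datetime

ATOM_FEED_NAME_PATIENT = 'patient'  # illustrative stand-in for the real constant


def migrate_atom_feed_fields(data):
    """Rewrite the legacy flat fields into the nested atom_feed_status
    mapping, exactly once, when an old document is wrapped."""
    if 'atom_feed_last_polled_at' in data:
        data['atom_feed_status'] = {
            ATOM_FEED_NAME_PATIENT: {
                'last_polled_at': data.pop('atom_feed_last_polled_at'),
                'last_page': data.pop('atom_feed_last_page', None),
            }
        }
    return data


old_doc = {'atom_feed_last_polled_at': datetime(2020, 1, 1).isoformat()}
new_doc = migrate_atom_feed_fields(old_doc)
assert 'atom_feed_last_polled_at' not in new_doc
assert new_doc['atom_feed_status'][ATOM_FEED_NAME_PATIENT]['last_page'] is None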
Example #26
class ReportConfiguration(UnicodeMixIn, QuickCachedDocumentMixin, Document):
    """
    A report configuration. These map 1:1 with reports that show up in the UI.
    """
    domain = StringProperty(required=True)
    visible = BooleanProperty(default=True)
    # config_id of the datasource
    config_id = StringProperty(required=True)
    data_source_type = StringProperty(
        default=DATA_SOURCE_TYPE_STANDARD,
        choices=[DATA_SOURCE_TYPE_STANDARD, DATA_SOURCE_TYPE_AGGREGATE])
    title = StringProperty()
    description = StringProperty()
    aggregation_columns = StringListProperty()
    filters = ListProperty()
    columns = ListProperty()
    configured_charts = ListProperty()
    sort_expression = ListProperty()
    soft_rollout = DecimalProperty(default=0)  # no longer used
    report_meta = SchemaProperty(ReportMeta)
    custom_query_provider = StringProperty(required=False)

    def __unicode__(self):
        return '{} - {}'.format(self.domain, self.title)

    def save(self, *args, **kwargs):
        self.report_meta.last_modified = datetime.utcnow()
        super(ReportConfiguration, self).save(*args, **kwargs)

    @property
    @memoized
    def filters_without_prefilters(self):
        return [f for f in self.filters if f['type'] != 'pre']

    @property
    @memoized
    def prefilters(self):
        return [f for f in self.filters if f['type'] == 'pre']

    @property
    @memoized
    def config(self):
        return get_datasource_config(self.config_id, self.domain,
                                     self.data_source_type)[0]

    @property
    @memoized
    def report_columns(self):
        return [
            ReportColumnFactory.from_spec(c, self.is_static)
            for c in self.columns
        ]

    @property
    @memoized
    def ui_filters(self):
        return [ReportFilterFactory.from_spec(f, self) for f in self.filters]

    @property
    @memoized
    def charts(self):
        return [ChartFactory.from_spec(g._obj) for g in self.configured_charts]

    @property
    @memoized
    def location_column_id(self):
        cols = [col for col in self.report_columns if col.type == 'location']
        if cols:
            return cols[0].column_id

    @property
    def map_config(self):
        def map_col(column):
            if column['column_id'] != self.location_column_id:
                return {
                    'column_id': column['column_id'],
                    'label': column['display']
                }

        if self.location_column_id:
            return {
                'location_column_id': self.location_column_id,
                'layer_name': {
                    'XFormInstance': _('Forms'),
                    'CommCareCase': _('Cases')
                }.get(self.config.referenced_doc_type, "Layer"),
                'columns':
                [x for x in (map_col(col) for col in self.columns) if x]
            }

    @property
    @memoized
    def sort_order(self):
        return [
            ReportOrderByFactory.from_spec(e) for e in self.sort_expression
        ]

    @property
    def table_id(self):
        return self.config.table_id

    def get_ui_filter(self, filter_slug):
        for filter in self.ui_filters:
            if filter.name == filter_slug:
                return filter
        return None

    def get_languages(self):
        """
        Return the languages used in this report's column and filter display properties.
        Note that only explicitly identified languages are returned. So, if the
        display properties are all strings, "en" would not be returned.
        """
        langs = set()
        for item in self.columns + self.filters:
            if isinstance(item['display'], dict):
                langs |= set(item['display'].keys())
        return langs

    def validate(self, required=True):
        from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource

        def _check_for_duplicates(supposedly_unique_list, error_msg):
            # http://stackoverflow.com/questions/9835762/find-and-list-duplicates-in-python-list
            duplicate_items = set([
                item for item in supposedly_unique_list
                if supposedly_unique_list.count(item) > 1
            ])
            if len(duplicate_items) > 0:
                raise BadSpecError(
                    _(error_msg).format(', '.join(sorted(duplicate_items))))

        super(ReportConfiguration, self).validate(required)

        # check duplicates before passing to factory since it chokes on them
        _check_for_duplicates(
            [FilterSpec.wrap(f).slug for f in self.filters],
            'Filters cannot contain duplicate slugs: {}',
        )
        _check_for_duplicates(
            [
                column_id for c in self.report_columns
                for column_id in c.get_column_ids()
            ],
            'Columns cannot contain duplicate column_ids: {}',
        )

        # these calls all implicitly do validation
        ConfigurableReportDataSource.from_spec(self)
        self.ui_filters
        self.charts
        self.sort_order

    @classmethod
    @quickcache(['cls.__name__', 'domain'])
    def by_domain(cls, domain):
        return get_report_configs_for_domain(domain)

    @classmethod
    @quickcache(['cls.__name__', 'domain', 'data_source_id'])
    def count_by_data_source(cls, domain, data_source_id):
        return get_number_of_report_configs_by_data_source(
            domain, data_source_id)

    def clear_caches(self):
        super(ReportConfiguration, self).clear_caches()
        self.by_domain.clear(self.__class__, self.domain)
        self.count_by_data_source.clear(self.__class__, self.domain,
                                        self.config_id)

    @property
    def is_static(self):
        return report_config_id_is_static(self._id)
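
Two of the helpers above are easy to exercise in isolation: the duplicate check used by validate(), and get_languages(), which only counts display values that are explicit language dictionaries. The sketch below reproduces both behaviours with simplified column and filter dicts; the shapes are reduced for illustration and are not full report specs.

def check_for_duplicates(supposedly_unique_list):
    """Return the set of duplicated items, as validate() computes it."""
    return {
        item for item in supposedly_unique_list
        if supposedly_unique_list.count(item) > 1
    }


def get_languages(columns, filters):
    """Collect languages from display dicts; plain-string displays
    contribute nothing, so an all-English report can return set()."""
    langs = set()
    for item in columns + filters:
        if isinstance(item['display'], dict):
            langs |= set(item['display'].keys())
    return langs


assert check_for_duplicates(['age', 'name', 'age']) == {'age'}
example_columns = [{'display': {'en': 'Name', 'fra': 'Nom'}}]
example_filters = [{'display': 'Date opened'}]  # plain string: no language recorded
assert get_languages(example_columns, example_filters) == {'en', 'fra'}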
Example #27
class Repeater(QuickCachedDocumentMixin, Document):
    """
    Represents the configuration of a repeater. Will specify the URL to forward to and
    other properties of the configuration.
    """
    base_doc = 'Repeater'

    domain = StringProperty()
    url = StringProperty()
    format = StringProperty()

    auth_type = StringProperty(choices=(BASIC_AUTH, DIGEST_AUTH, OAUTH1), required=False)
    username = StringProperty()
    password = StringProperty()
    skip_cert_verify = BooleanProperty(default=False)
    friendly_name = _("Data")
    paused = BooleanProperty(default=False)

    payload_generator_classes = ()

    @classmethod
    def get_custom_url(cls, domain):
        return None

    @classmethod
    def available_for_domain(cls, domain):
        """Returns whether this repeater can be used by a particular domain
        """
        return True

    def get_pending_record_count(self):
        return get_pending_repeat_record_count(self.domain, self._id)

    def get_failure_record_count(self):
        return get_failure_repeat_record_count(self.domain, self._id)

    def get_success_record_count(self):
        return get_success_repeat_record_count(self.domain, self._id)

    def get_cancelled_record_count(self):
        return get_cancelled_repeat_record_count(self.domain, self._id)

    def _format_or_default_format(self):
        from corehq.motech.repeaters.repeater_generators import RegisterGenerator
        return self.format or RegisterGenerator.default_format_by_repeater(self.__class__)

    def _get_payload_generator(self, payload_format):
        from corehq.motech.repeaters.repeater_generators import RegisterGenerator
        gen = RegisterGenerator.generator_class_by_repeater_format(self.__class__, payload_format)
        return gen(self)

    @property
    @memoized
    def generator(self):
        return self._get_payload_generator(self._format_or_default_format())

    def payload_doc(self, repeat_record):
        raise NotImplementedError

    @memoized
    def get_payload(self, repeat_record):
        return self.generator.get_payload(repeat_record, self.payload_doc(repeat_record))

    def get_attempt_info(self, repeat_record):
        return None

    def register(self, payload, next_check=None):
        if not self.allowed_to_forward(payload):
            return

        now = datetime.utcnow()
        repeat_record = RepeatRecord(
            repeater_id=self.get_id,
            repeater_type=self.doc_type,
            domain=self.domain,
            registered_on=now,
            next_check=next_check or now,
            payload_id=payload.get_id
        )
        repeat_record.save()
        return repeat_record

    def allowed_to_forward(self, payload):
        """
        Return True/False depending on whether the payload meets the forwarding criteria.
        """
        return True

    def clear_caches(self):
        super(Repeater, self).clear_caches()
        # Also expire the cache entry used when this repeater is fetched
        # via the base Repeater class. The quickcache key includes the doc
        # class, so entries cached under Repeater (e.g. by the edit forms)
        # would not otherwise be expired. Expire them explicitly here using
        # the Repeater class as well.
        Repeater.get.clear(Repeater, self._id)
        if self.__class__ == Repeater:
            cls = self.get_class_from_doc_type(self.doc_type)
        else:
            cls = self.__class__
        # clear cls.by_domain (i.e. filtered by doc type)
        Repeater.by_domain.clear(cls, self.domain)
        # clear Repeater.by_domain (i.e. not filtered by doc type)
        Repeater.by_domain.clear(Repeater, self.domain)

    @classmethod
    @quickcache(['cls.__name__', 'domain'], timeout=5 * 60, memoize_timeout=10)
    def by_domain(cls, domain):
        key = [domain]
        if cls.__name__ in get_all_repeater_types():
            key.append(cls.__name__)
        elif cls.__name__ == Repeater.__name__:
            # In this case the wrap function delegates to the
            # appropriate sub-repeater types.
            pass
        else:
            # Any repeater type can be posted to the API, and the installed apps
            # determine whether we actually know about it.
            # But if we do not know about it, then may as well return nothing now
            return []

        raw_docs = cls.view('repeaters/repeaters',
            startkey=key,
            endkey=key + [{}],
            include_docs=True,
            reduce=False,
            wrap_doc=False
        )

        return [cls.wrap(repeater_doc['doc']) for repeater_doc in raw_docs
                if cls.get_class_from_doc_type(repeater_doc['doc']['doc_type'])]

    @classmethod
    def wrap(cls, data):
        if cls.__name__ == Repeater.__name__:
            cls_ = cls.get_class_from_doc_type(data['doc_type'])
            if cls_:
                return cls_.wrap(data)
            else:
                raise ResourceNotFound('Unknown repeater type: %s' % data)
        else:
            return super(Repeater, cls).wrap(data)

    @staticmethod
    def get_class_from_doc_type(doc_type):
        doc_type = doc_type.replace(DELETED, '')
        repeater_types = get_all_repeater_types()
        if doc_type in repeater_types:
            return repeater_types[doc_type]
        else:
            return None

    def retire(self):
        if DELETED not in self['doc_type']:
            self['doc_type'] += DELETED
        if DELETED not in self['base_doc']:
            self['base_doc'] += DELETED
        self.paused = False
        self.save()

    def pause(self):
        self.paused = True
        self.save()

    def resume(self):
        self.paused = False
        self.save()

    def get_url(self, repeat_record):
        # to be overridden
        return self.url

    def allow_retries(self, response):
        """Whether to requeue the repeater when it fails
        """
        return True

    def get_headers(self, repeat_record):
        # to be overridden
        return self.generator.get_headers()

    @property
    def plaintext_password(self):
        if self.password.startswith('${algo}$'.format(algo=ALGO_AES)):
            ciphertext = self.password.split('$', 2)[2]
            return b64_aes_decrypt(ciphertext)
        return self.password

    def get_auth(self):
        if self.auth_type == BASIC_AUTH:
            return HTTPBasicAuth(self.username, self.plaintext_password)
        elif self.auth_type == DIGEST_AUTH:
            return HTTPDigestAuth(self.username, self.plaintext_password)
        return None

    @property
    def verify(self):
        return not self.skip_cert_verify

    def send_request(self, repeat_record, payload):
        headers = self.get_headers(repeat_record)
        auth = self.get_auth()
        url = self.get_url(repeat_record)
        return simple_post(payload, url, headers=headers, timeout=POST_TIMEOUT, auth=auth, verify=self.verify)

    def fire_for_record(self, repeat_record):
        payload = self.get_payload(repeat_record)
        try:
            response = self.send_request(repeat_record, payload)
        except (Timeout, ConnectionError) as error:
            log_repeater_timeout_in_datadog(self.domain)
            return self.handle_response(RequestConnectionError(error), repeat_record)
        except Exception as e:
            return self.handle_response(e, repeat_record)
        else:
            return self.handle_response(response, repeat_record)

    def handle_response(self, result, repeat_record):
        """
        route the result to the success, failure, or exception handlers

        result may be either a response object or an exception
        """
        if isinstance(result, Exception):
            attempt = repeat_record.handle_exception(result)
            self.generator.handle_exception(result, repeat_record)
        elif 200 <= result.status_code < 300:
            attempt = repeat_record.handle_success(result)
            self.generator.handle_success(result, self.payload_doc(repeat_record), repeat_record)
        else:
            attempt = repeat_record.handle_failure(result)
            self.generator.handle_failure(result, self.payload_doc(repeat_record), repeat_record)
        return attempt

    @property
    def form_class_name(self):
        """
        Return the name of the class whose edit form this class uses.

        (Most classes that extend CaseRepeater, and all classes that
        extend FormRepeater, use the same form.)
        """
        return self.__class__.__name__
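
handle_response() above is a three-way dispatch: exceptions go to handle_exception, 2xx responses to handle_success, and everything else to handle_failure, on both the repeat record and the payload generator. A minimal sketch of just that routing decision follows, with a namedtuple standing in for a Requests response; the names here are illustrative.

from collections import namedtuple

FakeResponse = namedtuple('FakeResponse', 'status_code reason')


def route_response(result):
    """Mirror Repeater.handle_response: exception / 2xx success / failure."""
    if isinstance(result, Exception):
        return 'exception'
    elif 200 <= result.status_code < 300:
        return 'success'
    else:
        return 'failure'


assert route_response(ConnectionError('boom')) == 'exception'
assert route_response(FakeResponse(201, 'Created')) == 'success'
assert route_response(FakeResponse(503, 'Service Unavailable')) == 'failure'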
Example #28
class StaticReportConfiguration(JsonObject):
    """
    For statically defined reports based on custom data sources.

    This class keeps the full list of static report configurations relevant to the
    current environment in memory, and builds a new ReportConfiguration from the
    static config on each request.

    See 0002-keep-static-ucr-configurations-in-memory.md
    """
    domains = ListProperty(required=True)
    report_id = StringProperty(validators=(_check_ids))
    data_source_table = StringProperty()
    config = DictProperty()
    custom_configurable_report = StringProperty()
    server_environment = ListProperty(required=True)

    @classmethod
    def get_doc_id(cls, domain, report_id, custom_configurable_report):
        return '{}{}-{}'.format(
            STATIC_PREFIX
            if not custom_configurable_report else CUSTOM_REPORT_PREFIX,
            domain,
            report_id,
        )

    @classmethod
    def _all(cls):
        def __get_all():
            for path_or_glob in settings.STATIC_UCR_REPORTS:
                if os.path.isfile(path_or_glob):
                    yield _get_wrapped_object_from_file(path_or_glob, cls)
                else:
                    files = glob.glob(path_or_glob)
                    for path in files:
                        yield _get_wrapped_object_from_file(path, cls)

        skip_env_filtering = settings.UNIT_TESTING or settings.DEBUG
        return __get_all() if skip_env_filtering else _filter_by_server_env(
            __get_all())

    @classmethod
    @memoized
    def by_id_mapping(cls):
        return {
            cls.get_doc_id(domain, wrapped.report_id,
                           wrapped.custom_configurable_report):
            (domain, wrapped)
            for wrapped in cls._all() for domain in wrapped.domains
        }

    @classmethod
    def all(cls):
        """Only used in tests"""
        for wrapped in StaticReportConfiguration._all():
            for domain in wrapped.domains:
                yield cls._get_report_config(wrapped, domain)

    @classmethod
    def by_domain(cls, domain):
        """
        Returns a list of ReportConfiguration objects, NOT StaticReportConfigurations.
        """
        return [
            cls._get_report_config(wrapped, dom)
            for dom, wrapped in cls.by_id_mapping().values() if domain == dom
        ]

    @classmethod
    def by_id(cls, config_id, domain):
        """Returns a ReportConfiguration object, NOT StaticReportConfigurations.
        """
        try:
            report_domain, wrapped = cls.by_id_mapping()[config_id]
        except KeyError:
            raise BadSpecError(
                _('The report configuration referenced by this report could '
                  'not be found: %(report_id)s') % {'report_id': config_id})

        if domain and report_domain != domain:
            raise DocumentNotFound(
                "Document {} of class {} not in domain {}!".format(
                    config_id,
                    ReportConfiguration.__class__.__name__,
                    domain,
                ))
        return cls._get_report_config(wrapped, report_domain)

    @classmethod
    def by_ids(cls, config_ids):
        mapping = cls.by_id_mapping()
        config_by_ids = {}
        for config_id in set(config_ids):
            try:
                domain, wrapped = mapping[config_id]
            except KeyError:
                raise ReportConfigurationNotFoundError(
                    _("The following report configuration could not be found: {}"
                      .format(config_id)))
            config_by_ids[config_id] = cls._get_report_config(wrapped, domain)
        return config_by_ids

    @classmethod
    def report_class_by_domain_and_id(cls, domain, config_id):
        try:
            report_domain, wrapped = cls.by_id_mapping()[config_id]
        except KeyError:
            raise BadSpecError(
                _('The report configuration referenced by this report could not be found.'
                  ))
        if report_domain != domain:
            raise DocumentNotFound(
                "Document {} of class {} not in domain {}!".format(
                    config_id,
                    ReportConfiguration.__class__.__name__,
                    domain,
                ))
        return wrapped.custom_configurable_report

    @classmethod
    def _get_report_config(cls, static_config, domain):
        doc = copy(static_config.to_json()['config'])
        doc['domain'] = domain
        doc['_id'] = cls.get_doc_id(domain, static_config.report_id,
                                    static_config.custom_configurable_report)
        doc['config_id'] = StaticDataSourceConfiguration.get_doc_id(
            domain, static_config.data_source_table)
        return ReportConfiguration.wrap(doc)
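
Static report configurations are addressed by a composite document id: a prefix that depends on whether the report is a custom configurable report, then the domain, then the report id, joined as shown in get_doc_id above. The sketch below reproduces that scheme with illustrative prefix values standing in for the real STATIC_PREFIX and CUSTOM_REPORT_PREFIX constants.

# Illustrative stand-ins for the real prefix constants.
STATIC_PREFIX = 'static-'
CUSTOM_REPORT_PREFIX = 'custom-'


def get_doc_id(domain, report_id, custom_configurable_report):
    prefix = CUSTOM_REPORT_PREFIX if custom_configurable_report else STATIC_PREFIX
    return '{}{}-{}'.format(prefix, domain, report_id)


assert get_doc_id('example-domain', 'cases_by_status', None) == \
    'static-example-domain-cases_by_status'
assert get_doc_id('example-domain', 'cases_by_status', 'MyCustomReport') == \
    'custom-example-domain-cases_by_status'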
Example #29
class FHIRRepeater(CaseRepeater):
    class Meta:
        app_label = 'repeaters'

    friendly_name = _('Forward Cases to a FHIR API')
    payload_generator_classes = (FormDictPayloadGenerator, )
    include_app_id_param = False
    _has_config = False

    fhir_version = StringProperty(default=FHIR_VERSION_4_0_1)
    patient_registration_enabled = BooleanProperty(default=True)
    patient_search_enabled = BooleanProperty(default=False)

    @memoized
    def payload_doc(self, repeat_record):
        return FormAccessors(repeat_record.domain).get_form(
            repeat_record.payload_id)

    @property
    def form_class_name(self):
        # The class name used to determine which edit form to use
        return self.__class__.__name__

    @classmethod
    def available_for_domain(cls, domain):
        return (domain_has_privilege(domain, DATA_FORWARDING)
                and FHIR_INTEGRATION.enabled(domain))

    def allowed_to_forward(self, payload):
        # When we update a case's external_id to its ID on a remote
        # FHIR service, the form is submitted with XMLNS_FHIR. This
        # check makes sure that we don't send the update back to FHIR.
        return payload.xmlns != XMLNS_FHIR

    def send_request(self, repeat_record, payload):
        """
        Generates FHIR resources from ``payload``, and sends them as a
        FHIR transaction bundle. If there are patients that need to be
        registered, that is done first.

        Returns an HTTP response-like object. If the payload has nothing
        to send, returns True.
        """
        requests = self.connection_settings.get_requests(
            repeat_record.payload_id)
        infos, resource_types = self.get_infos_resource_types(
            payload,
            self.fhir_version,
        )
        try:
            resources = get_info_resource_list(infos, resource_types)
            resources = register_patients(
                requests,
                resources,
                self.patient_registration_enabled,
                self.patient_search_enabled,
                self._id,
            )
            response = send_resources(
                requests,
                resources,
                self.fhir_version,
                self._id,
            )
        except Exception as err:
            requests.notify_exception(str(err))
            return RepeaterResponse(400, 'Bad Request', pformat_json(str(err)))
        return response

    def get_infos_resource_types(
        self,
        form_json: dict,
        fhir_version: str,
    ) -> Tuple[List[CaseTriggerInfo], Dict[str, FHIRResourceType]]:

        form_question_values = get_form_question_values(form_json)
        case_blocks = extract_case_blocks(form_json)
        cases_by_id = _get_cases_by_id(self.domain, case_blocks)
        resource_types_by_case_type = _get_resource_types_by_case_type(
            self.domain,
            fhir_version,
            cases_by_id.values(),
        )

        case_trigger_info_list = []
        for case_block in case_blocks:
            try:
                case = cases_by_id[case_block['@case_id']]
            except KeyError:
                form_id = form_json[TAG_FORM][TAG_META]['instanceID']
                raise CaseNotFound(
                    f"Form {form_id!r} touches case {case_block['@case_id']!r} "
                    "but that case is not found.")
            try:
                resource_type = resource_types_by_case_type[case.type]
            except KeyError:
                # The case type is not mapped to a FHIR resource type.
                # This case is not meant to be represented as a FHIR
                # resource.
                continue
            case_trigger_info_list.append(
                get_case_trigger_info(
                    case,
                    resource_type,
                    case_block,
                    form_question_values,
                ))
        return case_trigger_info_list, resource_types_by_case_type
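
The loop above has two failure modes worth separating: a case block whose case cannot be loaded is an error, while a case whose type has no FHIR resource type mapping is simply skipped. A self-contained sketch of that selection logic follows; the function name and the dict-based cases are illustrative stand-ins for the real CaseTriggerInfo plumbing.

def infos_for_mapped_cases(case_blocks, cases_by_id, resource_types_by_case_type):
    """Keep only the cases whose type maps to a FHIR resource type;
    a missing case would be an error, an unmapped case type is skipped."""
    selected = []
    for case_block in case_blocks:
        case = cases_by_id[case_block['@case_id']]  # KeyError -> case not found
        resource_type = resource_types_by_case_type.get(case['type'])
        if resource_type is None:
            continue  # case type not mapped to a FHIR resource type
        selected.append((case, resource_type))
    return selected


cases_by_id = {'c1': {'type': 'patient'}, 'c2': {'type': 'note'}}
mapping = {'patient': 'Patient'}
blocks = [{'@case_id': 'c1'}, {'@case_id': 'c2'}]
assert infos_for_mapped_cases(blocks, cases_by_id, mapping) == \
    [({'type': 'patient'}, 'Patient')]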
Example #30
class CaseProperty(ValueSource):
    case_property = StringProperty()

    def get_value(self, case_trigger_info):
        return case_trigger_info.updates.get(self.case_property)
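
CaseProperty is one of the simplest ValueSource subclasses: get_value just looks the configured case property up in the trigger info's updates dict, returning None when the property was not part of the update. A minimal sketch with a namedtuple standing in for CaseTriggerInfo (illustrative only):

from collections import namedtuple

FakeCaseTriggerInfo = namedtuple('FakeCaseTriggerInfo', 'updates')


def get_case_property_value(case_property, case_trigger_info):
    """Same lookup as CaseProperty.get_value above."""
    return case_trigger_info.updates.get(case_property)


info = FakeCaseTriggerInfo(updates={'dob': '2001-01-01'})
assert get_case_property_value('dob', info) == '2001-01-01'
assert get_case_property_value('sex', info) is None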