Example #1
class NavigationEventAudit(AuditEvent):
    """
    Audit event to track happenings within the system, i.e., view access
    """
    request_path = StringProperty()
    ip_address = StringProperty()
    user_agent = StringProperty()

    view = StringProperty()  # the fully qualified view name
    view_kwargs = DictProperty()
    headers = DictProperty()  # the request.META?
    # In the future we could save some disk space by storing the user agent
    # and IP in a separate session document.
    session_key = StringProperty()

    status_code = IntegerProperty()

    extra = DictProperty()

    @property
    def summary(self):
        return "%s from %s" % (self.request_path, self.ip_address)

    class Meta(object):
        app_label = 'auditcare'

    @cached_property
    def domain(self):
        from corehq.apps.domain.utils import get_domain_from_url
        return get_domain_from_url(self.request_path)

    @classmethod
    def audit_view(cls, request, user, view_func, view_kwargs, extra={}):
        """Creates an instance of a Access log."""
        try:
            audit = cls.create_audit(cls, user)
            audit.description += "View"
            if request.GET:
                audit.request_path = "%s?%s" % (request.path, '&'.join(
                    "%s=%s" % (x, request.GET[x]) for x in request.GET
                ))
            else:
                audit.request_path = request.path
            audit.ip_address = utils.get_ip(request)
            audit.user_agent = request.META.get('HTTP_USER_AGENT', '<unknown>')
            audit.view = "%s.%s" % (view_func.__module__, view_func.__name__)
            for k in STANDARD_HEADER_KEYS:
                header_item = request.META.get(k, None)
                if header_item is not None:
                    audit.headers[k] = header_item
            # audit.headers = request.META is a bit verbose to go to that extreme.
            # TODO: target specific fields in META, but server differences make a universal set hard.
            audit.session_key = request.session.session_key
            audit.extra = extra
            audit.view_kwargs = view_kwargs
            audit.save()
            return audit
        except Exception as ex:
            log.error("NavigationEventAudit.audit_view error: %s", ex)
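
A minimal usage sketch for the classmethod above, wrapped in a helper so it is self-contained; how the user argument is resolved (request.user here) is an assumption, not the project's actual wiring:

def record_navigation(request, view_func, view_kwargs):
    # audit_view catches and logs its own exceptions, so it is safe to call inline.
    return NavigationEventAudit.audit_view(
        request, request.user, view_func, view_kwargs,
    )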
Example #2
class DayTimeWindow(DocumentSchema):
    """
    Defines a window of time in a day of the week.
    Day/time combinations will be interpreted in the domain's timezone.
    """
    # 0 - 6 is Monday - Sunday; -1 means it applies to all days
    day = IntegerProperty()
    # For times, None means there's no lower/upper bound
    start_time = TimeProperty()
    end_time = TimeProperty()
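
A brief usage sketch, assuming the usual couchdbkit-style keyword constructor and datetime.time values for the TimeProperty fields:

from datetime import time

# 09:00-17:00 window on Mondays only (day 0 = Monday)
monday_window = DayTimeWindow(day=0, start_time=time(9, 0), end_time=time(17, 0))

# Evening window that applies to every day; end_time=None means no upper bound
any_day_evening = DayTimeWindow(day=-1, start_time=time(18, 0), end_time=None)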
Example #3
class InternalProperties(DocumentSchema, UpdatableSchema):
    """
    Project properties that should only be visible/editable by superusers
    """
    sf_contract_id = StringProperty()
    sf_account_id = StringProperty()
    commcare_edition = StringProperty(
        choices=['', "plus", "community", "standard", "pro", "advanced", "enterprise"],
        default="community"
    )
    initiative = StringListProperty()
    workshop_region = StringProperty()
    project_state = StringProperty(choices=["", "POC", "transition", "at-scale"], default="")
    self_started = BooleanProperty(default=True)
    area = StringProperty()
    sub_area = StringProperty()
    using_adm = BooleanProperty()
    using_call_center = BooleanProperty()
    custom_eula = BooleanProperty()
    can_use_data = BooleanProperty(default=True)
    notes = StringProperty()
    organization_name = StringProperty()
    platform = StringListProperty()
    project_manager = StringProperty()
    phone_model = StringProperty()
    goal_time_period = IntegerProperty()
    goal_followup_rate = DecimalProperty()
    # intentionally different from commtrack_enabled so that FMs can change it
    commtrack_domain = BooleanProperty()
    performance_threshold = IntegerProperty()
    experienced_threshold = IntegerProperty()
    amplifies_workers = StringProperty(
        choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
        default=AMPLIFIES_NOT_SET
    )
    amplifies_project = StringProperty(
        choices=[AMPLIFIES_YES, AMPLIFIES_NO, AMPLIFIES_NOT_SET],
        default=AMPLIFIES_NOT_SET
    )
    business_unit = StringProperty(choices=BUSINESS_UNITS + [""], default="")
    data_access_threshold = IntegerProperty()
Example #4
class BlobMetaRef(DocumentSchema):
    key = StringProperty()
    blobmeta_id = IntegerProperty()
    content_type = StringProperty()
    content_length = IntegerProperty()

    @classmethod
    def _from_attachment(cls, data):
        return cls(
            content_type=data.get("content_type"),
            content_length=data.get("length"),
        )

    @staticmethod
    def _normalize_json(dbname, doc_id, data):
        if "key" in data:
            return data
        return {
            "key": join(dbname, safe_id(doc_id), data["id"]),
            "content_length": data.get("content_length"),
            "content_type": data.get("content_type"),
        }
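
A worked sketch of `_normalize_json`, assuming `join` and `safe_id` are the blob-key helpers imported by the surrounding module; the database name, doc id and attachment data below are hypothetical:

legacy = {"id": "form.xml", "content_type": "text/xml", "content_length": 1024}
normalized = BlobMetaRef._normalize_json("commcarehq", "abc123", legacy)
# normalized["key"] == join("commcarehq", safe_id("abc123"), "form.xml")
# normalized["content_type"] == "text/xml"; normalized["content_length"] == 1024

# Data that already carries a "key" is returned unchanged:
assert BlobMetaRef._normalize_json("db", "doc", {"key": "k"}) == {"key": "k"}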
Example #5
class HQMediaMapItem(DocumentSchema):

    multimedia_id = StringProperty()
    media_type = StringProperty()
    version = IntegerProperty()
    unique_id = StringProperty()

    @property
    def url(self):
        return reverse("hqmedia_download", args=[self.media_type, self.multimedia_id]) if self.multimedia_id else ""

    @classmethod
    def gen_unique_id(cls, m_id, path):
        return hashlib.md5(b"%s: %s" % (path.encode('utf-8'), m_id.encode('utf-8'))).hexdigest()
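
A short usage sketch; the multimedia id, path and media type are placeholders:

multimedia_id = "4ea87cc1a66c40e5b3f5b1a0e20d1b2a"
path = "jr://file/commcare/image/question1.png"

item = HQMediaMapItem(
    multimedia_id=multimedia_id,
    media_type="CommCareImage",  # placeholder media type name
    version=1,
    # gen_unique_id hashes "<path>: <multimedia_id>", so the same pair always
    # produces the same unique_id
    unique_id=HQMediaMapItem.gen_unique_id(multimedia_id, path),
)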
Example #6
class CommCareCaseAttachment(LooselyEqualDocumentSchema, UnicodeMixIn):
    identifier = StringProperty()
    attachment_src = StringProperty()
    attachment_from = StringProperty()
    attachment_name = StringProperty()
    server_mime = StringProperty()  # Server detected MIME
    server_md5 = StringProperty()  # Couch detected hash

    attachment_size = IntegerProperty()  # file size
    attachment_properties = DictProperty(
    )  # width, height, other relevant metadata

    @property
    def is_image(self):
        if self.server_mime is None:
            return None
        return self.server_mime.startswith('image/')

    @property
    def is_present(self):
        """
        Helper method to see if this is a delete vs. update
        """
        if self.identifier and (self.attachment_src == self.attachment_from is None):
            return False
        else:
            return True

    @property
    def attachment_key(self):
        return self.identifier

    @classmethod
    def from_case_index_update(cls, attachment):
        if attachment.attachment_src:
            guessed = mimetypes.guess_type(attachment.attachment_src)
            if guessed[0] is not None:
                mime_type = guessed[0]
            else:
                mime_type = None

            ret = cls(identifier=attachment.identifier,
                      attachment_src=attachment.attachment_src,
                      attachment_from=attachment.attachment_from,
                      attachment_name=attachment.attachment_name,
                      server_mime=mime_type)
        else:
            ret = cls(identifier=attachment.identifier)
        return ret
Example #7
class OpenmrsImporter(Document):
    """
    Import cases from an OpenMRS instance using a report
    """
    domain = StringProperty()
    server_url = StringProperty()  # e.g. "http://www.example.com/openmrs"
    username = StringProperty()
    password = StringProperty()

    # If a domain has multiple OpenmrsImporter instances, for which CommCare location is this one authoritative?
    location_id = StringProperty()

    # How often should cases be imported
    import_frequency = StringProperty(choices=IMPORT_FREQUENCY_CHOICES,
                                      default=IMPORT_FREQUENCY_MONTHLY)

    log_level = IntegerProperty()

    # OpenMRS UUID of the report of patients to be imported
    report_uuid = StringProperty()

    # Can include template params, e.g. {"endDate": "{{ today }}"}
    # Available template params: "today", "location"
    report_params = DictProperty()

    # The case type of imported cases
    case_type = StringProperty()

    # The ID of the owner of imported cases, if all imported cases are to have the same owner. To assign imported
    # cases to different owners, see `location_type_name` below.
    owner_id = StringProperty()

    # If report_params includes "{{ location }}" then location_type_name is used to determine which locations to
    # pull the report for. Those locations will need an "openmrs_uuid" param set. Imported cases will be owned by
    # the first mobile worker assigned to that location. If this OpenmrsImporter.location_id is set, only
    # sub-locations will be returned
    location_type_name = StringProperty()

    # external_id should always be the OpenMRS UUID of the patient (and not, for example, a national ID number)
    # because it is immutable. external_id_column is the column that contains the UUID
    external_id_column = StringProperty()

    # Space-separated column(s) to be concatenated to create the case name (e.g. "givenName familyName")
    name_columns = StringProperty()

    column_map = ListProperty(ColumnMapping)

    def __str__(self):
        return self.server_url
Example #8
class DataSourceBuildInformation(DocumentSchema):
    """
    A class to encapsulate meta information about the process through which
    its DataSourceConfiguration was configured and built.
    """
    # Either the case type or the form xmlns that this data source is based on.
    source_id = StringProperty()
    # The app that the form belongs to, or the app that was used to infer the case properties.
    app_id = StringProperty()
    # The version of the app at the time of the data source's configuration.
    app_version = IntegerProperty()
    # True if the data source has been built, that is, if the corresponding SQL table has been populated.
    finished = BooleanProperty(default=False)
    # Start time of the most recent build SQL table celery task.
    initiated = DateTimeProperty()
Example #9
class DataSetMap(Document):
    # domain and UCR uniquely identify a DataSetMap
    domain = StringProperty()
    connection_settings_id = IntegerProperty(required=False, default=None)
    ucr_id = StringProperty()  # UCR ReportConfig id

    description = StringProperty()
    frequency = StringProperty(choices=SEND_FREQUENCIES,
                               default=SEND_FREQUENCY_MONTHLY)
    # Day of the month for monthly/quarterly frequency. Day of the week
    # for weekly frequency. Uses ISO-8601, where Monday = 1, Sunday = 7.
    day_to_send = IntegerProperty()
    data_set_id = StringProperty()  # If UCR adds values to an existing DataSet
    org_unit_id = StringProperty()  # If all values are for the same OrganisationUnit.
    org_unit_column = StringProperty()  # if not org_unit_id: use org_unit_column
    period = StringProperty()  # If all values are for the same period. Monthly is YYYYMM, quarterly is YYYYQ#
    period_column = StringProperty()  # if not period: use period_column

    attribute_option_combo_id = StringProperty()  # Optional. DHIS2 defaults this to categoryOptionCombo
    complete_date = StringProperty()  # Optional

    datavalue_maps = SchemaListProperty(DataValueMap)

    @property
    def connection_settings(self):
        if self.connection_settings_id:
            return ConnectionSettings.objects.get(
                pk=self.connection_settings_id)

    @property
    def pk(self):
        return self._id
Example #10
class CallCenterProperties(DocumentSchema):
    enabled = BooleanProperty(default=False)
    use_fixtures = BooleanProperty(default=True)

    case_owner_id = StringProperty()
    use_user_location_as_owner = BooleanProperty(default=False)
    user_location_ancestor_level = IntegerProperty(default=0)

    case_type = StringProperty()

    def fixtures_are_active(self):
        return self.enabled and self.use_fixtures

    def config_is_valid(self):
        return (self.use_user_location_as_owner or self.case_owner_id) and self.case_type
Example #11
class TempCommCareUser(CommCareUser):
    filter_flag = IntegerProperty()

    def __init__(self, domain, username, uuid):
        if username == HQUserType.human_readable[HQUserType.DEMO_USER]:
            filter_flag = HQUserType.DEMO_USER
        elif username == HQUserType.human_readable[HQUserType.ADMIN]:
            filter_flag = HQUserType.ADMIN
        else:
            filter_flag = HQUserType.UNKNOWN
        super(TempCommCareUser, self).__init__(domain=domain,
                                               username=username,
                                               _id=uuid,
                                               date_joined=datetime.utcnow(),
                                               is_active=False,
                                               user_data={},
                                               first_name='',
                                               last_name='',
                                               filter_flag=filter_flag)

    def save(self, **params):
        raise NotImplementedError

    @property
    def userID(self):
        return self._id

    @property
    def username_in_report(self):
        if self.filter_flag == HQUserType.UNKNOWN:
            final = mark_safe('%s <strong>[unregistered]</strong>' %
                              html.escape(self.username))
        elif self.filter_flag == HQUserType.DEMO_USER:
            final = mark_safe('<strong>%s</strong>' %
                              html.escape(self.username))
        else:
            final = mark_safe(
                '<strong>%s</strong> (%s)' %
                tuple(map(html.escape, [self.username, self.user_id])))
        return final

    @property
    def raw_username(self):
        return self.username

    class Meta(object):
        app_label = 'reports'
Example #12
class AuditCommand(AuditEvent):
    """
    Audit wrapper class to capture environmental information around a management command run.
    """
    sudo_user = StringProperty()

    # IP address of the logged-in user running the command, if available
    ip_address = StringProperty()
    pid = IntegerProperty()

    @classmethod
    def audit_command(cls):
        """
        Log a management command with available information

        The command line that was run is recorded in self.description
        """
        audit = cls.create_audit(cls, None)
        puname = platform.uname()
        audit.user = os.environ.get('USER', None)
        audit.pid = os.getpid()

        if 'SUDO_COMMAND' in os.environ:
            audit.description = os.environ.get('SUDO_COMMAND', None)
            audit.sudo_user = os.environ.get('SUDO_USER', None)
        else:
            # Note: this is a work in progress.
            # Getting the command line from a pid is a system-specific trick;
            # only Linux is supported at this point, other OSes can be added later.
            # This is largely for production logging of these commands.
            if puname[0] == 'Linux':
                with open('/proc/%s/cmdline' % audit.pid,
                          'r',
                          encoding='utf-8') as fin:
                    cmd_args = fin.read()
                    audit.description = cmd_args.replace('\0', ' ')
            elif puname[0] == 'Darwin':
                # mac osx
                # TODO
                pass
            elif puname[0] == 'Windows':
                # TODO
                pass

        audit.save()
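
A hedged sketch of where `audit_command` would typically be called: at the top of a Django management command's `handle`, so the run is recorded before any work happens (the command class below is a placeholder):

from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = "Example command whose invocations should be audited"

    def handle(self, *args, **options):
        AuditCommand.audit_command()  # records user, pid and command line
        ...  # actual command logic goes here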
Example #13
class CommCareCaseAttachment(LooselyEqualDocumentSchema, IsImageMixin):
    identifier = StringProperty()
    attachment_src = StringProperty()
    attachment_from = StringProperty()
    attachment_name = StringProperty()
    server_mime = StringProperty()  # Server detected MIME
    server_md5 = StringProperty()  # Couch detected hash

    attachment_size = IntegerProperty()  # file size
    attachment_properties = DictProperty(
    )  # width, height, other relevant metadata

    @property
    def content_type(self):
        return self.server_mime

    @property
    def is_present(self):
        """
        Helper method to see if this is a delete vs. update

        NOTE this is related to but reversed logic from
        `casexml.apps.case.xml.parser.CaseAttachment.is_delete`.
        """
        return self.attachment_src or self.attachment_from

    @classmethod
    def from_case_index_update(cls, attachment):
        if attachment.attachment_src or attachment.attachment_from:
            guessed = mimetypes.guess_type(attachment.attachment_src)
            if guessed[0] is not None:
                mime_type = guessed[0]
            else:
                mime_type = None

            ret = cls(identifier=attachment.identifier,
                      attachment_src=attachment.attachment_src,
                      attachment_from=attachment.attachment_from,
                      attachment_name=attachment.attachment_name,
                      server_mime=mime_type)
        else:
            ret = cls(identifier=attachment.identifier)
        return ret
Example #14
class CallCenterProperties(DocumentSchema):
    enabled = BooleanProperty(default=False)
    use_fixtures = BooleanProperty(default=True)

    case_owner_id = StringProperty()
    use_user_location_as_owner = BooleanProperty(default=False)
    user_location_ancestor_level = IntegerProperty(default=0)

    case_type = StringProperty()

    form_datasource_enabled = BooleanProperty(default=True)
    case_datasource_enabled = BooleanProperty(default=True)
    case_actions_datasource_enabled = BooleanProperty(default=True)

    def fixtures_are_active(self):
        return self.enabled and self.use_fixtures

    def config_is_valid(self):
        return (self.use_user_location_as_owner
                or self.case_owner_id) and self.case_type

    def update_from_app_config(self, config):
        """Update datasources enabled based on app config.

        Follows similar logic to CallCenterIndicators
        :returns: True if changes were made
        """
        pre = (self.form_datasource_enabled, self.case_datasource_enabled,
               self.case_actions_datasource_enabled)
        self.form_datasource_enabled = config.forms_submitted.enabled or bool(
            config.custom_form)
        self.case_datasource_enabled = (config.cases_total.enabled
                                        or config.cases_opened.enabled
                                        or config.cases_closed.enabled)
        self.case_actions_datasource_enabled = config.cases_active.enabled
        post = (self.form_datasource_enabled, self.case_datasource_enabled,
                self.case_actions_datasource_enabled)
        return pre != post
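
A small sketch of how the boolean return value of `update_from_app_config` can be used to avoid unnecessary saves; `domain_obj` and `app_config` are placeholders for a Domain document and the app's call center configuration:

def sync_call_center_datasources(domain_obj, app_config):
    # Only persist the Domain document when the datasource flags actually changed.
    if domain_obj.call_center_config.update_from_app_config(app_config):
        domain_obj.save()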
Example #15
class FormQuestionSchema(Document):
    """
    Contains information about the questions for a specific form
    specifically the options that are available (or have ever been available) for
    any multi-select questions.
    Calling `update_schema` will load the app and any saved versions of the app
    that have not already been processed and update the question schema with
    any new options.
    """
    domain = StringProperty(required=True)
    app_id = StringProperty(required=True)
    xmlns = StringProperty(required=True)

    last_processed_version = IntegerProperty(default=0)
    processed_apps = SetProperty(str)
    apps_with_errors = SetProperty(str)
    question_schema = SchemaDictProperty(QuestionMeta)

    class Meta(object):
        app_label = 'export'

    @classmethod
    def _get_id(cls, domain, app_id, xmlns):
        def _none_to_empty_string(value):
            return value if value is not None else ''

        key = list(map(_none_to_empty_string, [domain, app_id, xmlns]))
        return hashlib.sha1(':'.join(key).encode('utf-8')).hexdigest()

    @classmethod
    def get_by_key(cls, domain, app_id, xmlns):
        _id = cls._get_id(domain, app_id, xmlns)
        return cls.get(_id)

    @classmethod
    def get_or_create(cls, domain, app_id, xmlns):
        try:
            schema = cls.get_by_key(domain, app_id, xmlns)
        except ResourceNotFound:
            old_schemas = FormQuestionSchema.view(
                'form_question_schema/by_xmlns',
                key=[domain, app_id, xmlns],
                include_docs=True).all()

            if old_schemas:
                doc = old_schemas[0].to_json()
                del doc['_id']
                del doc['_rev']
                schema = FormQuestionSchema.wrap(doc)
                schema.save()

                for old in old_schemas:
                    old.delete()
            else:
                schema = FormQuestionSchema(domain=domain,
                                            app_id=app_id,
                                            xmlns=xmlns)
                schema.save()

        return schema

    def validate(self, required=True):
        # this isn't always set, so set to empty strings if not found
        if self.app_id is None:
            self.app_id = ''

        super(FormQuestionSchema, self).validate(required=required)
        if not self.get_id:
            self._id = self._get_id(self.domain, self.app_id, self.xmlns)

    def update_schema(self):
        all_app_ids = get_build_ids_after_version(self.domain, self.app_id,
                                                  self.last_processed_version)

        all_seen_apps = self.apps_with_errors | self.processed_apps
        to_process = [
            app_id for app_id in all_app_ids if app_id not in all_seen_apps
        ]
        if self.app_id not in all_seen_apps:
            to_process.append(self.app_id)

        for app_doc in iter_docs(Application.get_db(), to_process):
            if is_remote_app(app_doc):
                continue
            app = Application.wrap(app_doc)
            try:
                self.update_for_app(app)
            except AppManagerException:
                self.apps_with_errors.add(app.get_id)
                self.last_processed_version = app.version

        if to_process:
            self.save()

    def update_for_app(self, app):
        xform = app.get_xform_by_xmlns(self.xmlns, log_missing=False)
        if xform:
            prefix = '/{}/'.format(xform.data_node.tag_name)

            def to_json_path(xml_path):
                if not xml_path:
                    return

                if xml_path.startswith(prefix):
                    xml_path = xml_path[len(prefix):]
                return 'form.{}'.format(xml_path.replace('/', '.'))

            for question in xform.get_questions(app.langs):
                question_path = to_json_path(question['value'])
                if question['tag'] == 'select':
                    meta = self.question_schema.get(
                        question_path,
                        QuestionMeta(
                            repeat_context=to_json_path(question['repeat'])))
                    for opt in question['options']:
                        if opt['value'] not in meta.options:
                            meta.options.append(opt['value'])

                    self.question_schema[question_path] = meta
                else:
                    # If a question that was previously a multi-select is no
                    # longer one, we need to clear it from the question schema
                    self.question_schema.pop(question_path, None)

        self.processed_apps.add(app.get_id)
        self.last_processed_version = app.version
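
A brief usage sketch with placeholder domain, app id and xmlns: fetch or create the schema document for one form, refresh it from unprocessed app builds, then inspect the recorded multi-select options.

schema = FormQuestionSchema.get_or_create(
    domain="example-domain",
    app_id="1234567890abcdef",
    xmlns="http://openrosa.org/formdesigner/EXAMPLE",
)
schema.update_schema()  # processes new app versions and saves if anything changed
for question_path, meta in schema.question_schema.items():
    print(question_path, meta.options)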
Example #16
class OpenmrsImporter(Document):
    """
    Import cases from an OpenMRS instance using a report
    """
    domain = StringProperty()

    # TODO: (2020-03-06) Migrate to ConnectionSettings
    server_url = StringProperty()  # e.g. "http://www.example.com/openmrs"
    username = StringProperty()
    password = StringProperty()

    notify_addresses_str = StringProperty(default="")  # See also notify_addresses()

    # If a domain has multiple OpenmrsImporter instances, for which CommCare location is this one authoritative?
    location_id = StringProperty()

    # How often should cases be imported
    import_frequency = StringProperty(choices=IMPORT_FREQUENCY_CHOICES,
                                      default=IMPORT_FREQUENCY_MONTHLY)

    log_level = IntegerProperty()

    # Timezone name. If not specified, the domain's timezone will be used.
    timezone = StringProperty()

    # OpenMRS UUID of the report of patients to be imported
    report_uuid = StringProperty()

    # Can include template params, e.g. {"endDate": "{{ today }}"}
    # Available template params: "today", "location"
    report_params = DictProperty()

    # The case type of imported cases
    case_type = StringProperty()

    # The ID of the owner of imported cases, if all imported cases are to have the same owner. To assign imported
    # cases to different owners, see `location_type_name` below.
    owner_id = StringProperty()

    # If report_params includes "{{ location }}" then location_type_name is used to determine which locations to
    # pull the report for. Those locations will need an "openmrs_uuid" param set. Imported cases will be owned by
    # the first mobile worker assigned to that location. If this OpenmrsImporter.location_id is set, only
    # sub-locations will be returned
    location_type_name = StringProperty()

    # external_id should always be the OpenMRS UUID of the patient (and not, for example, a national ID number)
    # because it is immutable. external_id_column is the column that contains the UUID
    external_id_column = StringProperty()

    # Space-separated column(s) to be concatenated to create the case name (e.g. "givenName familyName")
    name_columns = StringProperty()

    column_map = ListProperty(ColumnMapping)

    def __str__(self):
        url = "@".join((self.username,
                        self.server_url)) if self.username else self.server_url
        return f"<{self.__class__.__name__} {self._id} {url}>"

    @property
    def notify_addresses(self):
        return [
            addr for addr in re.split('[, ]+', self.notify_addresses_str)
            if addr
        ]

    @memoized
    def get_timezone(self):
        if self.timezone:
            return coerce_timezone_value(self.timezone)
        else:
            return get_timezone_for_domain(self.domain)

    def should_import_today(self):
        today = datetime.today()
        return (self.import_frequency == IMPORT_FREQUENCY_DAILY
                or (self.import_frequency == IMPORT_FREQUENCY_WEEKLY
                    and today.weekday() == 1  # Tuesday
                    ) or (self.import_frequency == IMPORT_FREQUENCY_MONTHLY
                          and today.day == 1))
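
A sketch of how the scheduling helper above might be used by a periodic task; the importer values are placeholders, and the IMPORT_FREQUENCY_* constants come from the surrounding module:

importer = OpenmrsImporter(
    domain="example-domain",
    server_url="http://www.example.com/openmrs",
    import_frequency=IMPORT_FREQUENCY_WEEKLY,
)

# Weekly importers run on Tuesdays, monthly importers on the 1st of the month,
# and daily importers run every day.
if importer.should_import_today():
    ...  # kick off the import task here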
Example #17
class DataSetMap(Document):
    # domain and UCR uniquely identify a DataSetMap
    domain = StringProperty()
    ucr_id = StringProperty()  # UCR ReportConfig id

    description = StringProperty()
    frequency = StringProperty(choices=SEND_FREQUENCIES, default=SEND_FREQUENCY_MONTHLY)
    day_to_send = IntegerProperty()
    data_set_id = StringProperty()  # If UCR adds values to an existing DataSet
    org_unit_id = StringProperty()  # If all values are for the same OrganisationUnit.
    org_unit_column = StringProperty()  # if not org_unit_id: use org_unit_column
    period = StringProperty()  # If all values are for the same period. Monthly is YYYYMM, quarterly is YYYYQ#
    period_column = StringProperty()  # if not period: use period_column

    attribute_option_combo_id = StringProperty()  # Optional. DHIS2 defaults this to categoryOptionCombo
    complete_date = StringProperty()  # Optional

    datavalue_maps = SchemaListProperty(DataValueMap)

    @quickcache(['self.domain', 'self.ucr_id'])
    def get_datavalue_map_dict(self):
        dict_ = {dvm.column: dict(dvm, is_org_unit=False, is_period=False) for dvm in self.datavalue_maps}
        if self.org_unit_column:
            dict_[self.org_unit_column] = {'is_org_unit': True, 'is_period': False}
        if self.period_column:
            dict_[self.period_column] = {'is_org_unit': False, 'is_period': True}
        return dict_

    def get_datavalues(self, ucr_row):
        """
        Returns rows of "dataElement", "categoryOptionCombo", "value", and optionally "period", "orgUnit" and
        "comment" for this DataSet where ucr_row looks like::

            {
                "org_unit_id": "ABC",
                "data_element_cat_option_combo_1": 123,
                "data_element_cat_option_combo_2": 456,
                "data_element_cat_option_combo_3": 789,
            }

        """
        dv_map = self.get_datavalue_map_dict()
        datavalues = []
        org_unit = None
        period = None
        # First pass is to collate data element IDs and values
        for key, value in ucr_row.items():
            if key in dv_map:
                if dv_map[key]['is_org_unit']:
                    org_unit = value
                elif dv_map[key]['is_period']:
                    period = value
                else:
                    datavalue = {
                        'dataElement': dv_map[key]['data_element_id'],
                        'categoryOptionCombo': dv_map[key]['category_option_combo_id'],
                        'value': value,
                    }
                    if dv_map[key].get('comment'):
                        datavalue['comment'] = dv_map[key]['comment']
                    datavalues.append(datavalue)
        # Second pass is to set period and org unit
        if period or org_unit:
            for datavalue in datavalues:
                if period:
                    datavalue['period'] = period
                if org_unit:
                    datavalue['orgUnit'] = org_unit
        return datavalues

    def get_dataset(self, send_date):
        report_config = get_report_config(self.domain, self.ucr_id)
        date_filter = get_date_filter(report_config)

        if self.frequency == SEND_FREQUENCY_MONTHLY:
            date_range = get_previous_month(send_date)
            period = date_range.startdate.strftime('%Y%m')
        elif self.frequency == SEND_FREQUENCY_QUARTERLY:
            date_range = get_previous_quarter(send_date)
            period = date_range.startdate.strftime('%Y') + 'Q' + str((date_range.startdate.month // 3) + 1)
        ucr_data = get_ucr_data(report_config, date_filter, date_range)

        datavalues = (self.get_datavalues(row) for row in ucr_data)  # one UCR row may have many DataValues
        dataset = {
            'dataValues': list(chain.from_iterable(datavalues))  # get a single list of DataValues
        }
        if self.data_set_id:
            dataset['dataSet'] = self.data_set_id
        if self.org_unit_id:
            dataset['orgUnit'] = self.org_unit_id
        if self.period:
            dataset['period'] = self.period
        elif not self.period_column:
            dataset['period'] = period
        if self.attribute_option_combo_id:
            dataset['attributeOptionCombo'] = self.attribute_option_combo_id
        if self.complete_date:
            dataset['completeDate'] = self.complete_date
        return dataset

    def should_send_on_date(self, send_date):
        return self.day_to_send == send_date.day and (
            self.frequency == SEND_FREQUENCY_MONTHLY or
            self.frequency == SEND_FREQUENCY_QUARTERLY and send_date.month in [1, 4, 7, 10])
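
A worked sketch of `get_datavalues`, assuming a DataSetMap whose `datavalue_maps` cover the two data element columns and whose `org_unit_column` is "org_unit_id"; the column names and DHIS2 ids are hypothetical:

ucr_row = {
    "org_unit_id": "ABC",
    "data_element_cat_option_combo_1": 123,
    "data_element_cat_option_combo_2": 456,
}
# dataset_map.get_datavalues(ucr_row) would then return something like:
# [
#     {"dataElement": "dePcqu4mqV1", "categoryOptionCombo": "coJLA2A4gR8",
#      "value": 123, "orgUnit": "ABC"},
#     {"dataElement": "deXtJGmEqcX", "categoryOptionCombo": "coJLA2A4gR8",
#      "value": 456, "orgUnit": "ABC"},
# ]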
Example #18
class AccessAudit(AuditEvent):
    access_type = StringProperty(choices=ACCESS_CHOICES)
    ip_address = StringProperty()
    session_key = StringProperty()  # the Django auth session key

    user_agent = StringProperty()

    get_data = StringListProperty()
    post_data = StringListProperty()
    http_accept = StringProperty()
    path_info = StringProperty()

    failures_since_start = IntegerProperty()

    class Meta(object):
        app_label = 'auditcare'

    @property
    def summary(self):
        return "%s from %s" % (self.access_type, self.ip_address)


    @classmethod
    def audit_login(cls, request, user, *args, **kwargs):
        '''Create an instance of an access log for a successful login.'''
        audit = cls.create_audit(cls, user)
        audit.ip_address = utils.get_ip(request)
        ua = request.META.get('HTTP_USER_AGENT', '<unknown>')
        audit.http_accept = request.META.get('HTTP_ACCEPT', '<unknown>')
        audit.path_info = request.META.get('PATH_INFO', '<unknown>')
        audit.user_agent = ua
        audit.access_type = 'login'
        audit.description = "Login Success"
        audit.session_key = request.session.session_key
        audit.get_data = [] #[query2str(request.GET.items())]
        audit.post_data = []
        audit.save()

    @classmethod
    def audit_login_failed(cls, request, username, *args, **kwargs):
        '''Create an instance of an access log for a failed login.'''
        audit = cls.create_audit(cls, username)
        audit.ip_address = utils.get_ip(request)
        audit.access_type = 'login_failed'
        if username is not None:
            audit.description = "Login Failure: %s" % (username)
        else:
            audit.description = "Login Failure"
        audit.session_key = request.session.session_key
        audit.save()

    @classmethod
    def audit_logout(cls, request, user):
        '''Log a logout event'''
        audit = cls.create_audit(cls, user)
        audit.ip_address = utils.get_ip(request)

        if user == AnonymousUser:
            audit.description = "Logout anonymous"
        elif user is None:
            audit.description = "None"
        else:
            audit.description = "Logout %s" % (user.username)
        audit.access_type = 'logout'
        audit.session_key = request.session.session_key
        audit.save()
Example #19
class ElasticSearchIndexSettings(DocumentSchema):
    refresh_interval = StringProperty(default="5s")
    number_of_shards = IntegerProperty(default=2)
Example #20
class Dhis2Connection(Document):
    domain = StringProperty()
    server_url = StringProperty()
    username = StringProperty()
    password = StringProperty()
    log_level = IntegerProperty()
Example #21
class CObservation(OldDocument):
    doc_id = StringProperty()
    patient = StringProperty()  # case id

    pact_id = StringProperty()  # patient pact id
    provider = StringProperty()

    encounter_date = OldDateTimeProperty()
    anchor_date = OldDateTimeProperty()
    observed_date = OldDateTimeProperty()

    submitted_date = OldDateTimeProperty()
    created_date = OldDateTimeProperty()

    is_art = BooleanProperty()
    dose_number = IntegerProperty()
    total_doses = IntegerProperty()
    adherence = StringProperty()

    # DOT_OBSERVATION_ types
    method = StringProperty()

    is_reconciliation = BooleanProperty(default=False)

    day_index = IntegerProperty()

    # if there's something for that particular day, then it'll be here
    day_note = StringProperty()
    # new addition, if there's a slot for the day label, then retain it
    day_slot = IntegerProperty()
    # this is for the overall note for that submission,
    # will exist on the anchor date
    note = StringProperty()

    @classmethod
    def wrap(cls, obj):
        ints = ['dose_number', 'total_doses', 'day_index', 'day_slot']
        for prop_name in ints:
            val = obj.get(prop_name)
            if val and isinstance(val, six.string_types):
                obj[prop_name] = int(val)
        return super(CObservation, cls).wrap(obj)

    @property
    def obs_score(self):
        """Gets the relative score of the observation.
        """
        if self.method == "direct":
            return 3
        if self.method == "pillbox":
            return 2
        if self.method == "self":
            return 1

    @property
    def adinfo(self):
        """helper function to concatenate adherence and method to check for conflicts"""
        return ((self.is_art, self.dose_number, self.total_doses), "%s" % (self.adherence))

    class Meta:
        app_label = 'pact'

    def __unicode__(self):
        return "Obs %s [%s] %d/%d" % (json_format_date(self.observed_date), "ART" if self.is_art else "NonART", self.dose_number+1, self.total_doses)

    def __str__(self):
        return "Obs %s [%s] %d/%d" % (json_format_date(self.observed_date), "ART" if self.is_art else "NonART", self.dose_number+1, self.total_doses)

    def __repr__(self):
        return json.dumps(self.to_json(), indent=4)
Example #22
class Domain(QuickCachedDocumentMixin, BlobMixin, Document, SnapshotMixin):
    """
        Domain is the highest level collection of people/stuff
        in the system.  Pretty much everything happens at the
        domain-level, including user membership, permission to
        see data, reports, charts, etc.

        Exceptions: accounting has some models that combine multiple domains,
        which make "enterprise" multi-domain features like the enterprise dashboard possible.

        Naming conventions:
        Most often, variables representing domain names are named `domain`, and
        variables representing domain objects are named `domain_obj`. New code should
        follow this convention, unless it's in an area that consistently uses `domain`
        for the object and `domain_name` for the string.

        There's a `project` attribute attached to requests that's a domain object.
        In spite of this, don't use `project` in new code.
   """

    _blobdb_type_code = BLOB_CODES.domain

    name = StringProperty()
    is_active = BooleanProperty()
    date_created = DateTimeProperty()
    default_timezone = StringProperty(
        default=getattr(settings, "TIME_ZONE", "UTC"))
    case_sharing = BooleanProperty(default=False)
    secure_submissions = BooleanProperty(default=False)
    cloudcare_releases = StringProperty(
        choices=['stars', 'nostars', 'default'], default='default')
    organization = StringProperty()
    hr_name = StringProperty()  # the human-readable name for this project
    project_description = StringProperty()  # Brief description of the project
    creating_user = StringProperty()  # username of the user who created this domain

    # domain metadata
    project_type = StringProperty()  # e.g. MCH, HIV
    customer_type = StringProperty()  # plus, full, etc.
    is_test = StringProperty(choices=["true", "false", "none"], default="none")
    description = StringProperty()
    short_description = StringProperty()
    is_shared = BooleanProperty(default=False)
    commtrack_enabled = BooleanProperty(default=False)
    call_center_config = SchemaProperty(CallCenterProperties)
    restrict_superusers = BooleanProperty(default=False)
    allow_domain_requests = BooleanProperty(default=False)
    location_restriction_for_users = BooleanProperty(default=False)
    usercase_enabled = BooleanProperty(default=False)
    hipaa_compliant = BooleanProperty(default=False)
    use_sql_backend = BooleanProperty(default=False)
    first_domain_for_user = BooleanProperty(default=False)

    case_display = SchemaProperty(CaseDisplaySettings)

    # CommConnect settings
    survey_management_enabled = BooleanProperty(default=False)
    # Whether or not a case can register via sms
    sms_case_registration_enabled = BooleanProperty(default=False)
    # Case type to apply to cases registered via sms
    sms_case_registration_type = StringProperty()
    # Owner to apply to cases registered via sms
    sms_case_registration_owner_id = StringProperty()
    # Submitting user to apply to cases registered via sms
    sms_case_registration_user_id = StringProperty()
    # Whether or not a mobile worker can register via sms
    sms_mobile_worker_registration_enabled = BooleanProperty(default=False)
    use_default_sms_response = BooleanProperty(default=False)
    default_sms_response = StringProperty()
    chat_message_count_threshold = IntegerProperty()
    sms_language_fallback = StringProperty()
    custom_chat_template = StringProperty()  # See settings.CUSTOM_CHAT_TEMPLATES
    custom_case_username = StringProperty()  # Case property to use when showing the case's name in a chat window
    # If empty, sms can be sent at any time. Otherwise, only send during
    # these windows of time. SMS_QUEUE_ENABLED must be True in localsettings
    # for this to be considered.
    restricted_sms_times = SchemaListProperty(DayTimeWindow)
    # If empty, this is ignored. Otherwise, the framework will make sure
    # that during these days/times, no automated outbound sms will be sent
    # to someone if they have sent in an sms within sms_conversation_length
    # minutes. Outbound sms sent from a user in a chat window, however, will
    # still be sent. This is meant to prevent chat conversations from being
    # interrupted by automated sms reminders.
    # SMS_QUEUE_ENABLED must be True in localsettings for this to be
    # considered.
    sms_conversation_times = SchemaListProperty(DayTimeWindow)
    # In minutes, see above.
    sms_conversation_length = IntegerProperty(default=10)
    # Set to True to prevent survey questions and answers from being seen in
    # SMS chat windows.
    filter_surveys_from_chat = BooleanProperty(default=False)
    # The below option only matters if filter_surveys_from_chat = True.
    # If set to True, invalid survey responses will still be shown in the chat
    # window, while questions and valid responses will be filtered out.
    show_invalid_survey_responses_in_chat = BooleanProperty(default=False)
    # If set to True, a message read by anyone counts as being read by
    # everyone. Set to False so that a message only counts as read for a
    # user if that user has read it themselves.
    count_messages_as_read_by_anyone = BooleanProperty(default=False)
    enable_registration_welcome_sms_for_case = BooleanProperty(default=False)
    enable_registration_welcome_sms_for_mobile_worker = BooleanProperty(
        default=False)
    sms_survey_date_format = StringProperty()

    granted_messaging_access = BooleanProperty(default=False)

    # Allowed outbound SMS per day
    # If this is None, then the default is applied. See get_daily_outbound_sms_limit()
    custom_daily_outbound_sms_limit = IntegerProperty()

    # Allowed number of case updates or closes from automatic update rules in the daily rule run.
    # If this value is None, the value in settings.MAX_RULE_UPDATES_IN_ONE_RUN is used.
    auto_case_update_limit = IntegerProperty()

    # Allowed number of max OData feeds that this domain can create.
    # If this value is None, the value in settings.DEFAULT_ODATA_FEED_LIMIT is used
    odata_feed_limit = IntegerProperty()

    # exchange/domain copying stuff
    is_snapshot = BooleanProperty(default=False)
    is_approved = BooleanProperty(default=False)
    snapshot_time = DateTimeProperty()
    published = BooleanProperty(default=False)
    license = StringProperty(choices=LICENSES, default='cc')
    title = StringProperty()
    cda = SchemaProperty(LicenseAgreement)
    multimedia_included = BooleanProperty(default=True)
    downloads = IntegerProperty(default=0)  # number of downloads for this specific snapshot
    full_downloads = IntegerProperty(default=0)  # number of downloads for all snapshots from this domain
    author = StringProperty()
    phone_model = StringProperty()
    attribution_notes = StringProperty()
    publisher = StringProperty(choices=["organization", "user"],
                               default="user")
    yt_id = StringProperty()
    snapshot_head = BooleanProperty(default=False)

    deployment = SchemaProperty(Deployment)

    cached_properties = DictProperty()

    internal = SchemaProperty(InternalProperties)

    dynamic_reports = SchemaListProperty(DynamicReportSet)

    # extra user specified properties
    tags = StringListProperty()
    area = StringProperty(choices=AREA_CHOICES)
    sub_area = StringProperty(choices=SUB_AREA_CHOICES)
    launch_date = DateTimeProperty()

    last_modified = DateTimeProperty(default=datetime(2015, 1, 1))

    # when turned on, use SECURE_TIMEOUT for sessions of users who are members of this domain
    secure_sessions = BooleanProperty(default=False)

    two_factor_auth = BooleanProperty(default=False)
    strong_mobile_passwords = BooleanProperty(default=False)

    requested_report_builder_subscription = StringListProperty()

    report_whitelist = StringListProperty()

    # seconds between sending mobile UCRs to users. Can be overridden per user
    default_mobile_ucr_sync_interval = IntegerProperty()

    @classmethod
    def wrap(cls, data):
        # for domains that still use original_doc
        should_save = False
        if 'original_doc' in data:
            original_doc = data['original_doc']
            del data['original_doc']
            should_save = True
            if original_doc:
                original_doc = Domain.get_by_name(original_doc)
                data['copy_history'] = [original_doc._id]

        # for domains that have a public domain license
        if 'license' in data:
            if data.get("license", None) == "public":
                data["license"] = "cc"
                should_save = True

        if 'slug' in data and data["slug"]:
            data["hr_name"] = data["slug"]
            del data["slug"]

        if 'is_test' in data and isinstance(data["is_test"], bool):
            data["is_test"] = "true" if data["is_test"] else "false"
            should_save = True

        if 'cloudcare_releases' not in data:
            data['cloudcare_releases'] = 'nostars'  # legacy default setting

        # Don't actually remove location_types yet.  We can migrate fully and
        # remove this after everything's hunky-dory in production.  2015-03-06
        if 'location_types' in data:
            data['obsolete_location_types'] = data.pop('location_types')

        if 'granted_messaging_access' not in data:
            # enable messaging for domains created before this flag was added
            data['granted_messaging_access'] = True

        self = super(Domain, cls).wrap(data)
        if self.deployment is None:
            self.deployment = Deployment()
        if should_save:
            self.save()
        return self

    def get_default_timezone(self):
        """return a timezone object from self.default_timezone"""
        import pytz
        return pytz.timezone(self.default_timezone)

    @staticmethod
    @quickcache(['name'], timeout=24 * 60 * 60)
    def is_secure_session_required(name):
        domain_obj = Domain.get_by_name(name)
        return domain_obj and domain_obj.secure_sessions

    @staticmethod
    @quickcache(['couch_user._id', 'is_active'],
                timeout=5 * 60,
                memoize_timeout=10)
    def active_for_couch_user(couch_user, is_active=True):
        domain_names = couch_user.get_domains()
        return Domain.view(
            "domain/by_status",
            keys=[[is_active, d] for d in domain_names],
            reduce=False,
            include_docs=True,
        ).all()

    @staticmethod
    def active_for_user(user, is_active=True):
        if isinstance(user, AnonymousUser):
            return []
        from corehq.apps.users.models import CouchUser
        if isinstance(user, CouchUser):
            couch_user = user
        else:
            couch_user = CouchUser.from_django_user(user)
        if couch_user:
            return Domain.active_for_couch_user(couch_user,
                                                is_active=is_active)
        else:
            return []

    def add(self, model_instance, is_active=True):
        """
        Add something to this domain, through the generic relation.
        Returns the created membership object
        """
        # Add membership info to Couch
        couch_user = model_instance.get_profile().get_couch_user()
        couch_user.add_domain_membership(self.name)
        couch_user.save()

    def applications(self):
        return get_brief_apps_in_domain(self.name)

    def full_applications(self, include_builds=True):
        from corehq.apps.app_manager.util import get_correct_app_class
        from corehq.apps.app_manager.models import Application

        def wrap_application(a):
            return get_correct_app_class(a['doc']).wrap(a['doc'])

        if include_builds:
            startkey = [self.name]
            endkey = [self.name, {}]
        else:
            startkey = [self.name, None]
            endkey = [self.name, None, {}]

        return Application.get_db().view('app_manager/applications',
                                         startkey=startkey,
                                         endkey=endkey,
                                         include_docs=True,
                                         wrapper=wrap_application).all()

    @cached_property
    def versions(self):
        apps = self.applications()
        return list(set(a.application_version for a in apps))

    @cached_property
    def has_media(self):
        from corehq.apps.app_manager.util import is_remote_app
        for app in self.full_applications():
            if not is_remote_app(app) and app.has_media():
                return True
        return False

    @property
    def use_cloudcare_releases(self):
        return self.cloudcare_releases != 'nostars'

    def all_users(self):
        from corehq.apps.users.models import CouchUser
        return CouchUser.by_domain(self.name)

    def recent_submissions(self):
        return domain_has_submission_in_last_30_days(self.name)

    @classmethod
    @quickcache(['name'],
                skip_arg='strict',
                timeout=30 * 60,
                session_function=icds_conditional_session_key())
    def get_by_name(cls, name, strict=False):
        if not name:
            # get_by_name should never be called with name as None (or '', etc)
            # I fixed the code in such a way that if I raise a ValueError
            # all tests pass and basic pages load,
            # but in order not to break anything in the wild,
            # I'm opting to notify by email if/when this happens
            # but fall back to the previous behavior of returning None
            if settings.DEBUG:
                raise ValueError('%r is not a valid domain name' % name)
            else:
                _assert = soft_assert(notify_admins=True,
                                      exponential_backoff=False)
                _assert(False, '%r is not a valid domain name' % name)
                return None

        def _get_by_name(stale=False):
            extra_args = {'stale': settings.COUCH_STALE_QUERY} if stale else {}
            result = cls.view("domain/domains",
                              key=name,
                              reduce=False,
                              include_docs=True,
                              **extra_args).first()
            if not isinstance(result, Domain):
                # A stale view may return a result with no doc if the doc has just been deleted.
                # In this case couchdbkit just returns the raw view result as a dict
                return None
            else:
                return result

        domain = _get_by_name(stale=(not strict))
        if domain is None and not strict:
            # on the off chance this is a brand new domain, try with strict
            domain = _get_by_name(stale=False)
        return domain

    @classmethod
    def get_or_create_with_name(cls,
                                name,
                                is_active=False,
                                secure_submissions=True,
                                use_sql_backend=False):
        result = cls.view("domain/domains",
                          key=name,
                          reduce=False,
                          include_docs=True).first()
        if result:
            return result
        else:
            new_domain = Domain(
                name=name,
                is_active=is_active,
                date_created=datetime.utcnow(),
                secure_submissions=secure_submissions,
                use_sql_backend=use_sql_backend,
            )
            new_domain.save(**get_safe_write_kwargs())
            return new_domain

    @classmethod
    def generate_name(cls, hr_name, max_length=25):
        '''
        Generate a URL-friendly name based on a given human-readable name.
        Normalizes given name, then looks for conflicting domains, addressing
        conflicts by adding "-1", "-2", etc. May return None if it fails to
        generate a new, unique name. Throws exception if it can't figure out
        a name, which shouldn't happen unless max_length is absurdly short.
        '''
        from corehq.apps.domain.utils import get_domain_url_slug
        from corehq.apps.domain.dbaccessors import domain_or_deleted_domain_exists
        name = get_domain_url_slug(hr_name, max_length=max_length)
        if not name:
            raise NameUnavailableException
        if domain_or_deleted_domain_exists(name):
            prefix = name
            while len(prefix):
                name = next_available_name(
                    prefix, Domain.get_names_by_prefix(prefix + '-'))
                if domain_or_deleted_domain_exists(name):
                    # should never happen
                    raise NameUnavailableException
                if len(name) <= max_length:
                    return name
                prefix = prefix[:-1]
            raise NameUnavailableException

        return name

    @classmethod
    def get_all(cls, include_docs=True):
        domains = Domain.view("domain/not_snapshots", include_docs=False).all()
        if not include_docs:
            return domains
        else:
            return map(cls.wrap,
                       iter_docs(cls.get_db(), [d['id'] for d in domains]))

    @classmethod
    def get_all_names(cls):
        return sorted({d['key'] for d in cls.get_all(include_docs=False)})

    @classmethod
    def get_all_ids(cls):
        return [d['id'] for d in cls.get_all(include_docs=False)]

    @classmethod
    def get_names_by_prefix(cls, prefix):
        return [
            d['key'] for d in Domain.view("domain/domains",
                                          startkey=prefix,
                                          endkey=prefix + "zzz",
                                          reduce=False,
                                          include_docs=False).all()
        ] + [
            d['key'] for d in Domain.view("domain/deleted_domains",
                                          startkey=prefix,
                                          endkey=prefix + "zzz",
                                          reduce=False,
                                          include_docs=False).all()
        ]

    def case_sharing_included(self):
        return self.case_sharing or any(
            getattr(app, 'case_sharing', False) for app in self.applications()
        )

    def save(self, **params):
        from corehq.apps.domain.dbaccessors import domain_or_deleted_domain_exists

        self.last_modified = datetime.utcnow()
        if not self._rev:
            if domain_or_deleted_domain_exists(self.name):
                raise NameUnavailableException(self.name)
            # mark any new domain as timezone migration complete
            set_tz_migration_complete(self.name)
        super(Domain, self).save(**params)

        from corehq.apps.domain.signals import commcare_domain_post_save
        results = commcare_domain_post_save.send_robust(sender='domain',
                                                        domain=self)
        log_signal_errors(results,
                          "Error occurred during domain post_save (%s)",
                          {'domain': self.name})

    def snapshots(self, **view_kwargs):
        return Domain.view('domain/snapshots',
                           startkey=[self._id, {}],
                           endkey=[self._id],
                           include_docs=True,
                           reduce=False,
                           descending=True,
                           **view_kwargs)

    def update_deployment(self, **kwargs):
        self.deployment.update(kwargs)
        self.save()

    def update_internal(self, **kwargs):
        self.internal.update(kwargs)
        self.save()

    def display_name(self):
        if self.is_snapshot:
            return "Snapshot of %s" % self.copied_from.display_name()
        return self.hr_name or self.name

    def long_display_name(self):
        if self.is_snapshot:
            return format_html("Snapshot of {}",
                               self.copied_from.display_name())
        return self.hr_name or self.name

    __str__ = long_display_name

    def get_license_display(self):
        return LICENSES.get(self.license)

    def get_license_url(self):
        return LICENSE_LINKS.get(self.license)

    def copies(self):
        return Domain.view('domain/copied_from_snapshot',
                           key=self._id,
                           include_docs=True)

    def copies_of_parent(self):
        return Domain.view('domain/copied_from_snapshot',
                           keys=[s._id for s in self.copied_from.snapshots()],
                           include_docs=True)

    def delete(self, leave_tombstone=False):
        if not leave_tombstone and not settings.UNIT_TESTING:
            raise ValueError(
                'Cannot delete domain without leaving a tombstone except during testing'
            )
        self._pre_delete()
        if leave_tombstone:
            domain = self.get(self._id)
            if not domain.doc_type.endswith('-Deleted'):
                domain.doc_type = '{}-Deleted'.format(domain.doc_type)
                domain.save()
        else:
            super().delete()

        # The save signals can undo the effect of clearing the cache within save()
        # because they query the stale view (which attaches the up-to-date doc).
        # This is only a problem on delete/soft-delete, because those change the
        # doc's presence in the index, not just the doc content.
        # Since this is rare, I'm opting to just re-clear the cache here
        # rather than making the signals use a strict lookup or something like that.
        self.clear_caches()

    def _pre_delete(self):
        from corehq.apps.domain.deletion import apply_deletion_operations

        # delete SQL models first because UCR tables are indexed by configs in couch
        apply_deletion_operations(self.name)

        # delete couch docs
        for db, related_doc_ids in get_all_doc_ids_for_domain_grouped_by_db(
                self.name):
            iter_bulk_delete(db, related_doc_ids, chunksize=500)

    @classmethod
    def get_module_by_name(cls, domain_name):
        """
        import and return the python module corresponding to domain_name, or
        None if it doesn't exist.
        """
        module_name = settings.DOMAIN_MODULE_MAP.get(domain_name, domain_name)

        try:
            return import_module(module_name) if module_name else None
        except ImportError:
            return None

    @property
    @memoized
    def commtrack_settings(self):
        # this import causes some dependency issues so lives in here
        from corehq.apps.commtrack.models import CommtrackConfig
        if self.commtrack_enabled:
            return CommtrackConfig.for_domain(self.name)
        else:
            return None

    @property
    def has_custom_logo(self):
        return self.has_attachment(LOGO_ATTACHMENT)

    def get_custom_logo(self):
        if not self.has_custom_logo:
            return None

        return (self.fetch_attachment(LOGO_ATTACHMENT),
                self.blobs[LOGO_ATTACHMENT].content_type)

    def put_attachment(self, *args, **kw):
        return super(Domain, self).put_attachment(domain=self.name,
                                                  *args,
                                                  **kw)

    def get_case_display(self, case):
        """Get the properties display definition for a given case"""
        return self.case_display.case_details.get(case.type)

    def get_form_display(self, form):
        """Get the properties display definition for a given XFormInstance"""
        return self.case_display.form_details.get(form.xmlns)

    @property
    def location_types(self):
        from corehq.apps.locations.models import LocationType
        return LocationType.objects.filter(domain=self.name).all()

    @memoized
    def has_privilege(self, privilege):
        from corehq.apps.accounting.utils import domain_has_privilege
        return domain_has_privilege(self, privilege)

    @property
    @memoized
    def uses_locations(self):
        from corehq import privileges
        from corehq.apps.locations.models import LocationType
        return (self.has_privilege(privileges.LOCATIONS) and
                (self.commtrack_enabled
                 or LocationType.objects.filter(domain=self.name).exists()))

    def convert_to_commtrack(self):
        """
        One-stop-shop to make a domain CommTrack
        """
        from corehq.apps.commtrack.util import make_domain_commtrack
        make_domain_commtrack(self)

    def clear_caches(self):
        from .utils import domain_restricts_superusers
        super(Domain, self).clear_caches()
        self.get_by_name.clear(self.__class__, self.name)
        self.is_secure_session_required.clear(self.name)
        domain_restricts_superusers.clear(self.name)

    def get_daily_outbound_sms_limit(self):
        if self.custom_daily_outbound_sms_limit:
            return self.custom_daily_outbound_sms_limit

        # https://manage.dimagi.com/default.asp?274299
        return 50000
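
    # A short usage sketch of the helpers above ("my-project" is a placeholder):
    #
    #     domain = Domain.get_by_name("my-project")  # cached lookup cleared in clear_caches()
    #     Domain.get_all_names()                     # sorted names of all non-snapshot domains
    #     domain.get_daily_outbound_sms_limit()      # custom limit if set, otherwise 50000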
Example #23
0
class Repeater(QuickCachedDocumentMixin, Document):
    """
    Represents the configuration of a repeater. Will specify the URL to forward to and
    other properties of the configuration.
    """
    base_doc = 'Repeater'

    domain = StringProperty()
    connection_settings_id = IntegerProperty(required=False, default=None)
    # TODO: Delete the following properties once all Repeaters have been
    #       migrated to ConnectionSettings. (2020-05-16)
    url = StringProperty()
    auth_type = StringProperty(choices=(BASIC_AUTH, DIGEST_AUTH, OAUTH1,
                                        BEARER_AUTH),
                               required=False)
    username = StringProperty()
    password = StringProperty()  # See also plaintext_password()
    skip_cert_verify = BooleanProperty(default=False)  # See also verify()
    notify_addresses_str = StringProperty(
        default="")  # See also notify_addresses()

    format = StringProperty()
    friendly_name = _("Data")
    paused = BooleanProperty(default=False)

    # TODO: Use to collect stats to determine whether remote endpoint is valid
    started_at = DateTimeProperty(default=datetime.utcnow)
    last_success_at = DateTimeProperty(required=False, default=None)
    failure_streak = IntegerProperty(default=0)

    payload_generator_classes = ()

    _has_config = False

    def __str__(self):
        return f'{self.__class__.__name__}: {self.name}'

    def __repr__(self):
        return f"<{self.__class__.__name__} {self._id} {self.name!r}>"

    @property
    def connection_settings(self):
        if not self.connection_settings_id:
            return self.create_connection_settings()
        return self._get_connection_settings()

    # Cache across instances to avoid N+1 query problem when calling
    # Repeater.get_url() for each row in repeat record report
    @quickcache(['self.connection_settings_id'])
    def _get_connection_settings(self):
        return ConnectionSettings.objects.get(pk=self.connection_settings_id)

    @property
    def name(self):
        return self.connection_settings.name

    @classmethod
    def available_for_domain(cls, domain):
        """Returns whether this repeater can be used by a particular domain
        """
        return True

    def get_pending_record_count(self):
        return get_pending_repeat_record_count(self.domain, self._id)

    def get_failure_record_count(self):
        return get_failure_repeat_record_count(self.domain, self._id)

    def get_success_record_count(self):
        return get_success_repeat_record_count(self.domain, self._id)

    def get_cancelled_record_count(self):
        return get_cancelled_repeat_record_count(self.domain, self._id)

    def _format_or_default_format(self):
        from corehq.motech.repeaters.repeater_generators import RegisterGenerator
        return self.format or RegisterGenerator.default_format_by_repeater(
            self.__class__)

    def _get_payload_generator(self, payload_format):
        from corehq.motech.repeaters.repeater_generators import RegisterGenerator
        gen = RegisterGenerator.generator_class_by_repeater_format(
            self.__class__, payload_format)
        return gen(self)

    @property
    @memoized
    def generator(self):
        return self._get_payload_generator(self._format_or_default_format())

    def payload_doc(self, repeat_record):
        raise NotImplementedError

    @memoized
    def get_payload(self, repeat_record):
        return self.generator.get_payload(repeat_record,
                                          self.payload_doc(repeat_record))

    def get_attempt_info(self, repeat_record):
        return None

    def register(self, payload):
        if not self.allowed_to_forward(payload):
            return

        now = datetime.utcnow()
        repeat_record = RepeatRecord(repeater_id=self.get_id,
                                     repeater_type=self.doc_type,
                                     domain=self.domain,
                                     registered_on=now,
                                     next_check=now,
                                     payload_id=payload.get_id)
        metrics_counter('commcare.repeaters.new_record',
                        tags={
                            'domain': self.domain,
                            'doc_type': self.doc_type
                        })
        repeat_record.save()
        repeat_record.attempt_forward_now()
        return repeat_record

    def allowed_to_forward(self, payload):
        """
        Return True/False depending on whether the payload meets forwarding criteria or not
        """
        return True

    def clear_caches(self):
        super(Repeater, self).clear_caches()
        # Also expire the cache entry used when this repeater is fetched via the
        # base Repeater class. The quickcache cleared in clear_caches() keys on
        # the document class, so an entry cached under Repeater (as in edit
        # forms) is not expired by the concrete class alone; expire it
        # explicitly here with the Repeater class as well.
        Repeater.get.clear(Repeater, self._id)
        if self.__class__ == Repeater:
            cls = self.get_class_from_doc_type(self.doc_type)
        else:
            cls = self.__class__
        # force views to catch up with the change before invalidating the cache,
        # so that stale_query results stay consistent
        force_update_repeaters_views()
        # clear cls.by_domain (i.e. filtered by doc type)
        Repeater.by_domain.clear(cls, self.domain)
        Repeater.by_domain.clear(cls, self.domain, stale_query=True)
        # clear Repeater.by_domain (i.e. not filtered by doc type)
        Repeater.by_domain.clear(Repeater, self.domain)
        Repeater.by_domain.clear(Repeater, self.domain, stale_query=True)

    @classmethod
    @quickcache(['cls.__name__', 'domain', 'stale_query'],
                timeout=60 * 60,
                memoize_timeout=10)
    def by_domain(cls, domain, stale_query=False):
        key = [domain]
        stale_kwargs = {}
        if stale_query:
            stale_kwargs['stale'] = stale_ok()
        if cls.__name__ in get_all_repeater_types():
            key.append(cls.__name__)
        elif cls.__name__ == Repeater.__name__:
            # In this case the wrap function delegates to the
            # appropriate sub-repeater types.
            pass
        else:
            # Any repeater type can be posted to the API, and the installed apps
            # determine whether we actually know about it.
            # But if we do not know about it, then may as well return nothing now
            return []

        raw_docs = cls.view('repeaters/repeaters',
                            startkey=key,
                            endkey=key + [{}],
                            include_docs=True,
                            reduce=False,
                            wrap_doc=False,
                            **stale_kwargs)

        return [
            cls.wrap(repeater_doc['doc']) for repeater_doc in raw_docs
            if cls.get_class_from_doc_type(repeater_doc['doc']['doc_type'])
        ]

    @classmethod
    def wrap(cls, data):
        data.pop('name', None)
        if cls.__name__ == Repeater.__name__:
            cls_ = cls.get_class_from_doc_type(data['doc_type'])
            if cls_:
                return cls_.wrap(data)
            else:
                raise ResourceNotFound('Unknown repeater type: %s' % data)
        else:
            return super(Repeater, cls).wrap(data)

    @staticmethod
    def get_class_from_doc_type(doc_type):
        doc_type = doc_type.replace(DELETED_SUFFIX, '')
        repeater_types = get_all_repeater_types()
        if doc_type in repeater_types:
            return repeater_types[doc_type]
        else:
            return None

    def retire(self):
        if DELETED_SUFFIX not in self['doc_type']:
            self['doc_type'] += DELETED_SUFFIX
        if DELETED_SUFFIX not in self['base_doc']:
            self['base_doc'] += DELETED_SUFFIX
        self.paused = False
        self.save()

    def pause(self):
        self.paused = True
        self.save()

    def resume(self):
        self.paused = False
        self.save()

    def get_url(self, repeat_record):
        # to be overridden
        return self.connection_settings.url

    def allow_retries(self, response):
        """Whether to requeue the repeater when it fails
        """
        # respect the `retry` field of RepeaterResponse
        return getattr(response, 'retry', True)

    def get_headers(self, repeat_record):
        # to be overridden
        return self.generator.get_headers()

    @property
    def plaintext_password(self):
        def clean_repr(bytes_repr):
            """
            Drops the bytestring representation from ``bytes_repr``

            >>> clean_repr("b'spam'")
            'spam'
            """
            if bytes_repr.startswith("b'") and bytes_repr.endswith("'"):
                return bytes_repr[2:-1]
            return bytes_repr

        if self.password is None:
            return ''
        if self.password.startswith('${algo}$'.format(algo=ALGO_AES)):
            ciphertext = self.password.split('$', 2)[2]
            # Work around Py2to3 string-handling bug in encryption code
            # (fixed on 2018-03-12 by commit 3a900068)
            ciphertext = clean_repr(ciphertext)
            return b64_aes_decrypt(ciphertext)
        return self.password
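
    # Sketch of the stored formats handled above, assuming ALGO_AES == 'aes'
    # (the example values are made up; only the '$<algo>$<ciphertext>' layout
    # comes from the code):
    #
    #     repeater.password            # '$aes$c2VjcmV0...' (encrypted) or 'letmein' (legacy plaintext)
    #     repeater.plaintext_password  # 'letmein' in both cases; decrypts via b64_aes_decrypt()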

    @property
    def verify(self):
        return not self.skip_cert_verify

    def send_request(self, repeat_record, payload):
        url = self.get_url(repeat_record)
        return simple_post(
            self.domain,
            url,
            payload,
            headers=self.get_headers(repeat_record),
            auth_manager=self.connection_settings.get_auth_manager(),
            verify=self.verify,
            notify_addresses=self.connection_settings.notify_addresses,
            payload_id=repeat_record.payload_id,
        )

    def fire_for_record(self, repeat_record):
        payload = self.get_payload(repeat_record)
        try:
            response = self.send_request(repeat_record, payload)
        except (Timeout, ConnectionError) as error:
            log_repeater_timeout_in_datadog(self.domain)
            return self.handle_response(RequestConnectionError(error),
                                        repeat_record)
        except RequestException as err:
            return self.handle_response(err, repeat_record)
        except PossibleSSRFAttempt:
            return self.handle_response(Exception("Invalid URL"),
                                        repeat_record)
        except Exception as e:
            # This shouldn't ever happen in normal operation and would mean the code broke.
            # We want to notify ourselves of the error detail and tell the user something vague.
            notify_exception(None,
                             "Unexpected error sending repeat record request")
            return self.handle_response(Exception("Internal Server Error"),
                                        repeat_record)
        else:
            return self.handle_response(response, repeat_record)

    def handle_response(self, result, repeat_record):
        """
        route the result to the success, failure, or exception handlers

        result may be either a response object or an exception
        """
        if isinstance(result, Exception):
            attempt = repeat_record.handle_exception(result)
        elif is_response(
                result) and 200 <= result.status_code < 300 or result is True:
            attempt = repeat_record.handle_success(result)
        else:
            attempt = repeat_record.handle_failure(result)
        return attempt

    @property
    def form_class_name(self):
        """
        Return the name of the class whose edit form this class uses.

        (Most classes that extend CaseRepeater, and all classes that
        extend FormRepeater, use the same form.)
        """
        return self.__class__.__name__

    def create_connection_settings(self):
        if self.connection_settings_id:
            return  # Nothing to do
        conn = ConnectionSettings(
            domain=self.domain,
            name=self.url,
            url=self.url,
            auth_type=self.auth_type,
            username=self.username,
            skip_cert_verify=self.skip_cert_verify,
            notify_addresses_str=self.notify_addresses_str or '',
        )
        # Allow ConnectionSettings to encrypt old Repeater passwords:
        conn.plaintext_password = self.plaintext_password
        conn.save()
        self.connection_settings_id = conn.id
        self.save()
        return conn
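
# A minimal sketch of a custom repeater subclass, showing how the hooks above
# are typically filled in. The class name and the get_case() helper are
# hypothetical placeholders, not part of this codebase.
class ExampleCaseRepeater(Repeater):
    friendly_name = _("Forward example cases")

    # payload_generator_classes would normally list RegisterGenerator-registered
    # generator classes; left empty because this is only an illustration.
    payload_generator_classes = ()

    def payload_doc(self, repeat_record):
        # Fetch the document identified by payload_id (hypothetical helper).
        return get_case(repeat_record.payload_id, self.domain)

    def allowed_to_forward(self, payload):
        # Example forwarding filter: only forward closed cases.
        return payload.closed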
Example #24
0
class FixtureDataItem(Document):
    """
    Example old Item:
        domain = "hq-domain"
        data_type_id = <id of state FixtureDataType>
        fields = {
            "country": "India",
            "state_name": "Delhi",
            "state_id": "DEL"
        }

    Example new Item with attributes:
        domain = "hq-domain"
        data_type_id = <id of state FixtureDataType>
        fields = {
            "country": {"field_list": [
                {"field_value": "India", "properties": {}},
            ]},
            "state_name": {"field_list": [
                {"field_value": "Delhi_IN_ENG", "properties": {"lang": "eng"}},
                {"field_value": "Delhi_IN_HIN", "properties": {"lang": "hin"}},
            ]},
            "state_id": {"field_list": [
                {"field_value": "DEL", "properties": {}}
            ]}
        }
    If a field's 'properties' is an empty 'dict', that field has no attributes
    """
    domain = StringProperty()
    data_type_id = StringProperty()
    fields = DictProperty(FieldList)
    item_attributes = DictProperty()
    sort_key = IntegerProperty()

    @classmethod
    def wrap(cls, obj):
        if not obj["doc_type"] == "FixtureDataItem":
            raise ResourceNotFound
        if not obj["fields"]:
            return super(FixtureDataItem, cls).wrap(obj)

        # Migrate old basic fields to fields with attributes

        is_of_new_type = False
        fields_dict = {}

        def _is_new_type(field_val):
            old_types = (str, int, float)
            return field_val is not None and not isinstance(
                field_val, old_types)

        for field in obj['fields']:
            field_val = obj['fields'][field]
            if _is_new_type(field_val):
                # assumes all-or-nothing conversion of old types to new
                is_of_new_type = True
                break
            fields_dict[field] = {
                "field_list": [{
                    'field_value':
                    str(field_val)
                    if not isinstance(field_val, str) else field_val,
                    'properties': {}
                }]
            }
        if not is_of_new_type:
            obj['fields'] = fields_dict

        # Migrate fixture-items to have attributes
        if 'item_attributes' not in obj:
            obj['item_attributes'] = {}

        return super(FixtureDataItem, cls).wrap(obj)

    @property
    def fields_without_attributes(self):
        fields = {}
        for field in self.fields:
            # if the field has properties, a unique field_val can't be generated for FixtureItem
            if len(self.fields[field].field_list) > 1:
                raise FixtureVersionError(
                    "This method is not supported for fields with properties."
                    " field '%s' has properties" % field)
            fields[field] = self.fields[field].field_list[0].field_value
        return fields

    @property
    def try_fields_without_attributes(self):
        """This is really just for the API"""
        try:
            return self.fields_without_attributes
        except FixtureVersionError:
            return {
                key: value.to_api_json()
                for key, value in self.fields.items()
            }

    @property
    def data_type(self):
        if not hasattr(self, '_data_type'):
            self._data_type = FixtureDataType.get(self.data_type_id)
        return self._data_type

    def add_owner(self, owner, owner_type, transaction=None):
        assert (owner.domain == self.domain)
        with transaction or CouchTransaction() as transaction:
            o = FixtureOwnership(domain=self.domain,
                                 owner_type=owner_type,
                                 owner_id=owner.get_id,
                                 data_item_id=self.get_id)
            transaction.save(o)
        return o

    def remove_owner(self, owner, owner_type):
        for ownership in FixtureOwnership.view(
                'fixtures/ownership',
                key=[
                    self.domain, 'by data_item and ' + owner_type, self.get_id,
                    owner.get_id
                ],
                reduce=False,
                include_docs=True):
            try:
                ownership.delete()
            except ResourceNotFound:
                # looks like it was already deleted
                pass
            except ResourceConflict:
                raise FixtureException((
                    "couldn't remove ownership {owner_id} for item {fixture_id} of type "
                    "{data_type_id} in domain {domain}. It was updated elsewhere"
                ).format(owner_id=ownership._id,
                         fixture_id=self._id,
                         data_type_id=self.data_type_id,
                         domain=self.domain))

    def add_user(self, user, transaction=None):
        return self.add_owner(user, 'user', transaction=transaction)

    def remove_user(self, user):
        return self.remove_owner(user, 'user')

    def add_group(self, group, transaction=None):
        return self.add_owner(group, 'group', transaction=transaction)

    def remove_group(self, group):
        return self.remove_owner(group, 'group')

    def add_location(self, location, transaction=None):
        return self.add_owner(location, 'location', transaction=transaction)

    def remove_location(self, location):
        return self.remove_owner(location, 'location')

    def type_check(self):
        fields = set(self.fields.keys())
        for field in self.data_type.fields:
            if field.field_name in fields:
                fields.remove(field)
            else:
                raise FixtureTypeCheckError("field %s not in fixture data %s" %
                                            (field.field_name, self.get_id))
        if fields:
            raise FixtureTypeCheckError(
                "fields %s from fixture data %s not in fixture data type" %
                (', '.join(fields), self.get_id))

    def to_xml(self):
        def _serialize(val):
            if isinstance(val, (int, Decimal)):
                return str(val)
            else:
                return val if val is not None else ""

        xData = ElementTree.Element(self.data_type.tag)
        for attribute in self.data_type.item_attributes:
            try:
                xData.attrib[attribute] = _serialize(
                    self.item_attributes[attribute])
            except KeyError:
                # This should never occur, but if it does, the OTA restore on mobile will fail and
                # this error would have been raised and email-logged.
                raise FixtureTypeCheckError(
                    "Table with tag %s has an item with id %s that doesn't have an attribute as defined in its types definition"
                    % (self.data_type.tag, self.get_id))
        for field in self.data_type.fields:
            escaped_field_name = clean_fixture_field_name(field.field_name)
            if field.field_name not in self.fields:
                xField = ElementTree.SubElement(xData, escaped_field_name)
                xField.text = ""
            else:
                for field_with_attr in self.fields[
                        field.field_name].field_list:
                    xField = ElementTree.SubElement(xData, escaped_field_name)
                    xField.text = _serialize(field_with_attr.field_value)
                    for attribute in field_with_attr.properties:
                        val = field_with_attr.properties[attribute]
                        xField.attrib[attribute] = _serialize(val)

        return xData
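
    # For the "new Item" example in the class docstring, and assuming the data
    # type's tag is "state", to_xml() would produce roughly:
    #
    #     <state>
    #         <country>India</country>
    #         <state_name lang="eng">Delhi_IN_ENG</state_name>
    #         <state_name lang="hin">Delhi_IN_HIN</state_name>
    #         <state_id>DEL</state_id>
    #     </state>
    #
    # Each entry in a field's field_list becomes its own element, with that
    # entry's properties rendered as XML attributes.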

    def get_groups(self, wrap=True):
        group_ids = get_owner_ids_by_type(self.domain, 'group', self.get_id)
        if wrap:
            return set(
                Group.view(
                    '_all_docs',
                    keys=list(group_ids),
                    include_docs=True,
                ))
        else:
            return set(group_ids)

    @property
    @memoized
    def groups(self):
        return self.get_groups()

    def get_users(self, wrap=True, include_groups=False):
        user_ids = set(get_owner_ids_by_type(self.domain, 'user', self.get_id))
        if include_groups:
            group_ids = self.get_groups(wrap=False)
        else:
            group_ids = set()
        users_in_groups = [
            group.get_users(only_commcare=True) for group in Group.view(
                '_all_docs', keys=list(group_ids), include_docs=True)
        ]
        if wrap:
            return set(
                CommCareUser.view('_all_docs',
                                  keys=list(user_ids),
                                  include_docs=True)).union(*users_in_groups)
        else:
            return user_ids | set([user.get_id for user in users_in_groups])

    def get_all_users(self, wrap=True):
        return self.get_users(wrap=wrap, include_groups=True)

    @property
    @memoized
    def users(self):
        return self.get_users()

    @property
    @memoized
    def locations(self):
        loc_ids = get_owner_ids_by_type(self.domain, 'location', self.get_id)
        return SQLLocation.objects.filter(location_id__in=loc_ids)

    @classmethod
    def by_user(cls, user, wrap=True, domain=None):
        group_ids = Group.by_user(user, wrap=False)

        if isinstance(user, dict):
            # Added 2015-07-31, feel free to remove eventually.
            _assert = soft_assert('@'.join(['esoergel', 'dimagi.com']))
            _assert(False, "This apparently IS called with a user dict. How?")

            user_id = user.get('user_id')
            user_domain = domain
            location = CommCareUser.get(user_id).sql_location
        else:
            user_id = user.user_id
            user_domain = user.domain
            location = user.sql_location

        loc_ids = location.path if location else []

        def make_keys(owner_type, ids):
            return [[user_domain, 'data_item by {}'.format(owner_type), id_]
                    for id_ in ids]

        fixture_ids = set(FixtureOwnership.get_db().view(
            'fixtures/ownership',
            keys=(make_keys('user', [user_id]) +
                  make_keys('group', group_ids) +
                  make_keys('location', loc_ids)),
            reduce=False,
            wrapper=lambda r: r['value'],
        ))
        if wrap:
            results = cls.get_db().view('_all_docs',
                                        keys=list(fixture_ids),
                                        include_docs=True)

            # sort the results into those corresponding to real documents
            # and those corresponding to deleted or non-existent documents
            docs = []
            deleted_fixture_ids = set()

            for result in results:
                if result.get('doc'):
                    docs.append(cls.wrap(result['doc']))
                elif result.get('error'):
                    assert result['error'] == 'not_found'
                    deleted_fixture_ids.add(result['key'])
                else:
                    assert result['value']['deleted'] is True
                    deleted_fixture_ids.add(result['id'])

            # fetch and delete ownership documents pointing
            # to deleted or non-existent fixture documents
            # this cleanup is necessary since we used to not do this
            bad_ownerships = FixtureOwnership.for_all_item_ids(
                deleted_fixture_ids, user_domain)
            FixtureOwnership.get_db().bulk_delete(bad_ownerships)

            return docs
        else:
            return fixture_ids

    @classmethod
    def by_group(cls, group, wrap=True):
        fixture_ids = cls.get_db().view(
            'fixtures/ownership',
            key=[group.domain, 'data_item by group', group.get_id],
            reduce=False,
            wrapper=lambda r: r['value'],
            descending=True).all()

        return cls.view('_all_docs', keys=list(fixture_ids),
                        include_docs=True) if wrap else fixture_ids

    @classmethod
    def by_data_type(cls, domain, data_type):
        data_type_id = _id_from_doc(data_type)
        return cls.view('fixtures/data_items_by_domain_type',
                        key=[domain, data_type_id],
                        reduce=False,
                        include_docs=True,
                        descending=True)

    @classmethod
    def by_domain(cls, domain):
        return cls.view('fixtures/data_items_by_domain_type',
                        startkey=[domain, {}],
                        endkey=[domain],
                        reduce=False,
                        include_docs=True,
                        descending=True)

    @classmethod
    def by_field_value(cls, domain, data_type, field_name, field_value):
        data_type_id = _id_from_doc(data_type)
        return cls.view('fixtures/data_items_by_field_value',
                        key=[domain, data_type_id, field_name, field_value],
                        reduce=False,
                        include_docs=True)

    @classmethod
    def get_item_list(cls, domain, tag):
        data_type = FixtureDataType.by_domain_tag(domain, tag).one()
        return cls.by_data_type(domain, data_type).all()

    @classmethod
    def get_indexed_items(cls, domain, tag, index_field):
        """
        Looks up an item list and converts to mapping from `index_field`
        to a dict of all fields for that item.

            fixtures = FixtureDataItem.get_indexed_items('my_domain',
                'item_list_tag', 'index_field')
            result = fixtures['index_val']['result_field']
        """
        fixtures = cls.get_item_list(domain, tag)
        return dict((f.fields_without_attributes[index_field],
                     f.fields_without_attributes) for f in fixtures)

    def delete_ownerships(self, transaction):
        ownerships = FixtureOwnership.by_item_id(self.get_id, self.domain)
        transaction.delete_all(ownerships)

    def recursive_delete(self, transaction):
        self.delete_ownerships(transaction)
        transaction.delete(self)
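
# A small usage sketch for the lookups above (domain and tag values are
# hypothetical; "state" refers to the docstring example):
#
#     items = FixtureDataItem.get_item_list("my-domain", "state")
#     for item in items:
#         print(item.try_fields_without_attributes)
#
#     indexed = FixtureDataItem.get_indexed_items("my-domain", "state", "state_id")
#     indexed["DEL"]["state_name"]  # -> "Delhi" for the old-style docstring example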
Example #25
0
class RepeatRecord(Document):
    """
    A record of a particular instance of something that needs to be forwarded
    with a link to the proper repeater object
    """

    domain = StringProperty()
    repeater_id = StringProperty()
    repeater_type = StringProperty()
    payload_id = StringProperty()

    overall_tries = IntegerProperty(default=0)
    max_possible_tries = IntegerProperty(default=6)

    attempts = ListProperty(RepeatRecordAttempt)

    cancelled = BooleanProperty(default=False)
    registered_on = DateTimeProperty()
    last_checked = DateTimeProperty()
    failure_reason = StringProperty()
    next_check = DateTimeProperty()
    succeeded = BooleanProperty(default=False)

    @property
    def record_id(self):
        return self._id

    @classmethod
    def wrap(cls, data):
        should_bootstrap_attempts = ('attempts' not in data)

        self = super(RepeatRecord, cls).wrap(data)

        if should_bootstrap_attempts and self.last_checked:
            assert not self.attempts
            self.attempts = [
                RepeatRecordAttempt(
                    cancelled=self.cancelled,
                    datetime=self.last_checked,
                    failure_reason=self.failure_reason,
                    success_response=None,
                    next_check=self.next_check,
                    succeeded=self.succeeded,
                )
            ]
        return self

    @property
    @memoized
    def repeater(self):
        try:
            return Repeater.get(self.repeater_id)
        except ResourceNotFound:
            return None

    @property
    def url(self):
        warnings.warn(
            "RepeatRecord.url is deprecated. Use Repeater.get_url instead",
            DeprecationWarning)
        if self.repeater:
            return self.repeater.get_url(self)

    @property
    def state(self):
        state = RECORD_PENDING_STATE
        if self.succeeded:
            state = RECORD_SUCCESS_STATE
        elif self.cancelled:
            state = RECORD_CANCELLED_STATE
        elif self.failure_reason:
            state = RECORD_FAILURE_STATE
        return state

    @classmethod
    def all(cls, domain=None, due_before=None, limit=None):
        json_now = json_format_datetime(due_before or datetime.utcnow())
        repeat_records = RepeatRecord.view(
            "repeaters/repeat_records_by_next_check",
            startkey=[domain],
            endkey=[domain, json_now, {}],
            include_docs=True,
            reduce=False,
            limit=limit,
        )
        return repeat_records

    @classmethod
    def count(cls, domain=None):
        results = RepeatRecord.view(
            "repeaters/repeat_records_by_next_check",
            startkey=[domain],
            endkey=[domain, {}],
            reduce=True,
        ).one()
        return results['value'] if results else 0

    def add_attempt(self, attempt):
        self.attempts.append(attempt)
        self.last_checked = attempt.datetime
        self.next_check = attempt.next_check
        self.succeeded = attempt.succeeded
        self.cancelled = attempt.cancelled
        self.failure_reason = attempt.failure_reason

    def get_numbered_attempts(self):
        for i, attempt in enumerate(self.attempts):
            yield i + 1, attempt

    def postpone_by(self, duration):
        self.last_checked = datetime.utcnow()
        self.next_check = self.last_checked + duration
        self.save()

    def make_set_next_try_attempt(self, failure_reason):
        assert self.succeeded is False
        assert self.next_check is not None
        now = datetime.utcnow()
        return RepeatRecordAttempt(
            cancelled=False,
            datetime=now,
            failure_reason=failure_reason,
            success_response=None,
            next_check=now + _get_retry_interval(self.last_checked, now),
            succeeded=False,
        )

    def try_now(self):
        # try when we haven't succeeded and either we've
        # never checked, or it's time to check again
        return not self.succeeded

    def get_payload(self):
        return self.repeater.get_payload(self)

    def get_attempt_info(self):
        return self.repeater.get_attempt_info(self)

    def handle_payload_exception(self, exception):
        now = datetime.utcnow()
        return RepeatRecordAttempt(
            cancelled=True,
            datetime=now,
            failure_reason=str(exception),
            success_response=None,
            next_check=None,
            succeeded=False,
        )

    def fire(self, force_send=False):
        if self.try_now() or force_send:
            self.overall_tries += 1
            try:
                attempt = self.repeater.fire_for_record(self)
            except Exception as e:
                log_repeater_error_in_datadog(self.domain,
                                              status_code=None,
                                              repeater_type=self.repeater_type)
                attempt = self.handle_payload_exception(e)
                raise
            finally:
                # PyCharm warns that attempt might not be defined.
                # That'll only happen if fire_for_record raises a non-Exception exception (e.g. SIGINT)
                # or handle_payload_exception raises an exception. I'm okay with that. -DMR
                self.add_attempt(attempt)
                self.save()

    @staticmethod
    def _format_response(response):
        if not _is_response(response):
            return None
        response_body = getattr(response, "text", "")
        return '{}: {}.\n{}'.format(response.status_code, response.reason,
                                    response_body)

    def handle_success(self, response):
        """
        Log success in Datadog and return a success RepeatRecordAttempt.

        ``response`` can be a Requests response instance, or True if the
        payload did not result in an API call.
        """
        now = datetime.utcnow()
        if _is_response(response):
            # ^^^ Don't bother logging success in Datadog if the payload
            # did not need to be sent. (This can happen with DHIS2 if
            # the form that triggered the forwarder doesn't contain data
            # for a DHIS2 Event.)
            log_repeater_success_in_datadog(self.domain, response.status_code,
                                            self.repeater_type)
        return RepeatRecordAttempt(
            cancelled=False,
            datetime=now,
            failure_reason=None,
            success_response=self._format_response(response),
            next_check=None,
            succeeded=True,
            info=self.get_attempt_info(),
        )

    def handle_failure(self, response):
        """Do something with the response if the repeater fails
        """
        return self._make_failure_attempt(self._format_response(response),
                                          response)

    def handle_exception(self, exception):
        """handle internal exceptions
        """
        return self._make_failure_attempt(str(exception), None)

    def _make_failure_attempt(self, reason, response):
        log_repeater_error_in_datadog(
            self.domain, response.status_code if response else None,
            self.repeater_type)

        if self.repeater.allow_retries(
                response) and self.overall_tries < self.max_possible_tries:
            return self.make_set_next_try_attempt(reason)
        else:
            now = datetime.utcnow()
            return RepeatRecordAttempt(
                cancelled=True,
                datetime=now,
                failure_reason=reason,
                success_response=None,
                next_check=None,
                succeeded=False,
                info=self.get_attempt_info(),
            )

    def cancel(self):
        self.next_check = None
        self.cancelled = True

    def attempt_forward_now(self):
        from corehq.motech.repeaters.tasks import process_repeat_record

        def is_ready():
            return self.next_check < datetime.utcnow()

        def already_processed():
            return self.succeeded or self.cancelled or self.next_check is None

        if already_processed() or not is_ready():
            return

        # Set the next check to happen an arbitrarily long time from now so
        # if something goes horribly wrong with the delayed task it will not
        # be lost forever. A check at this time is expected to occur rarely,
        # if ever, because `process_repeat_record` will usually succeed or
        # reset the next check to sometime sooner.
        self.next_check = datetime.utcnow() + timedelta(hours=48)
        try:
            self.save()
        except ResourceConflict:
            # Another process beat us to the punch. This takes advantage
            # of Couch DB's optimistic locking, which prevents a process
            # with stale data from overwriting the work of another.
            return
        process_repeat_record.delay(self)

    def requeue(self):
        self.cancelled = False
        self.succeeded = False
        self.failure_reason = ''
        self.overall_tries = 0
        self.next_check = datetime.utcnow()
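
# Rough lifecycle of a RepeatRecord, pieced together from the methods above:
#
#     record = repeater.register(payload)  # creates the record and calls attempt_forward_now()
#     record.fire()                        # builds the payload and calls repeater.fire_for_record(),
#                                          # which routes the response through handle_success(),
#                                          # handle_failure(), or handle_exception()
#     record.requeue()                     # reset a cancelled/failed record so it can be retried
#
# Every outcome is stored as a RepeatRecordAttempt via add_attempt(), and the
# record's state is derived from the succeeded/cancelled/failure_reason flags.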
Example #26
0
class IndicatorDefinition(Document, AdminCRUDDocumentMixin):
    """
    An Indicator Definition defines how to compute the indicator that lives
    in the namespaced computed_ property of a case or form.
    """
    namespace = StringProperty()
    domain = StringProperty()
    slug = StringProperty()
    version = IntegerProperty()
    class_path = StringProperty()
    last_modified = DateTimeProperty()

    _admin_crud_class = IndicatorAdminCRUDManager

    _class_path = "corehq.apps.indicators.models"
    _returns_multiple = False

    def __init__(self, _d=None, **kwargs):
        super(IndicatorDefinition, self).__init__(_d, **kwargs)
        self.class_path = self._class_path

    def __str__(self):
        return "\n\n%(class_name)s - Modified %(last_modified)s\n %(slug)s, domain: %(domain)s," \
            " version: %(version)s, namespace: %(namespace)s. ID: %(indicator_id)s." % {
                'class_name': self.__class__.__name__,
                'slug': self.slug,
                'domain': self.domain,
                'version': self.version,
                'namespace': self.namespace,
                'last_modified': (self.last_modified.strftime('%m %B %Y at %H:%M')
                                  if self.last_modified else "Ages Ago"),
                'indicator_id': self._id,
            }

    @classmethod
    def key_properties(cls):
        """
            The ordering of these property names should match the ordering of what's emitted in the first part of
            the couch views used for fetching these indicators. These views currently are:
            - indicators/dynamic_indicator_definitions (Couch View Indicator Defs)
            - indicators/indicator_definitions (Form and Case Indicator Defs)
        """
        return ["namespace", "domain", "slug"]

    @classmethod
    def indicator_list_view(cls):
        return "indicators/indicator_definitions"

    @classmethod
    def _generate_couch_key(cls, version=None, reverse=False, **kwargs):
        key = list()
        key_prefix = list()
        for p in cls.key_properties():
            k = kwargs.get(p)
            if k is not None:
                key_prefix.append(p)
                key.append(k)
        key = [" ".join(key_prefix)] + key
        couch_key = dict(startkey=key, endkey=key +
                         [{}]) if version is None else dict(key=key +
                                                            [version])
        if reverse:
            return dict(startkey=couch_key.get('endkey'),
                        endkey=couch_key.get('startkey'))
        return couch_key
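
    # Worked example of the key layout produced above (values hypothetical):
    #
    #     cls._generate_couch_key(namespace="mvp", domain="my-domain", slug="births")
    #     # -> {'startkey': ["namespace domain slug", "mvp", "my-domain", "births"],
    #     #     'endkey':   ["namespace domain slug", "mvp", "my-domain", "births", {}]}
    #
    # Passing version=2 produces dict(key=[..., "births", 2]) instead, and
    # reverse=True swaps startkey and endkey for descending queries.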

    @classmethod
    def increment_or_create_unique(cls,
                                   namespace,
                                   domain,
                                   slug=None,
                                   version=None,
                                   **kwargs):
        """
        If an indicator with the same namespace, domain, and slug exists, create a new indicator with the
        version number incremented.
        # todo, this feels a bit buggy, so replace bulk copy indicators with
        # copy to domain at some point
        """
        couch_key = cls._generate_couch_key(namespace=namespace,
                                            domain=domain,
                                            slug=slug,
                                            reverse=True,
                                            **kwargs)

        existing_indicator = cls.view(cls.indicator_list_view(),
                                      reduce=False,
                                      include_docs=True,
                                      descending=True,
                                      limit=1,
                                      **couch_key).first()
        if existing_indicator:
            version = existing_indicator.version + 1
        elif version is None:
            version = 1

        new_indicator = cls(version=version,
                            namespace=namespace,
                            domain=domain,
                            slug=slug,
                            **kwargs)
        new_indicator.last_modified = datetime.datetime.utcnow()

        new_indicator.save()
        return new_indicator

    @classmethod
    def copy_to_domain(cls, domain, doc, override=False):
        """
        This copies an indicator doc to the current domain. Intended to be used
        by the export indicators feature.
        :param domain: the name of the domain the indicator should be copied to
        :param doc: the dictionary of kwargs to create the indicator
        :param override: Whether to override the existing indicator
        :return: True if indicator was copied, False if not
        """
        for reserved in ['_id', '_rev', 'last_modified']:
            if reserved in doc:
                del doc[reserved]

        couch_key = cls._generate_couch_key(domain=domain, reverse=True, **doc)
        existing_indicator = cls.view(cls.indicator_list_view(),
                                      reduce=False,
                                      include_docs=False,
                                      descending=True,
                                      limit=1,
                                      **couch_key).first()
        if existing_indicator and not override:
            return False
        if existing_indicator:
            existing_indicator.delete()
        new_indicator = cls(domain=domain, **doc)
        new_indicator.last_modified = datetime.datetime.utcnow()
        new_indicator.save()
        return True

    @classmethod
    @memoized
    def get_current(cls,
                    namespace,
                    domain,
                    slug,
                    version=None,
                    wrap=True,
                    **kwargs):

        couch_key = cls._generate_couch_key(namespace=namespace,
                                            domain=domain,
                                            slug=slug,
                                            version=version,
                                            reverse=True,
                                            **kwargs)
        results = cache_core.cached_view(cls.get_db(),
                                         cls.indicator_list_view(),
                                         cache_expire=60 * 60 * 6,
                                         reduce=False,
                                         include_docs=False,
                                         descending=True,
                                         **couch_key)
        doc = results[0] if results else None
        if wrap and doc:
            try:
                doc_class = to_function(
                    doc.get('value',
                            "%s.%s" % (cls._class_path, cls.__name__)))
                doc_instance = doc_class.get(doc.get('id'))
                return doc_instance
            except Exception as e:
                logging.error(
                    "No matching documents found for indicator %s: %s" %
                    (slug, e))
                return None
        return doc

    @classmethod
    def all_slugs(cls, namespace, domain, **kwargs):
        couch_key = cls._generate_couch_key(namespace=namespace,
                                            domain=domain,
                                            reverse=True,
                                            **kwargs)
        couch_key['startkey'][0] = couch_key.get('startkey', [])[0] + ' slug'
        couch_key['endkey'][0] = couch_key.get('endkey', [])[0] + ' slug'
        data = cls.view(cls.indicator_list_view(),
                        group=True,
                        group_level=cls.key_properties().index('slug') + 2,
                        descending=True,
                        **couch_key).all()
        return [item.get('key', [])[-1] for item in data]

    @classmethod
    @memoized
    def get_all(cls, namespace, domain, version=None, **kwargs):
        all_slugs = cls.all_slugs(namespace, domain, **kwargs)
        all_indicators = list()
        for slug in all_slugs:
            indicator = cls.get_current(namespace,
                                        domain,
                                        slug,
                                        version=version,
                                        **kwargs)
            if indicator and issubclass(indicator.__class__, cls):
                all_indicators.append(indicator)
        return all_indicators

    @classmethod
    def get_all_of_type(cls, namespace, domain, show_only_current=False):
        key = ["type", namespace, domain, cls.__name__]
        indicators = cls.view(cls.indicator_list_view(),
                              reduce=False,
                              include_docs=True,
                              startkey=key,
                              endkey=key + [{}]).all()
        unique = {}
        for ind in indicators:
            if ind.base_doc == "CaseIndicatorDefinition":
                specific_doc = ind.case_type
            elif ind.base_doc == "FormIndicatorDefinition":
                specific_doc = ind.xmlns
            else:
                specific_doc = "couch"
            unique["%s.%s.%s" % (ind.slug, ind.namespace, specific_doc)] = ind
        return list(unique.values())

    @classmethod
    def get_nice_name(cls):
        return "Indicator Definition"
Example #27
0
class AbstractSyncLog(SafeSaveDocument):
    date = DateTimeProperty()
    domain = StringProperty()
    user_id = StringProperty()
    build_id = StringProperty()  # only works with app-aware sync
    app_id = StringProperty()  # only works with app-aware sync

    previous_log_id = StringProperty()  # previous sync log, forming a chain
    duration = IntegerProperty()  # in seconds
    log_format = StringProperty()

    # owner_ids_on_phone stores the ids the phone thinks it's the owner of.
    # This typically includes the user id,
    # as well as all groups that the user is a member of.
    owner_ids_on_phone = StringListProperty()

    # for debugging / logging
    # rev of the previous log at the time of creation
    previous_log_rev = StringProperty()
    # last time a submission caused this to be modified
    last_submitted = DateTimeProperty()
    # rev when the last submission was saved
    rev_before_last_submitted = StringProperty()
    # last time this generated a cached response
    last_cached = DateTimeProperty()
    # the state hash of this when it was last cached
    hash_at_last_cached = StringProperty()

    # save state errors and hashes here
    had_state_error = BooleanProperty(default=False)
    error_date = DateTimeProperty()
    error_hash = StringProperty()
    cache_payload_paths = DictProperty()

    last_ucr_sync_times = SchemaListProperty(UCRSyncLog)

    strict = True  # for asserts

    @classmethod
    def wrap(cls, data):
        ret = super(AbstractSyncLog, cls).wrap(data)
        if hasattr(ret, 'has_assert_errors'):
            ret.strict = False
        return ret

    def save(self):
        self._synclog_sql = save_synclog_to_sql(self)

    def delete(self):
        if getattr(self, '_synclog_sql', None):
            self._synclog_sql.delete()

    def case_count(self):
        """
        How many cases are associated with this. Used in reports.
        """
        raise NotImplementedError()

    def phone_is_holding_case(self, case_id):
        raise NotImplementedError()

    def get_footprint_of_cases_on_phone(self):
        """
        Gets the phone's flat list of all case ids on the phone,
        owned or not owned but relevant.
        """
        raise NotImplementedError()

    def get_state_hash(self):
        return CaseStateHash(
            Checksum(self.get_footprint_of_cases_on_phone()).hexdigest())

    def update_phone_lists(self, xform, case_list):
        """
        Given a form and a list of touched cases, update this sync log to reflect the updated
        state on the phone.
        """
        raise NotImplementedError()

    @classmethod
    def from_other_format(cls, other_sync_log):
        """
        Convert to an instance of a subclass from another subclass. Subclasses can
        override this to provide conversion functions.
        """
        raise IncompatibleSyncLogType('Unable to convert from {} to {}'.format(
            type(other_sync_log),
            cls,
        ))

    # anything prefixed with 'tests_only' is only used in tests
    def tests_only_get_cases_on_phone(self):
        raise NotImplementedError()

    def test_only_clear_cases_on_phone(self):
        raise NotImplementedError()

    def test_only_get_dependent_cases_on_phone(self):
        raise NotImplementedError()
Example #28
0
class CouchIndicatorDef(DynamicIndicatorDefinition):
    """
        This indicator definition expects that it will deal with a couch view and an indicator key.
        If a user_id is provided when fetching the results, this definition will use:
        ["user", <domain_name>, <user_id>, <indicator_key>] as the main couch view key
        Otherwise it will use:
        ["all", <domain_name>, <indicator_key>]

    """
    couch_view = StringProperty()
    indicator_key = StringProperty()
    startdate_shift = IntegerProperty(default=0)
    enddate_shift = IntegerProperty(default=0)
    fixed_datespan_days = IntegerProperty(default=0)
    fixed_datespan_months = IntegerProperty(default=0)

    _admin_crud_class = CouchIndicatorCRUDManager

    @property
    @memoized
    def group_results_in_retrospective(self):
        """
            Determines whether or not to group results in the retrospective
        """
        return not any(
            getattr(self, field)
            for field in ('startdate_shift', 'enddate_shift',
                          'fixed_datespan_days', 'fixed_datespan_months'))

    def _get_results_key(self, user_id=None):
        prefix = "user" if user_id else "all"
        key = [prefix, self.domain]
        if user_id:
            key.append(user_id)
        key.append(self.indicator_key)
        return key
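
    # Example keys produced above, matching the class docstring ("my-domain"
    # and "abc123" are placeholders):
    #
    #     self._get_results_key()                  # -> ["all", "my-domain", <indicator_key>]
    #     self._get_results_key(user_id="abc123")  # -> ["user", "my-domain", "abc123", <indicator_key>]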

    def _apply_datespan_shifts(self, datespan):
        if datespan and not isinstance(datespan, DateSpan):
            raise ValueError("datespan must be an instance of DateSpan")

        if datespan:
            datespan = copy.copy(datespan)
            now = datetime.datetime.utcnow()

            # make sure we don't go over the current day
            # remember, there is no timezone support for this yet
            if datespan.enddate > now:
                datespan.enddate = now

            datespan.enddate = datespan.enddate.replace(hour=23,
                                                        minute=59,
                                                        second=59,
                                                        microsecond=999999)
            if self.fixed_datespan_days:
                datespan.startdate = datespan.enddate - datetime.timedelta(
                    days=self.fixed_datespan_days, microseconds=-1)
            if self.fixed_datespan_months:
                # By making the assumption that the end date is always the end of the month
                # the first months adjustment is accomplished by moving the start date to
                # the beginning of the month. Any additional months are subtracted in the usual way
                start = self.get_first_day_of_month(datespan.enddate.year,
                                                    datespan.enddate.month)
                start_year, start_month = add_months(
                    start.year, start.month, -(self.fixed_datespan_months - 1))
                datespan.startdate = start.replace(year=start_year,
                                                   month=start_month)

            if self.startdate_shift:
                datespan.startdate = datespan.startdate + datetime.timedelta(
                    days=self.startdate_shift)
            if self.enddate_shift:
                datespan.enddate = datespan.enddate + datetime.timedelta(
                    days=self.enddate_shift)

        return datespan
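    # Worked example (hypothetical dates, not from the source): with
    # fixed_datespan_months=3 and an incoming enddate of 2014-03-15, the
    # enddate is pinned to 2014-03-15 23:59:59.999999 and the startdate
    # becomes 2014-01-01 (first day of the month, two additional months back).
    # startdate_shift / enddate_shift then move those bounds by whole days.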

    def get_results_with_key(self,
                             key,
                             user_id=None,
                             datespan=None,
                             date_group_level=None,
                             reduce=False):
        view_kwargs = dict()
        if datespan:
            view_kwargs.update(startkey=key + datespan.startdate_key_utc,
                               endkey=key + datespan.enddate_key_utc + [{}])
        else:
            view_kwargs.update(startkey=key, endkey=key + [{}])
        if date_group_level is not None:
            base_level = 5 if user_id else 4
            view_kwargs.update(group=True,
                               group_level=base_level + date_group_level)
        else:
            view_kwargs.update(reduce=reduce)

        # Pull Data from the MVP-only DB
        from mvp_docs.models import IndicatorXForm
        db = IndicatorXForm.get_db()
        section = self.couch_view.split('/')
        couch_view = "%s_indicators/%s" % (section[0], section[1])

        return cache_core.cached_view(db,
                                      couch_view,
                                      cache_expire=60 * 60 * 6,
                                      **view_kwargs)
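    # Illustrative note (hypothetical view name, not from the source): a
    # couch_view of "child_health/visits" is remapped to the MVP indicator
    # design doc as "child_health_indicators/visits". When date grouping is
    # requested, group_level is base_level (4 for "all" keys, 5 for "user"
    # keys) plus date_group_level, which appears to group the emitted rows
    # by year, month or day respectively.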

    def get_raw_results(self,
                        user_ids,
                        datespan=False,
                        date_group_level=None,
                        reduce=False):
        """
        date_group_level can be 0 to group by year, 1 to group by month and 2 to group by day
        """
        datespan = self._apply_datespan_shifts(datespan)
        results = []
        for user_id in user_ids:
            key = self._get_results_key(user_id)
            results.extend(
                self.get_results_with_key(key, user_id, datespan,
                                          date_group_level, reduce))
        return results

    def get_value(self, user_ids, datespan=None, is_debug=False):
        results = self.get_raw_results(user_ids, datespan, reduce=not is_debug)
        if is_debug:
            contributing_ids = [r['id'] for r in results]
            value = len(contributing_ids)
            return value, contributing_ids
        value = 0
        for result in results:
            value += self._get_value_from_result(result)
        return value

    def _get_value_from_result(self, result):
        value = 0
        if isinstance(result, dict):
            result = [result]
        for item in result:
            new_val = item.get('value')
            if isinstance(new_val, dict):
                if '_total_unique' in new_val:
                    value += new_val.get('_total_unique', 0)
                elif '_sum_unique' in new_val:
                    value += new_val.get('_sum_unique', 0)
            else:
                value += new_val
        return value
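    # Illustrative note (hypothetical rows, not from the source): a reduced
    # row like {'value': {'_total_unique': 7}} contributes 7, a row like
    # {'value': {'_sum_unique': 3}} contributes 3, and a plain reduced count
    # such as {'value': 12} contributes 12.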

    def get_values_by_month(self, user_ids, datespan=None):
        totals = dict()
        result = self.get_raw_results(user_ids, datespan, date_group_level=1)
        for item in result:
            key = item.get('key', [])
            if len(key) >= 2:
                value = self._get_value_from_result(item)
                year = str(key[-2])
                month = str(key[-1])
                if not (month and year):
                    continue
                if year not in totals:
                    totals[year] = dict()
                if month not in totals[year]:
                    totals[year][month] = 0
                totals[year][month] += value
        return totals

    def get_values_by_year(self, user_ids, datespan=None):
        totals = dict()
        result = self.get_raw_results(user_ids, datespan, date_group_level=0)
        for item in result:
            key = item.get('key', [])
            value = self._get_value_from_result(item)
            if len(key) >= 1:
                year = str(key[-1])
                if not year:
                    continue
                if year not in totals:
                    totals[year] = 0
                totals[year] += value
        return totals

    def get_monthly_retrospective(self,
                                  user_ids=None,
                                  current_month=None,
                                  num_previous_months=12,
                                  return_only_dates=False,
                                  is_debug=False):
        if not isinstance(user_ids, list):
            user_ids = [user_ids]
        results_are_grouped = self.group_results_in_retrospective and not is_debug

        retro_months, datespan = self.get_first_days(
            current_month,
            num_previous_months,
            as_datespans=not results_are_grouped)
        monthly_totals = {}
        if results_are_grouped and not return_only_dates:
            monthly_totals = self.get_values_by_month(user_ids, datespan)

        retrospective = []
        for i, this_month in enumerate(retro_months):
            startdate = this_month if results_are_grouped else this_month.startdate
            y = str(startdate.year)
            m = str(startdate.month)
            if return_only_dates:
                month_value = 0
            elif results_are_grouped:
                month_value = monthly_totals.get(y, {}).get(m, 0)
            else:
                month_value = self.get_value(user_ids,
                                             this_month,
                                             is_debug=is_debug)
            monthly_result = {
                'date': startdate,
            }
            if isinstance(month_value, tuple):
                monthly_result['debug_data'] = month_value[1]
                month_value = month_value[0]
            monthly_result['value'] = month_value
            retrospective.append(monthly_result)
        return retrospective
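    # Illustrative note (not from the source): each entry in the returned
    # retrospective is a dict like {'date': <first day of month>, 'value': 5},
    # plus a 'debug_data' list of contributing doc ids when is_debug=True.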

    @classmethod
    def get_nice_name(cls):
        return "Simple Indicators"

    @classmethod
    def increment_or_create_unique(cls,
                                   namespace,
                                   domain,
                                   slug=None,
                                   version=None,
                                   **kwargs):
        if 'couch_view' in kwargs:
            # make sure that a viewname with trailing whitespace NEVER
            # gets created.
            kwargs['couch_view'] = kwargs['couch_view'].strip()

        super(CouchIndicatorDef,
              cls).increment_or_create_unique(namespace,
                                              domain,
                                              slug=slug,
                                              version=version,
                                              **kwargs)
Example #29
0
class RepeatRecord(Document):
    """
    A record of a particular instance of something that needs to be forwarded,
    with a link to the proper repeater object
    """

    domain = StringProperty()
    repeater_id = StringProperty()
    repeater_type = StringProperty()
    payload_id = StringProperty()

    overall_tries = IntegerProperty(default=0)
    max_possible_tries = IntegerProperty(default=3)

    attempts = ListProperty(RepeatRecordAttempt)

    cancelled = BooleanProperty(default=False)
    registered_on = DateTimeProperty()
    last_checked = DateTimeProperty()
    failure_reason = StringProperty()
    next_check = DateTimeProperty()
    succeeded = BooleanProperty(default=False)

    @property
    def record_id(self):
        return self._id

    @classmethod
    def wrap(cls, data):
        should_bootstrap_attempts = ('attempts' not in data)

        self = super(RepeatRecord, cls).wrap(data)

        if should_bootstrap_attempts and self.last_checked:
            assert not self.attempts
            self.attempts = [RepeatRecordAttempt(
                cancelled=self.cancelled,
                datetime=self.last_checked,
                failure_reason=self.failure_reason,
                success_response=None,
                next_check=self.next_check,
                succeeded=self.succeeded,
            )]
        return self

    @property
    @memoized
    def repeater(self):
        try:
            return Repeater.get(self.repeater_id)
        except ResourceNotFound:
            return None

    @property
    def url(self):
        warnings.warn("RepeatRecord.url is deprecated. Use Repeater.get_url instead", DeprecationWarning)
        if self.repeater:
            return self.repeater.get_url(self)

    @property
    def state(self):
        state = RECORD_PENDING_STATE
        if self.succeeded:
            state = RECORD_SUCCESS_STATE
        elif self.cancelled:
            state = RECORD_CANCELLED_STATE
        elif self.failure_reason:
            state = RECORD_FAILURE_STATE
        return state

    @classmethod
    def all(cls, domain=None, due_before=None, limit=None):
        json_now = json_format_datetime(due_before or datetime.utcnow())
        repeat_records = RepeatRecord.view("repeaters/repeat_records_by_next_check",
            startkey=[domain],
            endkey=[domain, json_now, {}],
            include_docs=True,
            reduce=False,
            limit=limit,
        )
        return repeat_records

    @classmethod
    def count(cls, domain=None):
        results = RepeatRecord.view("repeaters/repeat_records_by_next_check",
            startkey=[domain],
            endkey=[domain, {}],
            reduce=True,
        ).one()
        return results['value'] if results else 0

    def add_attempt(self, attempt):
        self.attempts.append(attempt)
        self.last_checked = attempt.datetime
        self.next_check = attempt.next_check
        self.succeeded = attempt.succeeded
        self.cancelled = attempt.cancelled
        self.failure_reason = attempt.failure_reason

    def get_numbered_attempts(self):
        for i, attempt in enumerate(self.attempts):
            yield i + 1, attempt

    def postpone_by(self, duration):
        self.last_checked = datetime.utcnow()
        self.next_check = self.last_checked + duration
        self.save()

    def make_set_next_try_attempt(self, failure_reason):
        # we use an exponential back-off to avoid submitting to bad urls
        # too frequently.
        assert self.succeeded is False
        assert self.next_check is not None
        window = timedelta(minutes=0)
        if self.last_checked:
            window = self.next_check - self.last_checked
            window += (window // 2)  # window *= 1.5
        if window < MIN_RETRY_WAIT:
            window = MIN_RETRY_WAIT
        elif window > MAX_RETRY_WAIT:
            window = MAX_RETRY_WAIT

        now = datetime.utcnow()
        return RepeatRecordAttempt(
            cancelled=False,
            datetime=now,
            failure_reason=failure_reason,
            success_response=None,
            next_check=now + window,
            succeeded=False,
        )
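    # Worked example (hypothetical durations, not from the source): if the
    # previous wait (next_check - last_checked) was 60 minutes, the next
    # window is 90 minutes (window += window // 2), clamped to the
    # [MIN_RETRY_WAIT, MAX_RETRY_WAIT] range, so repeated failures back off
    # roughly geometrically (~1.5x per attempt).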

    def try_now(self):
        # try whenever we haven't succeeded; timing (next_check) is
        # enforced by the callers that query for due records
        return not self.succeeded

    def get_payload(self):
        return self.repeater.get_payload(self)

    def get_attempt_info(self):
        return self.repeater.get_attempt_info(self)

    def handle_payload_exception(self, exception):
        now = datetime.utcnow()
        return RepeatRecordAttempt(
            cancelled=True,
            datetime=now,
            failure_reason=six.text_type(exception),
            success_response=None,
            next_check=None,
            succeeded=False,
        )

    def fire(self, force_send=False):
        if self.try_now() or force_send:
            self.overall_tries += 1
            try:
                attempt = self.repeater.fire_for_record(self)
            except Exception as e:
                log_repeater_error_in_datadog(self.domain, status_code=None,
                                              repeater_type=self.repeater_type)
                attempt = self.handle_payload_exception(e)
                raise
            finally:
                # pycharm warns attempt might not be defined.
                # that'll only happen if fire_for_record raises a non-Exception exception (e.g. SIGINT)
                # or handle_payload_exception raises an exception. I'm okay with that. -DMR
                self.add_attempt(attempt)
                self.save()

    @staticmethod
    def _format_response(response):
        return '{}: {}.\n{}'.format(
            response.status_code, response.reason, getattr(response, 'content', None))

    def handle_success(self, response):
        """Do something with the response if the repeater succeeds
        """
        now = datetime.utcnow()
        log_repeater_success_in_datadog(
            self.domain,
            response.status_code if response else None,
            self.repeater_type
        )
        return RepeatRecordAttempt(
            cancelled=False,
            datetime=now,
            failure_reason=None,
            success_response=self._format_response(response) if response else None,
            next_check=None,
            succeeded=True,
            info=self.get_attempt_info(),
        )

    def handle_failure(self, response):
        """Do something with the response if the repeater fails
        """
        return self._make_failure_attempt(self._format_response(response), response)

    def handle_exception(self, exception):
        """handle internal exceptions
        """
        return self._make_failure_attempt(six.text_type(exception), None)

    def _make_failure_attempt(self, reason, response):
        log_repeater_error_in_datadog(self.domain, response.status_code if response else None,
                                      self.repeater_type)

        if self.repeater.allow_retries(response) and self.overall_tries < self.max_possible_tries:
            return self.make_set_next_try_attempt(reason)
        else:
            now = datetime.utcnow()
            return RepeatRecordAttempt(
                cancelled=True,
                datetime=now,
                failure_reason=reason,
                success_response=None,
                next_check=None,
                succeeded=False,
                info=self.get_attempt_info(),
            )

    def cancel(self):
        self.next_check = None
        self.cancelled = True

    def requeue(self):
        self.cancelled = False
        self.succeeded = False
        self.failure_reason = ''
        self.overall_tries = 0
        self.next_check = datetime.utcnow()
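
# Minimal usage sketch (not part of the source): one way a periodic task
# might drive due records serially, assuming it is acceptable to fire them
# inline. The function name is hypothetical.
def _process_due_repeat_records_example(domain):
    for record in RepeatRecord.all(domain=domain, due_before=datetime.utcnow()):
        if record.succeeded or record.cancelled:
            continue
        # fire() increments overall_tries, records an attempt and saves
        record.fire()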
Example #30
0
class Domain(QuickCachedDocumentMixin, Document, SnapshotMixin):
    """Domain is the highest level collection of people/stuff
       in the system.  Pretty much everything happens at the
       domain-level, including user membership, permission to
       see data, reports, charts, etc."""

    name = StringProperty()
    is_active = BooleanProperty()
    date_created = DateTimeProperty()
    default_timezone = StringProperty(
        default=getattr(settings, "TIME_ZONE", "UTC"))
    case_sharing = BooleanProperty(default=False)
    secure_submissions = BooleanProperty(default=False)
    cloudcare_releases = StringProperty(
        choices=['stars', 'nostars', 'default'], default='default')
    organization = StringProperty()
    hr_name = StringProperty()  # the human-readable name for this project
    creating_user = StringProperty(
    )  # username of the user who created this domain

    # domain metadata
    project_type = StringProperty()  # e.g. MCH, HIV
    customer_type = StringProperty()  # plus, full, etc.
    is_test = StringProperty(choices=["true", "false", "none"], default="none")
    description = StringProperty()
    short_description = StringProperty()
    is_shared = BooleanProperty(default=False)
    commtrack_enabled = BooleanProperty(default=False)
    call_center_config = SchemaProperty(CallCenterProperties)
    has_careplan = BooleanProperty(default=False)
    restrict_superusers = BooleanProperty(default=False)
    allow_domain_requests = BooleanProperty(default=False)
    location_restriction_for_users = BooleanProperty(default=False)
    usercase_enabled = BooleanProperty(default=False)
    hipaa_compliant = BooleanProperty(default=False)
    use_sql_backend = BooleanProperty(default=False)

    case_display = SchemaProperty(CaseDisplaySettings)

    # CommConnect settings
    commconnect_enabled = BooleanProperty(default=False)
    survey_management_enabled = BooleanProperty(default=False)
    # Whether or not a case can register via sms
    sms_case_registration_enabled = BooleanProperty(default=False)
    # Case type to apply to cases registered via sms
    sms_case_registration_type = StringProperty()
    # Owner to apply to cases registered via sms
    sms_case_registration_owner_id = StringProperty()
    # Submitting user to apply to cases registered via sms
    sms_case_registration_user_id = StringProperty()
    # Whether or not a mobile worker can register via sms
    sms_mobile_worker_registration_enabled = BooleanProperty(default=False)
    use_default_sms_response = BooleanProperty(default=False)
    default_sms_response = StringProperty()
    chat_message_count_threshold = IntegerProperty()
    custom_chat_template = StringProperty(
    )  # See settings.CUSTOM_CHAT_TEMPLATES
    custom_case_username = StringProperty(
    )  # Case property to use when showing the case's name in a chat window
    # If empty, sms can be sent at any time. Otherwise, only send during
    # these windows of time. SMS_QUEUE_ENABLED must be True in localsettings
    # for this to be considered.
    restricted_sms_times = SchemaListProperty(DayTimeWindow)
    # If empty, this is ignored. Otherwise, the framework will make sure
    # that during these days/times, no automated outbound sms will be sent
    # to someone if they have sent in an sms within sms_conversation_length
    # minutes. Outbound sms sent from a user in a chat window, however, will
    # still be sent. This is meant to prevent chat conversations from being
    # interrupted by automated sms reminders.
    # SMS_QUEUE_ENABLED must be True in localsettings for this to be
    # considered.
    sms_conversation_times = SchemaListProperty(DayTimeWindow)
    # In minutes, see above.
    sms_conversation_length = IntegerProperty(default=10)
    # Set to True to prevent survey questions and answers from being seen in
    # SMS chat windows.
    filter_surveys_from_chat = BooleanProperty(default=False)
    # The below option only matters if filter_surveys_from_chat = True.
    # If set to True, invalid survey responses will still be shown in the chat
    # window, while questions and valid responses will be filtered out.
    show_invalid_survey_responses_in_chat = BooleanProperty(default=False)
    # If set to True, if a message is read by anyone it counts as being read by
    # everyone. Set to False so that a message is only counted as being read
    # for a user if that user has read it.
    count_messages_as_read_by_anyone = BooleanProperty(default=False)
    # Set to True to allow sending sms and all-label surveys to cases whose
    # phone number is duplicated with another contact
    send_to_duplicated_case_numbers = BooleanProperty(default=True)
    enable_registration_welcome_sms_for_case = BooleanProperty(default=False)
    enable_registration_welcome_sms_for_mobile_worker = BooleanProperty(
        default=False)
    sms_survey_date_format = StringProperty()

    # exchange/domain copying stuff
    is_snapshot = BooleanProperty(default=False)
    is_approved = BooleanProperty(default=False)
    snapshot_time = DateTimeProperty()
    published = BooleanProperty(default=False)
    license = StringProperty(choices=LICENSES, default='cc')
    title = StringProperty()
    cda = SchemaProperty(LicenseAgreement)
    multimedia_included = BooleanProperty(default=True)
    downloads = IntegerProperty(
        default=0)  # number of downloads for this specific snapshot
    full_downloads = IntegerProperty(
        default=0)  # number of downloads for all snapshots from this domain
    author = StringProperty()
    phone_model = StringProperty()
    attribution_notes = StringProperty()
    publisher = StringProperty(choices=["organization", "user"],
                               default="user")
    yt_id = StringProperty()
    snapshot_head = BooleanProperty(default=False)

    deployment = SchemaProperty(Deployment)

    image_path = StringProperty()
    image_type = StringProperty()

    cached_properties = DictProperty()

    internal = SchemaProperty(InternalProperties)

    dynamic_reports = SchemaListProperty(DynamicReportSet)

    # extra user specified properties
    tags = StringListProperty()
    area = StringProperty(choices=AREA_CHOICES)
    sub_area = StringProperty(choices=SUB_AREA_CHOICES)
    launch_date = DateTimeProperty()

    # to be eliminated from projects and related documents when they are copied for the exchange
    _dirty_fields = ('admin_password', 'admin_password_charset', 'city',
                     'countries', 'region', 'customer_type')

    default_mobile_worker_redirect = StringProperty(default=None)
    last_modified = DateTimeProperty(default=datetime(2015, 1, 1))

    # when turned on, use SECURE_TIMEOUT for sessions of users who are members of this domain
    secure_sessions = BooleanProperty(default=False)

    two_factor_auth = BooleanProperty(default=False)
    strong_mobile_passwords = BooleanProperty(default=False)

    # There is no longer a way to request a report builder trial, so this property should be removed in the near
    # future. (Keeping it for now in case a user has requested a trial but has not yet been granted it.)
    requested_report_builder_trial = StringListProperty()
    requested_report_builder_subscription = StringListProperty()

    @classmethod
    def wrap(cls, data):
        # for domains that still use original_doc
        should_save = False
        if 'original_doc' in data:
            original_doc = data['original_doc']
            del data['original_doc']
            should_save = True
            if original_doc:
                original_doc = Domain.get_by_name(original_doc)
                data['copy_history'] = [original_doc._id]

        # for domains that have a public domain license
        if 'license' in data:
            if data.get("license", None) == "public":
                data["license"] = "cc"
                should_save = True

        if 'slug' in data and data["slug"]:
            data["hr_name"] = data["slug"]
            del data["slug"]

        if 'is_test' in data and isinstance(data["is_test"], bool):
            data["is_test"] = "true" if data["is_test"] else "false"
            should_save = True

        if 'cloudcare_releases' not in data:
            data['cloudcare_releases'] = 'nostars'  # legacy default setting

        # Don't actually remove location_types yet.  We can migrate fully and
        # remove this after everything's hunky-dory in production.  2015-03-06
        if 'location_types' in data:
            data['obsolete_location_types'] = data.pop('location_types')

        self = super(Domain, cls).wrap(data)
        if self.deployment is None:
            self.deployment = Deployment()
        if should_save:
            self.save()
        return self

    def get_default_timezone(self):
        """return a timezone object from self.default_timezone"""
        import pytz
        return pytz.timezone(self.default_timezone)

    @staticmethod
    @quickcache(['name'], timeout=24 * 60 * 60)
    def is_secure_session_required(name):
        domain = Domain.get_by_name(name)
        return domain and domain.secure_sessions

    @staticmethod
    @skippable_quickcache(['couch_user._id', 'is_active'],
                          skip_arg='strict',
                          timeout=5 * 60,
                          memoize_timeout=10)
    def active_for_couch_user(couch_user, is_active=True, strict=False):
        domain_names = couch_user.get_domains()
        return Domain.view(
            "domain/by_status",
            keys=[[is_active, d] for d in domain_names],
            reduce=False,
            include_docs=True,
            stale=settings.COUCH_STALE_QUERY if not strict else None,
        ).all()

    @staticmethod
    def active_for_user(user, is_active=True, strict=False):
        if isinstance(user, AnonymousUser):
            return []
        from corehq.apps.users.models import CouchUser
        if isinstance(user, CouchUser):
            couch_user = user
        else:
            couch_user = CouchUser.from_django_user(user)
        if couch_user:
            return Domain.active_for_couch_user(couch_user,
                                                is_active=is_active,
                                                strict=strict)
        else:
            return []

    @classmethod
    def field_by_prefix(cls, field, prefix=''):
        # unichr(0xfff8) is something close to the highest character available
        res = cls.view(
            "domain/fields_by_prefix",
            group=True,
            startkey=[field, True, prefix],
            endkey=[field, True,
                    "%s%c" % (prefix, unichr(0xfff8)), {}])
        vals = [(d['value'], d['key'][2]) for d in res]
        vals.sort(reverse=True)
        return [(v[1], v[0]) for v in vals]
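    # Illustrative note (hypothetical prefix, not from the source): with
    # prefix "heal", the startkey/endkey pair matches keys such as "heal",
    # "health" and "healthcare"; unichr(0xfff8) acts as a near-maximal
    # sentinel so the range covers every key beginning with the prefix.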

    def add(self, model_instance, is_active=True):
        """
        Add something to this domain, through the generic relation.
        Returns the created membership object
        """
        # Add membership info to Couch
        couch_user = model_instance.get_profile().get_couch_user()
        couch_user.add_domain_membership(self.name)
        couch_user.save()

    def applications(self):
        return get_brief_apps_in_domain(self.name)

    def full_applications(self, include_builds=True):
        from corehq.apps.app_manager.models import Application, RemoteApp
        WRAPPERS = {'Application': Application, 'RemoteApp': RemoteApp}

        def wrap_application(a):
            return WRAPPERS[a['doc']['doc_type']].wrap(a['doc'])

        if include_builds:
            startkey = [self.name]
            endkey = [self.name, {}]
        else:
            startkey = [self.name, None]
            endkey = [self.name, None, {}]

        return Application.get_db().view('app_manager/applications',
                                         startkey=startkey,
                                         endkey=endkey,
                                         include_docs=True,
                                         wrapper=wrap_application).all()

    @cached_property
    def versions(self):
        apps = self.applications()
        return list(set(a.application_version for a in apps))

    @cached_property
    def has_case_management(self):
        for app in self.full_applications():
            if app.doc_type == 'Application':
                if app.has_case_management():
                    return True
        return False

    @cached_property
    def has_media(self):
        for app in self.full_applications():
            if app.doc_type == 'Application' and app.has_media():
                return True
        return False

    @property
    def use_cloudcare_releases(self):
        return self.cloudcare_releases != 'nostars'

    def all_users(self):
        from corehq.apps.users.models import CouchUser
        return CouchUser.by_domain(self.name)

    def recent_submissions(self):
        return domain_has_submission_in_last_30_days(self.name)

    @cached_property
    def languages(self):
        apps = self.applications()
        return set(chain.from_iterable([a.langs for a in apps]))

    def readable_languages(self):
        return ', '.join(lang_lookup[lang] or lang
                         for lang in self.languages())

    def __unicode__(self):
        return self.name

    @classmethod
    @skippable_quickcache(['name'], skip_arg='strict', timeout=30 * 60)
    def get_by_name(cls, name, strict=False):
        if not name:
            # get_by_name should never be called with name as None (or '', etc)
            # I fixed the code in such a way that if I raise a ValueError
            # all tests pass and basic pages load,
            # but in order not to break anything in the wild,
            # I'm opting to notify by email if/when this happens
            # but fall back to the previous behavior of returning None
            if settings.DEBUG:
                raise ValueError('%r is not a valid domain name' % name)
            else:
                _assert = soft_assert(notify_admins=True,
                                      exponential_backoff=False)
                _assert(False, '%r is not a valid domain name' % name)
                return None

        def _get_by_name(stale=False):
            extra_args = {'stale': settings.COUCH_STALE_QUERY} if stale else {}
            result = cls.view("domain/domains",
                              key=name,
                              reduce=False,
                              include_docs=True,
                              **extra_args).first()
            if not isinstance(result, Domain):
                # A stale view may return a result with no doc if the doc has just been deleted.
                # In this case couchdbkit just returns the raw view result as a dict
                return None
            else:
                return result

        domain = _get_by_name(stale=(not strict))
        if domain is None and not strict:
            # on the off chance this is a brand new domain, try with strict
            domain = _get_by_name(stale=False)
        return domain

    @classmethod
    def get_or_create_with_name(cls,
                                name,
                                is_active=False,
                                secure_submissions=True):
        result = cls.view("domain/domains",
                          key=name,
                          reduce=False,
                          include_docs=True).first()
        if result:
            return result
        else:
            new_domain = Domain(
                name=name,
                is_active=is_active,
                date_created=datetime.utcnow(),
                secure_submissions=secure_submissions,
            )
            new_domain.save(**get_safe_write_kwargs())
            return new_domain

    @classmethod
    def generate_name(cls, hr_name, max_length=25):
        '''
        Generate a URL-friendly name based on a given human-readable name.
        Normalizes the given name, then looks for conflicting domains,
        addressing conflicts by adding "-1", "-2", etc. Raises
        NameUnavailableException if it can't find a unique name, which
        shouldn't happen unless max_length is absurdly short.
        '''

        name = name_to_url(hr_name, "project")
        if Domain.get_by_name(name):
            prefix = name
            while len(prefix):
                name = next_available_name(
                    prefix, Domain.get_names_by_prefix(prefix + '-'))
                if Domain.get_by_name(name):
                    # should never happen
                    raise NameUnavailableException
                if len(name) <= max_length:
                    return name
                prefix = prefix[:-1]
            raise NameUnavailableException

        return name
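    # Illustrative example (hypothetical names, not from the source): if
    # "My Project" normalizes to "my-project" and that domain already exists,
    # next_available_name presumably picks the first unused suffix, e.g.
    # "my-project-1" (or "-2" if "-1" is taken), and the prefix is shortened
    # one character at a time whenever a candidate would exceed max_length.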

    @classmethod
    def get_all(cls, include_docs=True):
        domains = Domain.view("domain/not_snapshots", include_docs=False).all()
        if not include_docs:
            return domains
        else:
            return imap(cls.wrap,
                        iter_docs(cls.get_db(), [d['id'] for d in domains]))

    @classmethod
    def get_all_names(cls):
        return [d['key'] for d in cls.get_all(include_docs=False)]

    @classmethod
    def get_all_ids(cls):
        return [d['id'] for d in cls.get_all(include_docs=False)]

    @classmethod
    def get_names_by_prefix(cls, prefix):
        return [
            d['key'] for d in Domain.view("domain/domains",
                                          startkey=prefix,
                                          endkey=prefix + u"zzz",
                                          reduce=False,
                                          include_docs=False).all()
        ]

    def case_sharing_included(self):
        return self.case_sharing or reduce(lambda x, y: x or y, [
            getattr(app, 'case_sharing', False) for app in self.applications()
        ], False)

    def save(self, **params):
        self.last_modified = datetime.utcnow()
        if not self._rev:
            # mark any new domain as timezone migration complete
            set_migration_complete(self.name)
        super(Domain, self).save(**params)

        from corehq.apps.domain.signals import commcare_domain_post_save
        results = commcare_domain_post_save.send_robust(sender='domain',
                                                        domain=self)
        for result in results:
            # Second argument is None if there was no error
            if result[1]:
                notify_exception(
                    None,
                    message="Error occured during domain post_save %s: %s" %
                    (self.name, str(result[1])))

    def save_copy(self,
                  new_domain_name=None,
                  new_hr_name=None,
                  user=None,
                  copy_by_id=None,
                  share_reminders=True,
                  share_user_roles=True):
        from corehq.apps.app_manager.dbaccessors import get_app
        from corehq.apps.reminders.models import CaseReminderHandler
        from corehq.apps.fixtures.models import FixtureDataItem
        from corehq.apps.app_manager.dbaccessors import get_brief_apps_in_domain
        from corehq.apps.domain.dbaccessors import get_doc_ids_in_domain_by_class
        from corehq.apps.fixtures.models import FixtureDataType
        from corehq.apps.users.models import UserRole

        db = Domain.get_db()
        new_id = db.copy_doc(self.get_id)['id']
        if new_domain_name is None:
            new_domain_name = new_id

        with CriticalSection(
            ['request_domain_name_{}'.format(new_domain_name)]):
            new_domain_name = Domain.generate_name(new_domain_name)
            new_domain = Domain.get(new_id)
            new_domain.name = new_domain_name
            new_domain.hr_name = new_hr_name
            new_domain.copy_history = self.get_updated_history()
            new_domain.is_snapshot = False
            new_domain.snapshot_time = None
            new_domain.organization = None  # TODO: use current user's organization (?)

            # reset stuff
            new_domain.cda.signed = False
            new_domain.cda.date = None
            new_domain.cda.type = None
            new_domain.cda.user_id = None
            new_domain.cda.user_ip = None
            new_domain.is_test = "none"
            new_domain.internal = InternalProperties()
            new_domain.creating_user = user.username if user else None

            for field in self._dirty_fields:
                if hasattr(new_domain, field):
                    delattr(new_domain, field)

            # Saving the domain should happen before we import any apps since
            # importing apps can update the domain object (for example, if the
            # user-as-a-case feature needs to be enabled)
            new_domain.save()

            new_app_components = {}  # a mapping of component's id to its copy

            def copy_data_items(old_type_id, new_type_id):
                for item in FixtureDataItem.by_data_type(
                        self.name, old_type_id):
                    comp = self.copy_component(item.doc_type,
                                               item._id,
                                               new_domain_name,
                                               user=user)
                    comp.data_type_id = new_type_id
                    comp.save()

            def get_latest_app_id(doc_id):
                app = get_app(self.name, doc_id).get_latest_saved()
                if app:
                    return app._id, app.doc_type

            for app in get_brief_apps_in_domain(self.name):
                doc_id, doc_type = app.get_id, app.doc_type
                original_doc_id = doc_id
                if copy_by_id and doc_id not in copy_by_id:
                    continue
                if not self.is_snapshot:
                    doc_id, doc_type = get_latest_app_id(doc_id) or (doc_id,
                                                                     doc_type)
                component = self.copy_component(doc_type,
                                                doc_id,
                                                new_domain_name,
                                                user=user)
                if component:
                    new_app_components[original_doc_id] = component

            for doc_id in get_doc_ids_in_domain_by_class(
                    self.name, FixtureDataType):
                if copy_by_id and doc_id not in copy_by_id:
                    continue
                component = self.copy_component('FixtureDataType',
                                                doc_id,
                                                new_domain_name,
                                                user=user)
                copy_data_items(doc_id, component._id)

            if share_reminders:
                for doc_id in get_doc_ids_in_domain_by_class(
                        self.name, CaseReminderHandler):
                    self.copy_component('CaseReminderHandler',
                                        doc_id,
                                        new_domain_name,
                                        user=user)
            if share_user_roles:
                for doc_id in get_doc_ids_in_domain_by_class(
                        self.name, UserRole):
                    self.copy_component('UserRole',
                                        doc_id,
                                        new_domain_name,
                                        user=user)

        if user:

            def add_dom_to_user(user):
                user.add_domain_membership(new_domain_name, is_admin=True)

            apply_update(user, add_dom_to_user)

        def update_events(handler):
            """
            Change the form_unique_id to the proper form for each event in a newly copied CaseReminderHandler
            """
            from corehq.apps.app_manager.models import FormBase
            for event in handler.events:
                if not event.form_unique_id:
                    continue
                form = FormBase.get_form(event.form_unique_id)
                form_app = form.get_app()
                m_index, f_index = form_app.get_form_location(form.unique_id)
                form_copy = new_app_components[form_app._id].get_module(
                    m_index).get_form(f_index)
                event.form_unique_id = form_copy.unique_id

        def update_for_copy(handler):
            handler.active = False
            update_events(handler)

        if share_reminders:
            for handler in CaseReminderHandler.get_handlers(new_domain_name):
                apply_update(handler, update_for_copy)

        return new_domain

    def reminder_should_be_copied(self, handler):
        from corehq.apps.reminders.models import ON_DATETIME
        return (handler.start_condition_type != ON_DATETIME
                and handler.user_group_id is None)

    def copy_component(self, doc_type, id, new_domain_name, user=None):
        from corehq.apps.app_manager.models import import_app
        from corehq.apps.users.models import UserRole
        from corehq.apps.reminders.models import CaseReminderHandler
        from corehq.apps.fixtures.models import FixtureDataType, FixtureDataItem

        str_to_cls = {
            'UserRole': UserRole,
            'CaseReminderHandler': CaseReminderHandler,
            'FixtureDataType': FixtureDataType,
            'FixtureDataItem': FixtureDataItem,
        }
        if doc_type in ('Application', 'RemoteApp'):
            new_doc = import_app(id, new_domain_name)
            new_doc.copy_history.append(id)
            new_doc.case_sharing = False
            # when copying from app docs that don't have a
            # unique_id attribute on their modules
            new_doc.ensure_module_unique_ids(should_save=False)
        else:
            cls = str_to_cls[doc_type]
            db = cls.get_db()
            if doc_type == 'CaseReminderHandler':
                cur_doc = cls.get(id)
                if not self.reminder_should_be_copied(cur_doc):
                    return None

            new_id = db.copy_doc(id)['id']

            new_doc = cls.get(new_id)

            for field in self._dirty_fields:
                if hasattr(new_doc, field):
                    delattr(new_doc, field)

            if hasattr(cls, '_meta_fields'):
                for field in cls._meta_fields:
                    if not field.startswith('_') and hasattr(new_doc, field):
                        delattr(new_doc, field)

            new_doc.domain = new_domain_name

            if doc_type == 'FixtureDataType':
                new_doc.copy_from = id
                new_doc.is_global = True

        if self.is_snapshot and doc_type == 'Application':
            new_doc.prepare_multimedia_for_exchange()

        new_doc.save()
        return new_doc

    def save_snapshot(self, share_reminders, copy_by_id=None):
        if self.is_snapshot:
            return self
        else:
            try:
                copy = self.save_copy(copy_by_id=copy_by_id,
                                      share_reminders=share_reminders,
                                      share_user_roles=False)
            except NameUnavailableException:
                return None
            copy.is_snapshot = True
            head = self.snapshots(limit=1).first()
            if head and head.snapshot_head:
                head.snapshot_head = False
                head.save()
            copy.snapshot_head = True
            copy.snapshot_time = datetime.utcnow()
            del copy.deployment
            copy.save()
            return copy

    def snapshots(self, **view_kwargs):
        return Domain.view('domain/snapshots',
                           startkey=[self._id, {}],
                           endkey=[self._id],
                           include_docs=True,
                           reduce=False,
                           descending=True,
                           **view_kwargs)

    @memoized
    def published_snapshot(self):
        snapshots = self.snapshots().all()
        for snapshot in snapshots:
            if snapshot.published:
                return snapshot
        return None

    def update_deployment(self, **kwargs):
        self.deployment.update(kwargs)
        self.save()

    def update_internal(self, **kwargs):
        self.internal.update(kwargs)
        self.save()

    def display_name(self):
        if self.is_snapshot:
            return "Snapshot of %s" % self.copied_from.display_name()
        return self.hr_name or self.name

    def long_display_name(self):
        if self.is_snapshot:
            return format_html("Snapshot of {}",
                               self.copied_from.display_name())
        return self.hr_name or self.name

    __str__ = long_display_name

    def get_license_display(self):
        return LICENSES.get(self.license)

    def get_license_url(self):
        return LICENSE_LINKS.get(self.license)

    def copies(self):
        return Domain.view('domain/copied_from_snapshot',
                           key=self._id,
                           include_docs=True)

    def copies_of_parent(self):
        return Domain.view('domain/copied_from_snapshot',
                           keys=[s._id for s in self.copied_from.snapshots()],
                           include_docs=True)

    def delete(self):
        self._pre_delete()
        super(Domain, self).delete()

    def _pre_delete(self):
        from corehq.apps.domain.signals import commcare_domain_pre_delete
        from corehq.apps.domain.deletion import apply_deletion_operations

        dynamic_deletion_operations = []
        results = commcare_domain_pre_delete.send_robust(sender='domain',
                                                         domain=self)
        for result in results:
            response = result[1]
            if isinstance(response, Exception):
                raise DomainDeleteException(
                    u"Error occurred during domain pre_delete {}: {}".format(
                        self.name, str(response)))
            elif response:
                assert isinstance(response, list)
                dynamic_deletion_operations.extend(response)

        # delete all associated objects
        for db, related_doc_ids in get_all_doc_ids_for_domain_grouped_by_db(
                self.name):
            iter_bulk_delete(db, related_doc_ids, chunksize=500)

        apply_deletion_operations(self.name, dynamic_deletion_operations)

    def all_media(self, from_apps=None):  # todo add documentation or refactor
        from corehq.apps.hqmedia.models import CommCareMultimedia
        dom_with_media = self if not self.is_snapshot else self.copied_from

        if self.is_snapshot:
            app_ids = [
                app.copied_from.get_id for app in self.full_applications()
            ]
            if from_apps:
                from_apps = set(
                    [a_id for a_id in app_ids if a_id in from_apps])
            else:
                from_apps = app_ids

        if from_apps:
            media = []
            media_ids = set()
            apps = [
                app for app in dom_with_media.full_applications()
                if app.get_id in from_apps
            ]
            for app in apps:
                if app.doc_type != 'Application':
                    continue
                for _, m in app.get_media_objects():
                    if m.get_id not in media_ids:
                        media.append(m)
                        media_ids.add(m.get_id)
            return media

        return CommCareMultimedia.view('hqmedia/by_domain',
                                       key=dom_with_media.name,
                                       include_docs=True).all()

    def most_restrictive_licenses(self, apps_to_check=None):
        from corehq.apps.hqmedia.utils import most_restrictive
        licenses = [
            m.license['type'] for m in self.all_media(from_apps=apps_to_check)
            if m.license
        ]
        return most_restrictive(licenses)

    @classmethod
    def get_module_by_name(cls, domain_name):
        """
        import and return the python module corresponding to domain_name, or
        None if it doesn't exist.
        """
        from corehq.apps.domain.utils import get_domain_module_map
        module_name = get_domain_module_map().get(domain_name, domain_name)

        try:
            return import_module(module_name) if module_name else None
        except ImportError:
            return None

    @property
    @memoized
    def commtrack_settings(self):
        # this import causes some dependency issues so lives in here
        from corehq.apps.commtrack.models import CommtrackConfig
        if self.commtrack_enabled:
            return CommtrackConfig.for_domain(self.name)
        else:
            return None

    @property
    def has_custom_logo(self):
        return (self['_attachments']
                and LOGO_ATTACHMENT in self['_attachments'])

    def get_custom_logo(self):
        if not self.has_custom_logo:
            return None

        return (self.fetch_attachment(LOGO_ATTACHMENT),
                self['_attachments'][LOGO_ATTACHMENT]['content_type'])

    def get_case_display(self, case):
        """Get the properties display definition for a given case"""
        return self.case_display.case_details.get(case.type)

    def get_form_display(self, form):
        """Get the properties display definition for a given XFormInstance"""
        return self.case_display.form_details.get(form.xmlns)

    @property
    def total_downloads(self):
        """
            Returns the total number of downloads from every snapshot created from this domain
        """
        from corehq.apps.domain.dbaccessors import count_downloads_for_all_snapshots
        return count_downloads_for_all_snapshots(self.get_id)

    @property
    @memoized
    def download_count(self):
        """
            Updates and returns the total number of downloads from every sister snapshot.
        """
        if self.is_snapshot:
            self.full_downloads = self.copied_from.total_downloads
        return self.full_downloads

    @property
    @memoized
    def published_by(self):
        from corehq.apps.users.models import CouchUser
        pb_id = self.cda.user_id
        return CouchUser.get_by_user_id(pb_id) if pb_id else None

    @property
    def name_of_publisher(self):
        return self.published_by.human_friendly_name if self.published_by else ""

    @property
    def location_types(self):
        from corehq.apps.locations.models import LocationType
        return LocationType.objects.filter(domain=self.name).all()

    @memoized
    def has_privilege(self, privilege):
        from corehq.apps.accounting.utils import domain_has_privilege
        return domain_has_privilege(self, privilege)

    @property
    @memoized
    def uses_locations(self):
        from corehq import privileges
        from corehq.apps.locations.models import LocationType
        return (self.has_privilege(privileges.LOCATIONS) and
                (self.commtrack_enabled
                 or LocationType.objects.filter(domain=self.name).exists()))

    @property
    def supports_multiple_locations_per_user(self):
        """
        This method is a wrapper around the toggle that
        enables multiple location functionality. Callers of this
        method should know that this is special functionality
        left around for special applications, and not a feature
        flag that should be set normally.
        """
        return toggles.MULTIPLE_LOCATIONS_PER_USER.enabled(self.name)

    def convert_to_commtrack(self):
        """
        One-stop-shop to make a domain CommTrack
        """
        from corehq.apps.commtrack.util import make_domain_commtrack
        make_domain_commtrack(self)

    def clear_caches(self):
        from .utils import domain_restricts_superusers
        super(Domain, self).clear_caches()
        self.get_by_name.clear(self.__class__, self.name)
        self.is_secure_session_required.clear(self.name)
        domain_restricts_superusers.clear(self.name)