class Domain(QuickCachedDocumentMixin, BlobMixin, Document, SnapshotMixin):
    """
    Domain is the highest level collection of people/stuff in the system.
    Pretty much everything happens at the domain-level, including user
    membership, permission to see data, reports, charts, etc.

    Exceptions: accounting has some models that combine multiple domains,
    which make "enterprise" multi-domain features like the enterprise
    dashboard possible.

    Naming conventions:
    Most often, variables representing domain names are named `domain`, and
    variables representing domain objects are named `domain_obj`. New code
    should follow this convention, unless it's in an area that consistently
    uses `domain` for the object and `domain_name` for the string.

    There's a `project` attribute attached to requests that's a domain
    object. In spite of this, don't use `project` in new code.
    """

    _blobdb_type_code = BLOB_CODES.domain

    name = StringProperty()
    is_active = BooleanProperty()
    date_created = DateTimeProperty()
    default_timezone = StringProperty(default=getattr(settings, "TIME_ZONE", "UTC"))
    default_geocoder_location = DictProperty()
    case_sharing = BooleanProperty(default=False)
    secure_submissions = BooleanProperty(default=False)
    cloudcare_releases = StringProperty(choices=['stars', 'nostars', 'default'], default='default')
    organization = StringProperty()
    hr_name = StringProperty()  # the human-readable name for this project
    project_description = StringProperty()  # Brief description of the project
    creating_user = StringProperty()  # username of the user who created this domain

    # domain metadata
    project_type = StringProperty()  # e.g. MCH, HIV
    customer_type = StringProperty()  # plus, full, etc.
    is_test = StringProperty(choices=["true", "false", "none"], default="none")
    description = StringProperty()
    short_description = StringProperty()
    is_shared = BooleanProperty(default=False)
    commtrack_enabled = BooleanProperty(default=False)
    call_center_config = SchemaProperty(CallCenterProperties)
    restrict_superusers = BooleanProperty(default=False)
    allow_domain_requests = BooleanProperty(default=False)
    location_restriction_for_users = BooleanProperty(default=False)
    usercase_enabled = BooleanProperty(default=False)
    hipaa_compliant = BooleanProperty(default=False)
    use_livequery = BooleanProperty(default=False)
    use_sql_backend = BooleanProperty(default=False)
    first_domain_for_user = BooleanProperty(default=False)

    # CommConnect settings
    survey_management_enabled = BooleanProperty(default=False)
    # Whether or not a case can register via sms
    sms_case_registration_enabled = BooleanProperty(default=False)
    # Case type to apply to cases registered via sms
    sms_case_registration_type = StringProperty()
    # Owner to apply to cases registered via sms
    sms_case_registration_owner_id = StringProperty()
    # Submitting user to apply to cases registered via sms
    sms_case_registration_user_id = StringProperty()
    # Whether or not a mobile worker can register via sms
    sms_mobile_worker_registration_enabled = BooleanProperty(default=False)
    use_default_sms_response = BooleanProperty(default=False)
    default_sms_response = StringProperty()
    chat_message_count_threshold = IntegerProperty()
    sms_language_fallback = StringProperty()
    custom_chat_template = StringProperty()  # See settings.CUSTOM_CHAT_TEMPLATES
    custom_case_username = StringProperty()  # Case property to use when showing the case's name in a chat window

    # If empty, sms can be sent at any time. Otherwise, only send during
    # these windows of time. SMS_QUEUE_ENABLED must be True in localsettings
    # for this be considered.
    restricted_sms_times = SchemaListProperty(DayTimeWindow)

    # If empty, this is ignored. Otherwise, the framework will make sure
    # that during these days/times, no automated outbound sms will be sent
    # to someone if they have sent in an sms within sms_conversation_length
    # minutes. Outbound sms sent from a user in a chat window, however, will
    # still be sent. This is meant to prevent chat conversations from being
    # interrupted by automated sms reminders.
    # SMS_QUEUE_ENABLED must be True in localsettings for this to be
    # considered.
    sms_conversation_times = SchemaListProperty(DayTimeWindow)

    # In minutes, see above.
    sms_conversation_length = IntegerProperty(default=10)

    # Set to True to prevent survey questions and answers form being seen in
    # SMS chat windows.
    filter_surveys_from_chat = BooleanProperty(default=False)

    # The below option only matters if filter_surveys_from_chat = True.
    # If set to True, invalid survey responses will still be shown in the chat
    # window, while questions and valid responses will be filtered out.
    show_invalid_survey_responses_in_chat = BooleanProperty(default=False)

    # If set to True, if a message is read by anyone it counts as being read by
    # everyone. Set to False so that a message is only counted as being read
    # for a user if only that user has read it.
    count_messages_as_read_by_anyone = BooleanProperty(default=False)

    enable_registration_welcome_sms_for_case = BooleanProperty(default=False)
    enable_registration_welcome_sms_for_mobile_worker = BooleanProperty(default=False)
    sms_survey_date_format = StringProperty()

    granted_messaging_access = BooleanProperty(default=False)

    # Allowed outbound SMS per day
    # If this is None, then the default is applied. See get_daily_outbound_sms_limit()
    custom_daily_outbound_sms_limit = IntegerProperty()

    # Twilio Whatsapp-enabled phone number
    twilio_whatsapp_phone_number = StringProperty()

    # Allowed number of case updates or closes from automatic update rules in the daily rule run.
    # If this value is None, the value in settings.MAX_RULE_UPDATES_IN_ONE_RUN is used.
    auto_case_update_limit = IntegerProperty()

    # Time to run auto case update rules. Expected values are 0-23.
    # If this value is None, the value in settings.RULE_UPDATE_HOUR is used.
    auto_case_update_hour = IntegerProperty()

    # Allowed number of max OData feeds that this domain can create.
    # If this value is None, the value in settings.DEFAULT_ODATA_FEED_LIMIT is used
    odata_feed_limit = IntegerProperty()

    # exchange/domain copying stuff
    is_snapshot = BooleanProperty(default=False)
    is_approved = BooleanProperty(default=False)
    snapshot_time = DateTimeProperty()
    published = BooleanProperty(default=False)
    license = StringProperty(choices=LICENSES, default='cc')
    title = StringProperty()
    cda = SchemaProperty(LicenseAgreement)
    multimedia_included = BooleanProperty(default=True)
    downloads = IntegerProperty(default=0)  # number of downloads for this specific snapshot
    full_downloads = IntegerProperty(default=0)  # number of downloads for all snapshots from this domain
    author = StringProperty()
    phone_model = StringProperty()
    attribution_notes = StringProperty()
    publisher = StringProperty(choices=["organization", "user"], default="user")
    yt_id = StringProperty()
    snapshot_head = BooleanProperty(default=False)

    deployment = SchemaProperty(Deployment)

    cached_properties = DictProperty()

    internal = SchemaProperty(InternalProperties)

    dynamic_reports = SchemaListProperty(DynamicReportSet)

    # extra user specified properties
    tags = StringListProperty()
    area = StringProperty(choices=AREA_CHOICES)
    sub_area = StringProperty(choices=SUB_AREA_CHOICES)
    # BUGFIX: was `launch_date = DateTimeProperty` (no parentheses), which bound
    # the property *class* itself instead of declaring a schema property, so
    # launch_date was never a real document field.
    launch_date = DateTimeProperty()

    last_modified = DateTimeProperty(default=datetime(2015, 1, 1))

    # when turned on, use settings.SECURE_TIMEOUT for sessions of users who are members of this domain
    secure_sessions = BooleanProperty(default=False)
    secure_sessions_timeout = IntegerProperty()

    two_factor_auth = BooleanProperty(default=False)
    strong_mobile_passwords = BooleanProperty(default=False)

    requested_report_builder_subscription = StringListProperty()
    report_whitelist = StringListProperty()

    # seconds between sending mobile UCRs to users. Can be overridden per user
    default_mobile_ucr_sync_interval = IntegerProperty()

    ga_opt_out = BooleanProperty(default=False)

    @classmethod
    def wrap(cls, data):
        """Migrate legacy document shapes on read.

        Saves the document back only when a migration changed persisted data
        (tracked via ``should_save``).
        """
        # for domains that still use original_doc
        should_save = False
        if 'original_doc' in data:
            original_doc = data['original_doc']
            del data['original_doc']
            should_save = True
            if original_doc:
                original_doc = Domain.get_by_name(original_doc)
                data['copy_history'] = [original_doc._id]

        # for domains that have a public domain license
        if 'license' in data:
            if data.get("license", None) == "public":
                data["license"] = "cc"
                should_save = True

        if 'slug' in data and data["slug"]:
            data["hr_name"] = data["slug"]
            del data["slug"]

        if 'is_test' in data and isinstance(data["is_test"], bool):
            data["is_test"] = "true" if data["is_test"] else "false"
            should_save = True

        if 'cloudcare_releases' not in data:
            data['cloudcare_releases'] = 'nostars'  # legacy default setting

        # Don't actually remove location_types yet. We can migrate fully and
        # remove this after everything's hunky-dory in production. 2015-03-06
        if 'location_types' in data:
            data['obsolete_location_types'] = data.pop('location_types')

        if 'granted_messaging_access' not in data:
            # enable messaging for domains created before this flag was added
            data['granted_messaging_access'] = True

        self = super(Domain, cls).wrap(data)
        if self.deployment is None:
            self.deployment = Deployment()
        if should_save:
            self.save()
        return self

    def get_default_timezone(self):
        """return a timezone object from self.default_timezone"""
        import pytz
        return pytz.timezone(self.default_timezone)

    @staticmethod
    @quickcache(['name'], timeout=24 * 60 * 60)
    def is_secure_session_required(name):
        """Whether the named domain enforces secure (shorter) sessions."""
        domain_obj = Domain.get_by_name(name)
        return domain_obj and domain_obj.secure_sessions

    @staticmethod
    @quickcache(['name'], timeout=24 * 60 * 60)
    def secure_timeout(name):
        """Session timeout for the named domain, or None if the domain does
        not exist or does not use secure sessions."""
        domain_obj = Domain.get_by_name(name)
        if not domain_obj:
            return None

        if domain_obj.secure_sessions:
            if toggles.SECURE_SESSION_TIMEOUT.enabled(name):
                return domain_obj.secure_sessions_timeout or settings.SECURE_TIMEOUT
            return settings.SECURE_TIMEOUT

        return None

    @staticmethod
    @quickcache(['couch_user._id', 'is_active'], timeout=5*60, memoize_timeout=10)
    def active_for_couch_user(couch_user, is_active=True):
        """All (in)active Domain docs the given CouchUser is a member of."""
        domain_names = couch_user.get_domains()
        return Domain.view(
            "domain/by_status",
            keys=[[is_active, d] for d in domain_names],
            reduce=False,
            include_docs=True,
        ).all()

    @staticmethod
    def active_for_user(user, is_active=True):
        """Like active_for_couch_user, but accepts a Django user too."""
        if isinstance(user, AnonymousUser):
            return []
        from corehq.apps.users.models import CouchUser
        if isinstance(user, CouchUser):
            couch_user = user
        else:
            couch_user = CouchUser.from_django_user(user)
        if couch_user:
            return Domain.active_for_couch_user(couch_user, is_active=is_active)
        else:
            return []

    def add(self, model_instance, is_active=True):
        """
        Add something to this domain, through the generic relation.
        Returns the created membership object
        """
        # Add membership info to Couch
        couch_user = model_instance.get_profile().get_couch_user()
        couch_user.add_domain_membership(self.name)
        couch_user.save()

    def applications(self):
        """Brief (light-weight) app docs for this domain."""
        return get_brief_apps_in_domain(self.name)

    def full_applications(self, include_builds=True):
        """Full application docs for this domain, optionally including builds."""
        from corehq.apps.app_manager.util import get_correct_app_class
        from corehq.apps.app_manager.models import Application

        def wrap_application(a):
            return get_correct_app_class(a['doc']).wrap(a['doc'])

        # builds sort after the [name, None] key range in this view
        if include_builds:
            startkey = [self.name]
            endkey = [self.name, {}]
        else:
            startkey = [self.name, None]
            endkey = [self.name, None, {}]

        return Application.get_db().view(
            'app_manager/applications',
            startkey=startkey,
            endkey=endkey,
            include_docs=True,
            wrapper=wrap_application,
        ).all()

    @cached_property
    def versions(self):
        apps = self.applications()
        return list(set(a.application_version for a in apps))

    @cached_property
    def has_media(self):
        from corehq.apps.app_manager.util import is_remote_app
        for app in self.full_applications():
            if not is_remote_app(app) and app.has_media():
                return True
        return False

    @property
    def use_cloudcare_releases(self):
        return self.cloudcare_releases != 'nostars'

    def all_users(self):
        from corehq.apps.users.models import CouchUser
        return CouchUser.by_domain(self.name)

    def recent_submissions(self):
        return domain_has_submission_in_last_30_days(self.name)

    @classmethod
    @quickcache(['name'], skip_arg='strict', timeout=30*60, session_function=icds_conditional_session_key())
    def get_by_name(cls, name, strict=False):
        if not name:
            # get_by_name should never be called with name as None (or '', etc)
            # I fixed the code in such a way that if I raise a ValueError
            # all tests pass and basic pages load,
            # but in order not to break anything in the wild,
            # I'm opting to notify by email if/when this happens
            # but fall back to the previous behavior of returning None
            if settings.DEBUG:
                raise ValueError('%r is not a valid domain name' % name)
            else:
                _assert = soft_assert(notify_admins=True, exponential_backoff=False)
                _assert(False, '%r is not a valid domain name' % name)
                return None

        def _get_by_name(stale=False):
            extra_args = {'stale': settings.COUCH_STALE_QUERY} if stale else {}
            result = cls.view("domain/domains", key=name, reduce=False,
                              include_docs=True, **extra_args).first()
            if not isinstance(result, Domain):
                # A stale view may return a result with no doc if the doc has just been deleted.
                # In this case couchdbkit just returns the raw view result as a dict
                return None
            else:
                return result

        domain = _get_by_name(stale=(not strict))
        if domain is None and not strict:
            # on the off chance this is a brand new domain, try with strict
            domain = _get_by_name(stale=False)
        return domain

    @classmethod
    def get_or_create_with_name(cls, name, is_active=False,
                                secure_submissions=True,
                                use_sql_backend=False):
        """Return the existing domain with this name, or create and save one."""
        result = cls.view("domain/domains", key=name, reduce=False,
                          include_docs=True).first()
        if result:
            return result
        else:
            new_domain = Domain(
                name=name,
                is_active=is_active,
                date_created=datetime.utcnow(),
                secure_submissions=secure_submissions,
                use_livequery=True,
                use_sql_backend=use_sql_backend,
            )
            new_domain.save(**get_safe_write_kwargs())
            return new_domain

    @classmethod
    def generate_name(cls, hr_name, max_length=25):
        '''
        Generate a URL-friendly name based on a given human-readable name.
        Normalizes given name, then looks for conflicting domains, addressing
        conflicts by adding "-1", "-2", etc. May return None if it fails to
        generate a new, unique name. Throws exception if it can't figure out
        a name, which shouldn't happen unless max_length is absurdly short.
        '''
        from corehq.apps.domain.utils import get_domain_url_slug
        from corehq.apps.domain.dbaccessors import domain_or_deleted_domain_exists
        name = get_domain_url_slug(hr_name, max_length=max_length)
        if not name:
            raise NameUnavailableException
        if domain_or_deleted_domain_exists(name):
            prefix = name
            # progressively shorten the prefix to make room for a "-N" suffix
            while len(prefix):
                name = next_available_name(prefix, Domain.get_names_by_prefix(prefix + '-'))
                if domain_or_deleted_domain_exists(name):
                    # should never happen
                    raise NameUnavailableException
                if len(name) <= max_length:
                    return name
                prefix = prefix[:-1]
            raise NameUnavailableException
        return name

    @classmethod
    def get_all(cls, include_docs=True):
        # returns a lazy map when include_docs=True
        domains = Domain.view("domain/not_snapshots", include_docs=False).all()
        if not include_docs:
            return domains
        else:
            return map(cls.wrap, iter_docs(cls.get_db(), [d['id'] for d in domains]))

    @classmethod
    def get_all_names(cls):
        return sorted({d['key'] for d in cls.get_all(include_docs=False)})

    @classmethod
    def get_all_ids(cls):
        return [d['id'] for d in cls.get_all(include_docs=False)]

    @classmethod
    def get_names_by_prefix(cls, prefix):
        """Names of live and deleted domains starting with `prefix`."""
        return [d['key'] for d in Domain.view(
            "domain/domains",
            startkey=prefix,
            endkey=prefix + "zzz",
            reduce=False,
            include_docs=False
        ).all()] + [d['key'] for d in Domain.view(
            "domain/deleted_domains",
            startkey=prefix,
            endkey=prefix + "zzz",
            reduce=False,
            include_docs=False
        ).all()]

    def case_sharing_included(self):
        """True if the domain or any of its applications enable case sharing."""
        # any() replaces the original functools.reduce or-fold; same result,
        # but idiomatic and short-circuiting.
        return self.case_sharing or any(
            getattr(app, 'case_sharing', False) for app in self.applications()
        )

    def save(self, **params):
        from corehq.apps.domain.dbaccessors import domain_or_deleted_domain_exists
        self.last_modified = datetime.utcnow()
        if not self._rev:
            # brand new domain (no couch revision yet): enforce name uniqueness
            if domain_or_deleted_domain_exists(self.name):
                raise NameUnavailableException(self.name)
            # mark any new domain as timezone migration complete
            set_tz_migration_complete(self.name)
        super(Domain, self).save(**params)

        from corehq.apps.domain.signals import commcare_domain_post_save
        results = commcare_domain_post_save.send_robust(sender='domain', domain=self)
        log_signal_errors(results, "Error occurred during domain post_save (%s)", {'domain': self.name})

    def snapshots(self, **view_kwargs):
        return Domain.view(
            'domain/snapshots',
            startkey=[self._id, {}],
            endkey=[self._id],
            include_docs=True,
            reduce=False,
            descending=True,
            **view_kwargs
        )

    def update_deployment(self, **kwargs):
        self.deployment.update(kwargs)
        self.save()

    def update_internal(self, **kwargs):
        self.internal.update(kwargs)
        self.save()

    def display_name(self):
        if self.is_snapshot:
            return "Snapshot of %s" % self.copied_from.display_name()
        return self.hr_name or self.name

    __str__ = display_name

    def get_license_display(self):
        return LICENSES.get(self.license)

    def get_license_url(self):
        return LICENSE_LINKS.get(self.license)

    def copies(self):
        """Domains copied from this snapshot."""
        return Domain.view('domain/copied_from_snapshot', key=self._id, include_docs=True)

    def copies_of_parent(self):
        """Domains copied from any snapshot of this domain's parent."""
        return Domain.view(
            'domain/copied_from_snapshot',
            keys=[s._id for s in self.copied_from.snapshots()],
            include_docs=True,
        )

    def delete(self, leave_tombstone=False):
        """Delete the domain and all its data.

        Outside of tests, a tombstone ('-Deleted' doc_type) must be left so
        the name stays reserved and stale references can be detected.
        """
        if not leave_tombstone and not settings.UNIT_TESTING:
            raise ValueError(
                'Cannot delete domain without leaving a tombstone except during testing')
        self._pre_delete()
        if leave_tombstone:
            domain = self.get(self._id)
            if not domain.doc_type.endswith('-Deleted'):
                domain.doc_type = '{}-Deleted'.format(domain.doc_type)
                domain.save()
        else:
            super().delete()

        # The save signals can undo effect of clearing the cache within the save
        # because they query the stale view (but attaches the up to date doc).
        # This is only a problem on delete/soft-delete,
        # because these change the presence in the index, not just the doc content.
        # Since this is rare, I'm opting to just re-clear the cache here
        # rather than making the signals use a strict lookup or something like that.
        self.clear_caches()

    def _pre_delete(self):
        from corehq.apps.domain.deletion import apply_deletion_operations

        # delete SQL models first because UCR tables are indexed by configs in couch
        apply_deletion_operations(self.name)

        # delete couch docs
        for db, related_doc_ids in get_all_doc_ids_for_domain_grouped_by_db(self.name):
            iter_bulk_delete(db, related_doc_ids, chunksize=500)

    @property
    @memoized
    def commtrack_settings(self):
        # this import causes some dependency issues so lives in here
        from corehq.apps.commtrack.models import SQLCommtrackConfig
        if self.commtrack_enabled:
            return SQLCommtrackConfig.for_domain(self.name)
        else:
            return None

    @property
    def has_custom_logo(self):
        return self.has_attachment(LOGO_ATTACHMENT)

    def get_custom_logo(self):
        """Return (logo bytes, content type), or None if no custom logo."""
        if not self.has_custom_logo:
            return None

        return (
            self.fetch_attachment(LOGO_ATTACHMENT),
            self.blobs[LOGO_ATTACHMENT].content_type
        )

    def put_attachment(self, *args, **kw):
        return super(Domain, self).put_attachment(domain=self.name, *args, **kw)

    @property
    def location_types(self):
        from corehq.apps.locations.models import LocationType
        return LocationType.objects.filter(domain=self.name).all()

    @memoized
    def has_privilege(self, privilege):
        from corehq.apps.accounting.utils import domain_has_privilege
        return domain_has_privilege(self, privilege)

    @property
    @memoized
    def uses_locations(self):
        from corehq import privileges
        from corehq.apps.locations.models import LocationType
        return (self.has_privilege(privileges.LOCATIONS)
                and (self.commtrack_enabled
                     or LocationType.objects.filter(domain=self.name).exists()))

    def convert_to_commtrack(self):
        """
        One-stop-shop to make a domain CommTrack
        """
        from corehq.apps.commtrack.util import make_domain_commtrack
        make_domain_commtrack(self)

    def clear_caches(self):
        from .utils import domain_restricts_superusers
        super(Domain, self).clear_caches()
        self.get_by_name.clear(self.__class__, self.name)
        self.is_secure_session_required.clear(self.name)
        self.secure_timeout.clear(self.name)
        domain_restricts_superusers.clear(self.name)

    def get_daily_outbound_sms_limit(self):
        """Per-domain daily outbound SMS cap; custom value wins over default."""
        if self.custom_daily_outbound_sms_limit:
            return self.custom_daily_outbound_sms_limit

        # https://manage.dimagi.com/default.asp?274299
        return 50000
class DataSourceConfiguration(CachedCouchDocumentMixin, Document, AbstractUCRDataSource):
    """
    A data source configuration. These map 1:1 with database tables that get created.
    Each data source can back an arbitrary number of reports.
    """
    domain = StringProperty(required=True)
    engine_id = StringProperty(default=UCR_ENGINE_ID)
    backend_id = StringProperty(default=UCR_SQL_BACKEND)  # no longer used
    referenced_doc_type = StringProperty(required=True)
    table_id = StringProperty(required=True)
    display_name = StringProperty()
    base_item_expression = DictProperty()
    configured_filter = DictProperty()
    configured_indicators = ListProperty()
    named_expressions = DictProperty()
    named_filters = DictProperty()
    meta = SchemaProperty(DataSourceMeta)
    is_deactivated = BooleanProperty(default=False)
    last_modified = DateTimeProperty()
    asynchronous = BooleanProperty(default=False)
    sql_column_indexes = SchemaListProperty(SQLColumnIndexes)
    disable_destructive_rebuild = BooleanProperty(default=False)
    sql_settings = SchemaProperty(SQLSettings)
    validations = SchemaListProperty(Validation)
    mirrored_engine_ids = ListProperty(default=[])

    class Meta(object):
        # prevent JsonObject from auto-converting dates etc.
        string_conversions = ()

    def __str__(self):
        return '{} - {}'.format(self.domain, self.display_name)

    def save(self, **params):
        # stamp modification time on every write
        self.last_modified = datetime.utcnow()
        super(DataSourceConfiguration, self).save(**params)

    @property
    def data_source_id(self):
        return self._id

    def filter(self, document, eval_context=None):
        """Whether `document` belongs in this data source."""
        if eval_context is None:
            eval_context = EvaluationContext(document)
        filter_fn = self._get_main_filter()
        return filter_fn(document, eval_context)

    def deleted_filter(self, document):
        """Whether `document` matches this data source's deleted doc types."""
        filter_fn = self._get_deleted_filter()
        return filter_fn and filter_fn(document, EvaluationContext(document, 0))

    @property
    def has_validations(self):
        return len(self.validations) > 0

    def validate_document(self, document, eval_context=None):
        """Run configured validations; raises ValidationError with all failures."""
        if eval_context is None:
            eval_context = EvaluationContext(document)

        errors = []
        for validation in self._validations():
            if validation.validation_function(document, eval_context) is False:
                errors.append((validation.name, validation.error_message))

        if errors:
            raise ValidationError(errors)

    @memoized
    def _validations(self):
        return [
            _Validation(
                validation.name,
                validation.error_message,
                FilterFactory.from_spec(validation.expression, context=self.get_factory_context())
            )
            for validation in self.validations
        ]

    @memoized
    def _get_main_filter(self):
        return self._get_filter([self.referenced_doc_type])

    @memoized
    def _get_deleted_filter(self):
        return self._get_filter(get_deleted_doc_types(self.referenced_doc_type),
                                include_configured=False)

    def _get_filter(self, doc_types, include_configured=True):
        """Build the compound filter: domain match AND doc_type match
        (AND the configured filter, when requested)."""
        if not doc_types:
            return None

        extras = (
            [self.configured_filter]
            if include_configured and self.configured_filter else []
        )
        built_in_filters = [
            self._get_domain_filter_spec(),
            {
                'type': 'or',
                'filters': [
                    {
                        "type": "boolean_expression",
                        "expression": {
                            "type": "property_name",
                            "property_name": "doc_type",
                        },
                        "operator": "eq",
                        "property_value": doc_type,
                    }
                    for doc_type in doc_types
                ],
            },
        ]
        return FilterFactory.from_spec(
            {
                'type': 'and',
                'filters': built_in_filters + extras,
            },
            context=self.get_factory_context(),
        )

    def _get_domain_filter_spec(self):
        return {
            "type": "boolean_expression",
            "expression": {
                "type": "property_name",
                "property_name": "domain",
            },
            "operator": "eq",
            "property_value": self.domain,
        }

    @property
    @memoized
    def named_expression_objects(self):
        """Resolve named expression specs, allowing them to reference each
        other; iterates until a full pass makes no progress, then fails with
        the last BadSpecError."""
        named_expression_specs = deepcopy(self.named_expressions)
        named_expressions = {}
        spec_error = None
        while named_expression_specs:
            number_generated = 0
            for name, expression in list(named_expression_specs.items()):
                try:
                    named_expressions[name] = ExpressionFactory.from_spec(
                        expression,
                        FactoryContext(named_expressions=named_expressions, named_filters={})
                    )
                    number_generated += 1
                    del named_expression_specs[name]
                except BadSpecError as bad_spec_error:
                    # maybe a nested name resolution issue, try again on the next pass
                    spec_error = bad_spec_error
            if number_generated == 0 and named_expression_specs:
                # we unsuccessfully generated anything on this pass and there are still unresolved
                # references. we have to fail.
                assert spec_error is not None
                raise spec_error
        return named_expressions

    @property
    @memoized
    def named_filter_objects(self):
        return {
            name: FilterFactory.from_spec(filter, FactoryContext(self.named_expression_objects, {}))
            for name, filter in self.named_filters.items()
        }

    def get_factory_context(self):
        return FactoryContext(self.named_expression_objects, self.named_filter_objects)

    @property
    @memoized
    def default_indicators(self):
        """Indicators every data source gets: doc_id, inserted_at, and (for
        repeating data) the repeat iteration number."""
        default_indicators = [IndicatorFactory.from_spec({
            "column_id": "doc_id",
            "type": "expression",
            "display_name": "document id",
            "datatype": "string",
            "is_nullable": False,
            "is_primary_key": True,
            "expression": {
                "type": "root_doc",
                "expression": {
                    "type": "property_name",
                    "property_name": "_id"
                }
            }
        }, self.get_factory_context())]

        default_indicators.append(IndicatorFactory.from_spec({
            "type": "inserted_at",
        }, self.get_factory_context()))

        if self.base_item_expression:
            default_indicators.append(IndicatorFactory.from_spec({
                "type": "repeat_iteration",
            }, self.get_factory_context()))

        return default_indicators

    @property
    @memoized
    def indicators(self):
        return CompoundIndicator(
            self.display_name,
            self.default_indicators + [
                IndicatorFactory.from_spec(indicator, self.get_factory_context())
                for indicator in self.configured_indicators
            ],
            None,
        )

    @property
    @memoized
    def parsed_expression(self):
        if self.base_item_expression:
            return ExpressionFactory.from_spec(self.base_item_expression,
                                               context=self.get_factory_context())
        return None

    @memoized
    def get_columns(self):
        return self.indicators.get_columns()

    @property
    @memoized
    def columns_by_id(self):
        return {c.id: c for c in self.get_columns()}

    def get_column_by_id(self, column_id):
        return self.columns_by_id.get(column_id)

    def get_items(self, document, eval_context=None):
        """The row-generating items for `document`: the doc itself, or the
        items produced by base_item_expression for repeating data."""
        if self.filter(document, eval_context):
            if not self.base_item_expression:
                return [document]
            else:
                result = self.parsed_expression(document, eval_context)
                if result is None:
                    return []
                elif isinstance(result, list):
                    return result
                else:
                    return [result]
        else:
            return []

    def get_all_values(self, doc, eval_context=None):
        """All indicator rows for `doc`. Documents that fail validation are
        recorded in InvalidUCRData and produce no rows."""
        if not eval_context:
            eval_context = EvaluationContext(doc)

        if self.has_validations:
            try:
                self.validate_document(doc, eval_context)
            except ValidationError as e:
                for error in e.errors:
                    InvalidUCRData.objects.get_or_create(
                        doc_id=doc['_id'],
                        indicator_config_id=self._id,
                        validation_name=error[0],
                        defaults={
                            'doc_type': doc['doc_type'],
                            'domain': doc['domain'],
                            'validation_text': error[1],
                        })
                return []

        rows = []
        for item in self.get_items(doc, eval_context):
            indicators = self.indicators.get_values(item, eval_context)
            rows.append(indicators)
            eval_context.increment_iteration()

        return rows

    def get_report_count(self):
        """
        Return the number of ReportConfigurations that reference this data source.
        """
        return ReportConfiguration.count_by_data_source(self.domain, self._id)

    def validate_db_config(self):
        mirrored_engine_ids = self.mirrored_engine_ids
        if not mirrored_engine_ids:
            return
        if self.engine_id in mirrored_engine_ids:
            raise BadSpecError("mirrored_engine_ids list should not contain engine_id")

        for engine_id in mirrored_engine_ids:
            if not connection_manager.engine_id_is_available(engine_id):
                # typo fix: message previously read "availble"
                raise BadSpecError(
                    "DB for engine_id {} is not available".format(engine_id))

        if not connection_manager.resolves_to_unique_dbs(mirrored_engine_ids + [self.engine_id]):
            raise BadSpecError("No two engine_ids should point to the same database")

    def validate(self, required=True):
        super(DataSourceConfiguration, self).validate(required)
        # these two properties implicitly call other validation
        self._get_main_filter()
        self._get_deleted_filter()

        # validate indicators and column uniqueness
        columns = [c.id for c in self.indicators.get_columns()]
        unique_columns = set(columns)
        if len(columns) != len(unique_columns):
            # removing one occurrence of each id leaves only the duplicates
            for column in set(columns):
                columns.remove(column)
            raise DuplicateColumnIdError(columns=columns)

        if self.referenced_doc_type not in VALID_REFERENCED_DOC_TYPES:
            raise BadSpecError(
                _('Report contains invalid referenced_doc_type: {}').format(self.referenced_doc_type))

        # accessing these implicitly validates them
        self.parsed_expression
        self.pk_columns

    @classmethod
    def by_domain(cls, domain):
        return get_datasources_for_domain(domain)

    @classmethod
    def all_ids(cls):
        return [res['id'] for res in cls.get_db().view(
            'userreports/data_sources_by_build_info',
            reduce=False, include_docs=False)]

    @classmethod
    def all(cls):
        for result in iter_docs(cls.get_db(), cls.all_ids()):
            yield cls.wrap(result)

    @property
    def is_static(self):
        return id_is_static(self._id)

    def deactivate(self, initiated_by=None):
        """Mark a dynamic data source inactive and drop its table; no-op for
        static data sources."""
        if not self.is_static:
            self.is_deactivated = True
            self.save()
            get_indicator_adapter(self).drop_table(
                initiated_by=initiated_by, source='deactivate-data-source')

    def get_case_type_or_xmlns_filter(self):
        """Returns a list of case types or xmlns from the filter of this data source.

        If this can't figure out the case types or xmlns's that filter, then returns [None]
        Currently always returns a list because it is called by a loop in _iteratively_build_table
        Could be reworked to return [] to be more pythonic
        """
        if self.referenced_doc_type not in FILTER_INTERPOLATION_DOC_TYPES:
            return [None]

        property_name = FILTER_INTERPOLATION_DOC_TYPES[self.referenced_doc_type]
        prop_value = self._filter_interploation_helper(self.configured_filter, property_name)

        return prop_value or [None]

    # NOTE(review): method name misspells "interpolation"; kept as-is to avoid
    # breaking external callers/tests that reference it.
    def _filter_interploation_helper(self, config_filter, property_name):
        filter_type = config_filter.get('type')
        if filter_type == 'and':
            sub_config_filters = [
                self._filter_interploation_helper(f, property_name)
                for f in config_filter.get('filters')
            ]
            for filter_ in sub_config_filters:
                if filter_[0]:
                    return filter_

        if filter_type != 'boolean_expression':
            return [None]

        if config_filter['operator'] not in ('eq', 'in'):
            return [None]

        expression = config_filter['expression']

        if expression['type'] == 'property_name' and expression['property_name'] == property_name:
            prop_value = config_filter['property_value']
            if not isinstance(prop_value, list):
                prop_value = [prop_value]
            return prop_value
        return [None]

    @property
    def pk_columns(self):
        """Primary-key column names; must agree with sql_settings.primary_key
        when that is set."""
        columns = []
        for col in self.get_columns():
            if col.is_primary_key:
                column_name = decode_column_name(col)
                columns.append(column_name)
        if self.sql_settings.primary_key:
            if set(columns) != set(self.sql_settings.primary_key):
                raise BadSpecError("Primary key columns must have is_primary_key set to true",
                                   self.data_source_id)
            columns = self.sql_settings.primary_key
        return columns
class ReportConfiguration(QuickCachedDocumentMixin, Document):
    """
    A report configuration. These map 1:1 with reports that show up in the UI.
    """
    domain = StringProperty(required=True)
    visible = BooleanProperty(default=True)
    # config_id of the datasource
    config_id = StringProperty(required=True)
    data_source_type = StringProperty(
        default=DATA_SOURCE_TYPE_STANDARD,
        choices=[DATA_SOURCE_TYPE_STANDARD, DATA_SOURCE_TYPE_AGGREGATE])
    title = StringProperty()
    description = StringProperty()
    aggregation_columns = StringListProperty()
    filters = ListProperty()
    columns = ListProperty()
    configured_charts = ListProperty()
    sort_expression = ListProperty()
    soft_rollout = DecimalProperty(default=0)  # no longer used
    report_meta = SchemaProperty(ReportMeta)
    custom_query_provider = StringProperty(required=False)

    def __str__(self):
        return '{} - {}'.format(self.domain, self.title)

    def save(self, *args, **kwargs):
        # Stamp last-modified on every save.
        self.report_meta.last_modified = datetime.utcnow()
        super(ReportConfiguration, self).save(*args, **kwargs)

    @property
    @memoized
    def filters_without_prefilters(self):
        """Filter specs excluding 'pre' (prefilter) entries."""
        return [f for f in self.filters if f['type'] != 'pre']

    @property
    @memoized
    def prefilters(self):
        """Only the 'pre' (prefilter) filter specs."""
        return [f for f in self.filters if f['type'] == 'pre']

    @property
    @memoized
    def config(self):
        """The backing data source config (first element of the lookup result)."""
        return get_datasource_config(self.config_id, self.domain,
                                     self.data_source_type)[0]

    @property
    @memoized
    def report_columns(self):
        """Column specs wrapped into ReportColumn objects."""
        return [
            ReportColumnFactory.from_spec(c, self.is_static)
            for c in self.columns
        ]

    @property
    @memoized
    def ui_filters(self):
        """Filter specs wrapped into UI filter objects."""
        return [ReportFilterFactory.from_spec(f, self) for f in self.filters]

    @property
    @memoized
    def charts(self):
        return [ChartFactory.from_spec(g._obj) for g in self.configured_charts]

    @property
    @memoized
    def location_column_id(self):
        # Returns the column_id of the first 'location'-typed column, or
        # implicitly None if there isn't one.
        cols = [col for col in self.report_columns if col.type == 'location']
        if cols:
            return cols[0].column_id

    @property
    def map_config(self):
        # Map display config; only present when a location column exists.
        def map_col(column):
            # Non-location columns become popup label entries; the location
            # column itself is excluded (returns None, filtered out below).
            if column['column_id'] != self.location_column_id:
                return {
                    'column_id': column['column_id'],
                    'label': column['display']
                }

        if self.location_column_id:
            return {
                'location_column_id': self.location_column_id,
                'layer_name': {
                    'XFormInstance': _('Forms'),
                    'CommCareCase': _('Cases')
                }.get(self.config.referenced_doc_type, "Layer"),
                'columns': [x for x in (map_col(col) for col in self.columns) if x]
            }

    @property
    @memoized
    def sort_order(self):
        return [
            ReportOrderByFactory.from_spec(e) for e in self.sort_expression
        ]

    @property
    def table_id(self):
        return self.config.table_id

    def get_ui_filter(self, filter_slug):
        """Return the UI filter with the given slug, or None."""
        for filter in self.ui_filters:  # note: `filter` shadows the builtin
            if filter.name == filter_slug:
                return filter
        return None

    def get_languages(self):
        """
        Return the languages used in this report's column and filter display properties.
        Note that only explicitly identified languages are returned. So, if the
        display properties are all strings, "en" would not be returned.
        """
        langs = set()
        for item in self.columns + self.filters:
            if isinstance(item['display'], dict):
                langs |= set(item['display'].keys())
        return langs

    def validate(self, required=True):
        from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource

        def _check_for_duplicates(supposedly_unique_list, error_msg):
            # http://stackoverflow.com/questions/9835762/find-and-list-duplicates-in-python-list
            duplicate_items = set([
                item for item in supposedly_unique_list
                if supposedly_unique_list.count(item) > 1
            ])
            if len(duplicate_items) > 0:
                raise BadSpecError(
                    _(error_msg).format(', '.join(sorted(duplicate_items))))

        super(ReportConfiguration, self).validate(required)

        # check duplicates before passing to factory since it chokes on them
        _check_for_duplicates(
            [FilterSpec.wrap(f).slug for f in self.filters],
            'Filters cannot contain duplicate slugs: {}',
        )
        _check_for_duplicates(
            [
                column_id for c in self.report_columns
                for column_id in c.get_column_ids()
            ],
            'Columns cannot contain duplicate column_ids: {}',
        )

        # these calls all implicitly do validation
        ConfigurableReportDataSource.from_spec(self)

        self.ui_filters
        self.charts
        self.sort_order

    @classmethod
    @quickcache(['cls.__name__', 'domain'])
    def by_domain(cls, domain):
        """Cached list of report configs for a domain."""
        return get_report_configs_for_domain(domain)

    @classmethod
    @quickcache(['cls.__name__', 'domain', 'data_source_id'])
    def count_by_data_source(cls, domain, data_source_id):
        """Cached count of report configs referencing a data source."""
        return get_number_of_report_configs_by_data_source(
            domain, data_source_id)

    def clear_caches(self):
        # Invalidate both quickcached classmethods for this doc's keys
        # (config_id is the data_source_id key of count_by_data_source).
        super(ReportConfiguration, self).clear_caches()
        self.by_domain.clear(self.__class__, self.domain)
        self.count_by_data_source.clear(self.__class__, self.domain,
                                        self.config_id)

    @property
    def is_static(self):
        # Static report configs are identified purely by their doc id.
        return report_config_id_is_static(self._id)
class SQLSettings(DocumentSchema): partition_config = SchemaListProperty(SQLPartition) citus_config = SchemaProperty(CitusConfig) primary_key = ListProperty()
class DataSourceMeta(DocumentSchema): build = SchemaProperty(DataSourceBuildInformation)
class ObservationMapping(DocumentSchema): concept = StringProperty() value = SchemaProperty(ValueSource)
class DataSourceConfiguration(UnicodeMixIn, CachedCouchDocumentMixin, Document):
    """
    A data source configuration. These map 1:1 with database tables that get created.
    Each data source can back an arbitrary number of reports.
    """
    domain = StringProperty(required=True)
    referenced_doc_type = StringProperty(required=True)
    table_id = StringProperty(required=True)
    display_name = StringProperty()
    base_item_expression = DictProperty()
    configured_filter = DictProperty()
    configured_indicators = ListProperty()
    named_filters = DictProperty()
    meta = SchemaProperty(DataSourceMeta)

    def __unicode__(self):
        return u'{} - {}'.format(self.domain, self.display_name)

    def filter(self, document):
        """True if `document` passes this data source's main filter."""
        filter_fn = self._get_main_filter()
        return filter_fn(document, EvaluationContext(document, 0))

    def deleted_filter(self, document):
        """True if `document` matches a deleted doc type for this source
        (falsy if there are no deleted doc types)."""
        filter_fn = self._get_deleted_filter()
        return filter_fn and filter_fn(document, EvaluationContext(document, 0))

    @memoized
    def _get_main_filter(self):
        return self._get_filter([self.referenced_doc_type])

    @memoized
    def _get_deleted_filter(self):
        return self._get_filter(get_deleted_doc_types(self.referenced_doc_type))

    def _get_filter(self, doc_types):
        # Build: (domain match) AND (doc_type in doc_types) AND (configured filter, if any)
        if not doc_types:
            return None

        extras = (
            [self.configured_filter]
            if self.configured_filter else []
        )
        built_in_filters = [
            self._get_domain_filter_spec(),
            {
                'type': 'or',
                'filters': [
                    {
                        'type': 'property_match',
                        'property_name': 'doc_type',
                        'property_value': doc_type,
                    }
                    for doc_type in doc_types
                ],
            },
        ]
        return FilterFactory.from_spec(
            {
                'type': 'and',
                'filters': built_in_filters + extras,
            },
            context=self.named_filter_objects,
        )

    def _get_domain_filter_spec(self):
        return {
            'type': 'property_match',
            'property_name': 'domain',
            'property_value': self.domain,
        }

    @property
    @memoized
    def named_filter_objects(self):
        # Named filter specs compiled into filter objects, keyed by name.
        return {name: FilterFactory.from_spec(filter, {})
                for name, filter in self.named_filters.items()}

    @property
    def indicators(self):
        """All indicators (built-in doc_id/inserted_at/repeat_iteration plus
        configured ones) wrapped in a single CompoundIndicator."""
        default_indicators = [IndicatorFactory.from_spec({
            "column_id": "doc_id",
            "type": "expression",
            "display_name": "document id",
            "datatype": "string",
            "is_nullable": False,
            "is_primary_key": True,
            "expression": {
                "type": "root_doc",
                "expression": {
                    "type": "property_name",
                    "property_name": "_id"
                }
            }
        }, self.named_filter_objects)]

        default_indicators.append(IndicatorFactory.from_spec({
            "type": "inserted_at",
        }, self.named_filter_objects))

        # Repeat-based data sources also track the iteration number.
        if self.base_item_expression:
            default_indicators.append(IndicatorFactory.from_spec({
                "type": "repeat_iteration",
            }, self.named_filter_objects))

        return CompoundIndicator(
            self.display_name,
            default_indicators + [
                IndicatorFactory.from_spec(indicator, self.named_filter_objects)
                for indicator in self.configured_indicators
            ]
        )

    @property
    def parsed_expression(self):
        # Compiled base item expression, or None when rows map 1:1 to docs.
        if self.base_item_expression:
            return ExpressionFactory.from_spec(self.base_item_expression,
                                               context=self.named_filter_objects)
        return None

    def get_columns(self):
        return self.indicators.get_columns()

    def get_items(self, document):
        """Return the list of row-items this document contributes
        ([] if filtered out; the doc itself if no base item expression)."""
        if self.filter(document):
            if not self.base_item_expression:
                return [document]
            else:
                result = self.parsed_expression(document)
                if result is None:
                    return []
                elif isinstance(result, list):
                    return result
                else:
                    return [result]
        else:
            return []

    def get_all_values(self, doc):
        """Indicator values for every item of `doc`, one list per row."""
        return [
            self.indicators.get_values(item, EvaluationContext(doc, i))
            for i, item in enumerate(self.get_items(doc))
        ]

    def get_report_count(self):
        """
        Return the number of ReportConfigurations that reference this data source.
        """
        return get_number_of_report_configs_by_data_source(self.domain, self._id)

    def validate(self, required=True):
        super(DataSourceConfiguration, self).validate(required)
        # these two properties implicitly call other validation
        self._get_main_filter()
        self._get_deleted_filter()
        self.indicators
        self.parsed_expression

    @classmethod
    def by_domain(cls, domain):
        """All data source configs for a domain, sorted by display name."""
        return sorted(
            cls.view(
                'userreports/data_sources_by_build_info',
                start_key=[domain],
                end_key=[domain, {}],
                reduce=False,
                include_docs=True
            ),
            key=lambda config: config.display_name
        )

    @classmethod
    def all(cls):
        """Yield every data source config in the database, fetched in bulk."""
        ids = [res['id'] for res in cls.get_db().view(
            'userreports/data_sources_by_build_info',
            reduce=False, include_docs=False)]
        for result in iter_docs(cls.get_db(), ids):
            yield cls.wrap(result)
class FormDataValueMap(DocumentSchema): value = SchemaProperty(ValueSource) data_element_id = StringProperty(required=True)
class Dhis2Repeater(FormRepeater): class Meta(object): app_label = 'repeaters' include_app_id_param = False friendly_name = _("Forward Forms to DHIS2 as Anonymous Events") payload_generator_classes = (FormRepeaterJsonPayloadGenerator, ) dhis2_config = SchemaProperty(Dhis2Config) _has_config = True def __eq__(self, other): return (isinstance(other, self.__class__) and self.get_id == other.get_id) def __hash__(self): return hash(self.get_id) @memoized def payload_doc(self, repeat_record): return FormAccessors(repeat_record.domain).get_form( repeat_record.payload_id) @property def form_class_name(self): """ The class name used to determine which edit form to use """ return self.__class__.__name__ @classmethod def available_for_domain(cls, domain): return DHIS2_INTEGRATION.enabled(domain) def get_payload(self, repeat_record): payload = super(Dhis2Repeater, self).get_payload(repeat_record) return json.loads(payload) def send_request(self, repeat_record, payload): """ Sends API request and returns response if ``payload`` is a form that is configured to be forwarded to DHIS2. If ``payload`` is a form that isn't configured to be forwarded, returns True. """ requests = Requests( self.domain, self.url, self.username, self.plaintext_password, verify=self.verify, ) for form_config in self.dhis2_config.form_configs: if form_config.xmlns == payload['form']['@xmlns']: return send_dhis2_event( requests, form_config, payload, ) return True
class Dhis2EntityRepeater(CaseRepeater, Dhis2Instance):
    """Forwards case updates to DHIS2 as tracked entities."""

    class Meta(object):
        app_label = 'repeaters'

    include_app_id_param = False
    friendly_name = _("Forward Cases as DHIS2 Tracked Entities")
    payload_generator_classes = (FormRepeaterJsonPayloadGenerator, )

    dhis2_entity_config = SchemaProperty(Dhis2EntityConfig)
    _has_config = True

    def __str__(self):
        return Repeater.__str__(self)

    def allowed_to_forward(self, payload):
        # If the payload is the system form for updating a case with its
        # DHIS2 TEI ID then don't send it back.
        return payload.xmlns != XMLNS_DHIS2

    @memoized
    def payload_doc(self, repeat_record):
        # Fetch the form document the repeat record points at.
        return FormAccessors(repeat_record.domain).get_form(
            repeat_record.payload_id)

    @property
    def form_class_name(self):
        # The class name used to determine which edit form to use.
        return self.__class__.__name__

    @classmethod
    def available_for_domain(cls, domain):
        return DHIS2_INTEGRATION.enabled(domain)

    def get_payload(self, repeat_record):
        # Payload is the JSON-serialized form; parse it back into a dict.
        payload = super().get_payload(repeat_record)
        return json.loads(payload)

    def send_request(self, repeat_record, payload):
        """Send the relevant case updates from `payload` to DHIS2 as
        tracked entities; notifies and re-raises on any send error."""
        # Notify admins if API version is not supported
        self.get_api_version()

        # Collect every value source the entity config reads, so their case
        # properties can be pulled out of the form JSON below.
        value_source_configs = []
        for case_config in self.dhis2_entity_config.case_configs:
            value_source_configs.append(case_config.org_unit_id)
            value_source_configs.append(case_config.tei_id)
            for value_source_config in case_config.attributes.values():
                value_source_configs.append(value_source_config)

        case_trigger_infos = get_relevant_case_updates_from_form_json(
            self.domain,
            payload,
            case_types=self.white_listed_case_types,
            extra_fields=[
                c['case_property'] for c in value_source_configs
                if 'case_property' in c
            ],
            form_question_values=get_form_question_values(payload),
        )
        requests = get_requests(self, repeat_record.payload_id)
        try:
            return send_dhis2_entities(requests, self, case_trigger_infos)
        except Exception:
            # Notify with the full traceback, then let the error propagate
            # so the repeat record is retried.
            exc_type, exc_value, exc_traceback = sys.exc_info()
            tb_lines = traceback.format_exception(exc_type, exc_value,
                                                  exc_traceback)
            requests.notify_error(
                f"Error sending Entities to {self}: {exc_value!r}",
                details="".join(tb_lines))
            raise
class SimplifiedSyncLog(AbstractSyncLog):
    """
    New, simplified sync log class that is used by ownership cleanliness restore.

    Just maintains a flat list of case IDs on the phone rather than the
    case/dependent state lists from the SyncLog class.
    """
    log_format = StringProperty(default=LOG_FORMAT_SIMPLIFIED)
    case_ids_on_phone = SetProperty(six.text_type)
    # this is a subset of case_ids_on_phone used to flag that a case is only around because it has dependencies
    # this allows us to purge it if possible from other actions
    dependent_case_ids_on_phone = SetProperty(six.text_type)
    owner_ids_on_phone = SetProperty(six.text_type)
    index_tree = SchemaProperty(IndexTree)  # index tree of subcases / children
    extension_index_tree = SchemaProperty(IndexTree)  # index tree of extensions
    closed_cases = SetProperty(six.text_type)
    extensions_checked = BooleanProperty(default=False)
    device_id = StringProperty()
    auth_type = StringProperty()

    # lazily-initialized scratch set, not persisted with the document
    _purged_cases = None

    @property
    def purged_cases(self):
        # Lazy init so wrap()/schema machinery never sees a shared mutable default.
        if self._purged_cases is None:
            self._purged_cases = set()
        return self._purged_cases

    @property
    def is_formplayer(self):
        return self.device_id and self.device_id.startswith("WebAppsLogin")

    def case_count(self):
        return len(self.case_ids_on_phone)

    def phone_is_holding_case(self, case_id):
        """
        Whether the phone currently has a case, according to this sync log
        """
        return case_id in self.case_ids_on_phone

    def get_footprint_of_cases_on_phone(self):
        return list(self.case_ids_on_phone)

    @property
    def primary_case_ids(self):
        # Cases present for their own sake (not merely as dependencies).
        return self.case_ids_on_phone - self.dependent_case_ids_on_phone

    def purge(self, case_id, xform_id=None):
        """
        This happens in 3 phases, and recursively tries to purge outgoing
        indices of purged cases.

        Definitions:
        -----------
        A case is *relevant* if:
        - it is open and owned or,
        - it has a relevant child or,
        - it has a relevant extension or,
        - it is the extension of a relevant case.

        A case is *available* if:
        - it is open and not an extension case or,
        - it is open and is the extension of an available case.

        A case is *live* if:
        - it is owned and available or,
        - it has a live child or,
        - it has a live extension or,
        - it is the extension of a live case.

        Algorithm:
        ----------
        1. Mark *relevant* cases
        Mark all open cases owned by the user relevant. Traversing all
        outgoing child and extension indexes, as well as all incoming
        extension indexes, mark all touched cases relevant.

        2. Mark *available* cases
        Mark all relevant cases that are open and have no outgoing extension
        indexes as available. Traverse incoming extension indexes which don't
        lead to closed cases, mark all touched cases as available.

        3. Mark *live* cases
        Mark all relevant, owned, available cases as live. Traverse incoming
        extension indexes which don't lead to closed cases, mark all touched
        cases as live.
        """
        _get_logger().debug("purging: {}".format(case_id))
        self.dependent_case_ids_on_phone.add(case_id)
        relevant = self._get_relevant_cases(case_id)
        available = self._get_available_cases(relevant)
        live = self._get_live_cases(available)
        # Remove everything relevant that is neither live nor already purged.
        to_remove = (relevant - self.purged_cases) - live
        self._remove_cases_purge_indices(to_remove, case_id, xform_id)

    def _get_relevant_cases(self, case_id):
        """
        Mark all open cases owned by the user relevant. Traversing all
        outgoing child and extension indexes, as well as all incoming
        extension indexes, mark all touched cases relevant.
        """
        relevant = IndexTree.get_all_dependencies(
            case_id,
            child_index_tree=self.index_tree,
            extension_index_tree=self.extension_index_tree,
        )
        _get_logger().debug("Relevant cases of {}: {}".format(
            case_id, relevant))
        return relevant

    def _get_available_cases(self, relevant):
        """
        Mark all relevant cases that are open and have no outgoing extension
        indexes as available. Traverse incoming extension indexes which
        don't lead to closed cases, mark all touched cases as available
        """
        incoming_extensions = self.extension_index_tree.reverse_indices
        # Open cases that either have no outgoing extension index or also
        # have an outgoing child index.
        available = {
            case for case in relevant
            if case not in self.closed_cases and (
                not self.extension_index_tree.indices.get(case)
                or self.index_tree.indices.get(case))
        }
        new_available = set() | available
        # Breadth-first walk of incoming extensions from each available case.
        while new_available:
            case_to_check = new_available.pop()
            for incoming_extension in incoming_extensions.get(
                    case_to_check, []):
                closed = incoming_extension in self.closed_cases
                purged = incoming_extension in self.purged_cases
                if not closed and not purged:
                    new_available.add(incoming_extension)
            available = available | new_available
        _get_logger().debug("Available cases: {}".format(available))
        return available

    def _get_live_cases(self, available):
        """
        Mark all relevant, owned, available cases as live. Traverse incoming
        extension indexes which don't lead to closed cases, mark all
        touched cases as available.
        """
        primary_case_ids = self.primary_case_ids
        live = available & primary_case_ids
        new_live = set() | live
        checked = set()
        # Expand liveness through outgoing indexes and incoming extensions,
        # skipping anything already purged or already checked.
        while new_live:
            case_to_check = new_live.pop()
            checked.add(case_to_check)
            new_live = new_live | IndexTree.get_all_outgoing_cases(
                case_to_check, self.index_tree,
                self.extension_index_tree) - self.purged_cases
            new_live = new_live | IndexTree.traverse_incoming_extensions(
                case_to_check,
                self.extension_index_tree,
                frozenset(self.closed_cases),
            ) - self.purged_cases
            new_live = new_live - checked
            live = live | new_live

        _get_logger().debug("live cases: {}".format(live))
        return live

    def _remove_cases_purge_indices(self, all_to_remove, checked_case_id,
                                    xform_id):
        """Remove all cases marked for removal.
        Traverse child cases and try to purge those too."""
        _get_logger().debug("cases to to_remove: {}".format(all_to_remove))
        for to_remove in all_to_remove:
            indices = self.index_tree.indices.get(to_remove, {})
            self._remove_case(to_remove, all_to_remove, checked_case_id,
                              xform_id)
            for referenced_case in indices.values():
                is_dependent_case = referenced_case in self.dependent_case_ids_on_phone
                already_primed_for_removal = referenced_case in all_to_remove
                if is_dependent_case and not already_primed_for_removal and referenced_case != checked_case_id:
                    # a dependent parent may itself now be purgeable
                    self.purge(referenced_case, xform_id)

    def _remove_case(self, to_remove, all_to_remove, checked_case_id,
                     xform_id):
        """Removes case from index trees, case_ids_on_phone and
        dependent_case_ids_on_phone if pertinent"""
        _get_logger().debug('removing: {}'.format(to_remove))

        deleted_indices = self.index_tree.indices.pop(to_remove, {})
        deleted_indices.update(
            self.extension_index_tree.indices.pop(to_remove, {}))

        self._validate_case_removal(to_remove, all_to_remove, deleted_indices,
                                    checked_case_id, xform_id)

        try:
            self.case_ids_on_phone.remove(to_remove)
        except KeyError:
            should_fail_softly = not xform_id or _domain_has_legacy_toggle_set(
            )
            if should_fail_softly:
                pass
            else:
                # this is only a soft assert for now because of http://manage.dimagi.com/default.asp?181443
                # we should convert back to a real Exception when we stop getting any of these
                _assert = soft_assert(notify_admins=True,
                                      exponential_backoff=False)
                _assert(
                    False, 'case already remove from synclog', {
                        'case_id': to_remove,
                        'synclog_id': self._id,
                        'form_id': xform_id
                    })
        else:
            # only record a purge when the case was actually present
            self.purged_cases.add(to_remove)

        if to_remove in self.dependent_case_ids_on_phone:
            self.dependent_case_ids_on_phone.remove(to_remove)

    def _validate_case_removal(self, case_to_remove, all_to_remove,
                               deleted_indices, checked_case_id, xform_id):
        """Traverse immediate outgoing indices.
        Validate that these are also candidates for removal."""
        if case_to_remove == checked_case_id:
            return

        # Logging removed temporarily: https://github.com/dimagi/commcare-hq/pull/16259#issuecomment-303176217
        # for index in deleted_indices.values():
        #     if xform_id and not _domain_has_legacy_toggle_set():
        #         # unblocking http://manage.dimagi.com/default.asp?185850
        #         _assert = soft_assert(send_to_ops=False, log_to_file=True, exponential_backoff=True,
        #                               fail_if_debug=True)
        #         _assert(index in (all_to_remove | set([checked_case_id])),
        #                 "expected {} in {} but wasn't".format(index, all_to_remove))

    def _add_primary_case(self, case_id):
        # Add (or promote) a case as present for its own sake.
        self.case_ids_on_phone.add(case_id)
        if case_id in self.dependent_case_ids_on_phone:
            self.dependent_case_ids_on_phone.remove(case_id)

    def _add_index(self, index, case_update):
        _get_logger().debug('adding index {} --<{}>--> {} ({}).'.format(
            index.case_id, index.relationship, index.referenced_id,
            index.identifier))
        if index.relationship == const.CASE_INDEX_EXTENSION:
            self._add_extension_index(index, case_update)
        else:
            self._add_child_index(index)

    def _add_extension_index(self, index, case_update):
        assert index.relationship == const.CASE_INDEX_EXTENSION
        self.extension_index_tree.set_index(index.case_id, index.identifier,
                                            index.referenced_id)

        case_child_indices = [
            idx for idx in case_update.indices_to_add
            if idx.relationship == const.CASE_INDEX_CHILD
            and idx.referenced_id == index.referenced_id
        ]
        if not case_child_indices and not case_update.is_live:
            # this case doesn't also have child indices, and it is not owned, so it is dependent
            self.dependent_case_ids_on_phone.add(index.case_id)

    def _add_child_index(self, index):
        assert index.relationship == const.CASE_INDEX_CHILD
        self.index_tree.set_index(index.case_id, index.identifier,
                                  index.referenced_id)

    def _delete_index(self, index):
        self.index_tree.delete_index(index.case_id, index.identifier)
        self.extension_index_tree.delete_index(index.case_id,
                                               index.identifier)

    def update_phone_lists(self, xform, case_list):
        # Apply a form's case actions to this sync log's phone state.
        made_changes = False
        _get_logger().debug('updating sync log for {}'.format(self.user_id))
        _get_logger().debug('case ids before update: {}'.format(', '.join(
            self.case_ids_on_phone)))
        _get_logger().debug('dependent case ids before update: {}'.format(
            ', '.join(self.dependent_case_ids_on_phone)))
        _get_logger().debug('index tree before update: {}'.format(
            self.index_tree))
        _get_logger().debug('extension index tree before update: {}'.format(
            self.extension_index_tree))

        class CaseUpdate(object):
            # Accumulates the net effect of a form's actions on one case.
            def __init__(self, case_id, owner_ids_on_phone):
                self.case_id = case_id
                self.owner_ids_on_phone = owner_ids_on_phone
                self.was_live_previously = True
                self.final_owner_id = None
                self.is_closed = None
                self.indices_to_add = []
                self.indices_to_delete = []

            @property
            def extension_indices_to_add(self):
                return [
                    index for index in self.indices_to_add
                    if index.relationship == const.CASE_INDEX_EXTENSION
                ]

            def has_extension_indices_to_add(self):
                return len(self.extension_indices_to_add) > 0

            @property
            def is_live(self):
                """returns whether an update is live for a specifc set of owner_ids"""
                if self.is_closed:
                    return False
                elif self.final_owner_id is None:
                    # we likely didn't touch owner_id so just default to whatever it was previously
                    return self.was_live_previously
                else:
                    return self.final_owner_id in self.owner_ids_on_phone

        ShortIndex = namedtuple(
            'ShortIndex',
            ['case_id', 'identifier', 'referenced_id', 'relationship'])

        # this is a variable used via closures in the function below
        owner_id_map = {}

        def get_latest_owner_id(case_id, action=None):
            # "latest" just means as this forms actions are played through
            if action is not None:
                owner_id_from_action = action.updated_known_properties.get(
                    "owner_id")
                if owner_id_from_action is not None:
                    owner_id_map[case_id] = owner_id_from_action
            return owner_id_map.get(case_id, None)

        # Pass 1: replay every action into a CaseUpdate per case.
        all_updates = {}
        for case in case_list:
            if case.case_id not in all_updates:
                _get_logger().debug('initializing update for case {}'.format(
                    case.case_id))
                all_updates[case.case_id] = CaseUpdate(
                    case_id=case.case_id,
                    owner_ids_on_phone=self.owner_ids_on_phone)

            case_update = all_updates[case.case_id]
            case_update.was_live_previously = case.case_id in self.primary_case_ids
            actions = case.get_actions_for_form(xform)
            for action in actions:
                _get_logger().debug('{}: {}'.format(case.case_id,
                                                    action.action_type))
                owner_id = get_latest_owner_id(case.case_id, action)
                if owner_id is not None:
                    case_update.final_owner_id = owner_id
                if action.action_type == const.CASE_ACTION_INDEX:
                    for index in action.indices:
                        if index.referenced_id:
                            case_update.indices_to_add.append(
                                ShortIndex(case.case_id, index.identifier,
                                           index.referenced_id,
                                           index.relationship))
                        else:
                            # empty referenced_id means the index was removed
                            case_update.indices_to_delete.append(
                                ShortIndex(case.case_id, index.identifier,
                                           None, None))
                elif action.action_type == const.CASE_ACTION_CLOSE:
                    case_update.is_closed = True

        # Pass 2: apply live updates to the phone state.
        non_live_updates = []
        for case in case_list:
            case_update = all_updates[case.case_id]
            if case_update.is_live:
                _get_logger().debug('case {} is live.'.format(
                    case_update.case_id))
                if case.case_id not in self.case_ids_on_phone:
                    self._add_primary_case(case.case_id)
                    made_changes = True
                elif case.case_id in self.dependent_case_ids_on_phone:
                    self.dependent_case_ids_on_phone.remove(case.case_id)
                    made_changes = True

                for index in case_update.indices_to_add:
                    self._add_index(index, case_update)
                    made_changes = True
                for index in case_update.indices_to_delete:
                    self._delete_index(index)
                    made_changes = True
        # NOTE(review): this chunk ends here; the remainder of this method
        # (handling of non_live_updates etc.) is outside this view.
class Dhis2Repeater(FormRepeater, Dhis2Instance): class Meta(object): app_label = 'repeaters' include_app_id_param = False friendly_name = _("Forward Forms to DHIS2 as Anonymous Events") payload_generator_classes = (FormRepeaterJsonPayloadGenerator, ) dhis2_config = SchemaProperty(Dhis2Config) _has_config = True def __str__(self): return Repeater.__str__(self) def __eq__(self, other): return (isinstance(other, self.__class__) and self.get_id == other.get_id) def __hash__(self): return hash(self.get_id) @memoized def payload_doc(self, repeat_record): return FormAccessors(repeat_record.domain).get_form( repeat_record.payload_id) @property def form_class_name(self): """ The class name used to determine which edit form to use """ return self.__class__.__name__ @classmethod def available_for_domain(cls, domain): return DHIS2_INTEGRATION.enabled(domain) def get_payload(self, repeat_record): payload = super(Dhis2Repeater, self).get_payload(repeat_record) return json.loads(payload) def send_request(self, repeat_record, payload): """ Sends API request and returns response if ``payload`` is a form that is configured to be forwarded to DHIS2. If ``payload`` is a form that isn't configured to be forwarded, returns True. """ # Notify admins if API version is not supported self.get_api_version() requests = get_requests(self, repeat_record.payload_id) for form_config in self.dhis2_config.form_configs: if form_config.xmlns == payload['form']['@xmlns']: try: return send_dhis2_event( requests, form_config, payload, ) except (RequestException, HTTPError, ConfigurationError) as err: requests.notify_error( f"Error sending Events to {self}: {err}") raise return True
class OpenmrsRepeater(CaseRepeater):
    """Forwards case updates from form submissions to OpenMRS."""

    class Meta(object):
        app_label = 'repeaters'

    include_app_id_param = False
    friendly_name = _("Forward to OpenMRS")
    payload_generator_classes = (FormRepeaterJsonPayloadGenerator, )

    location_id = StringProperty(default='')
    openmrs_config = SchemaProperty(OpenmrsConfig)

    _has_config = True

    # self.white_listed_case_types must have exactly one case type set
    # for Atom feed integration to add cases for OpenMRS patients.
    # self.location_id must be set to determine their case owner. The
    # owner is set to the first CommCareUser instance found at that
    # location.
    atom_feed_enabled = BooleanProperty(default=False)
    atom_feed_status = SchemaDictProperty(AtomFeedStatus)

    def __eq__(self, other):
        # Equal when same class and same doc id.
        return (isinstance(other, self.__class__)
                and self.get_id == other.get_id)

    def __hash__(self):
        # FIX: defining __eq__ without __hash__ makes instances unhashable in
        # Python 3 (object.__hash__ is set to None). Hash on the same key
        # __eq__ compares, consistent with Dhis2Repeater.
        return hash(self.get_id)

    @classmethod
    def wrap(cls, data):
        # Migrate the legacy flat atom-feed fields into atom_feed_status.
        if 'atom_feed_last_polled_at' in data:
            data['atom_feed_status'] = {
                ATOM_FEED_NAME_PATIENT: {
                    'last_polled_at': data.pop('atom_feed_last_polled_at'),
                    'last_page': data.pop('atom_feed_last_page', None),
                }
            }
        return super(OpenmrsRepeater, cls).wrap(data)

    @cached_property
    def requests(self):
        # Authenticated HTTP client for this repeater's OpenMRS server.
        return Requests(self.domain, self.url, self.username,
                        self.plaintext_password, verify=self.verify)

    @cached_property
    def observation_mappings(self):
        """Importable observation mappings grouped by concept id.

        Only mappings whose value source allows the import direction and
        that name a case property are included.
        """
        obs_mappings = defaultdict(list)
        for form_config in self.openmrs_config.form_configs:
            for obs_mapping in form_config.openmrs_observations:
                if obs_mapping.value.check_direction(
                        DIRECTION_IMPORT) and obs_mapping.case_property:
                    obs_mappings[obs_mapping.concept].append(obs_mapping)
        return obs_mappings

    @memoized
    def payload_doc(self, repeat_record):
        # Fetch the form document the repeat record points at.
        return FormAccessors(repeat_record.domain).get_form(
            repeat_record.payload_id)

    @property
    def form_class_name(self):
        """
        The class name used to determine which edit form to use
        """
        return self.__class__.__name__

    @classmethod
    def available_for_domain(cls, domain):
        return OPENMRS_INTEGRATION.enabled(domain)

    def allowed_to_forward(self, payload):
        """
        Forward the payload if ...

        * it did not come from OpenMRS, and
        * CaseRepeater says it's OK for the case types and users of any of
          the payload's cases, and
        * this repeater forwards to the right OpenMRS server for any of
          the payload's cases.

        :param payload: An XFormInstance (not a case)
        """
        if payload.xmlns == XMLNS_OPENMRS:
            # payload came from OpenMRS. Don't send it back.
            return False

        case_blocks = extract_case_blocks(payload)
        case_ids = [case_block['@case_id'] for case_block in case_blocks]
        cases = CaseAccessors(payload.domain).get_cases(case_ids,
                                                        ordered=True)
        if not any(
                CaseRepeater.allowed_to_forward(self, case)
                for case in cases):
            # If none of the case updates in the payload are allowed to
            # be forwarded, drop it.
            return False

        if not self.location_id:
            # If this repeater does not have a location, all payloads
            # should go to it.
            return True

        repeaters = [
            repeater for case in cases
            for repeater in get_case_location_ancestor_repeaters(case)
        ]
        # If this repeater points to the wrong OpenMRS server for this
        # payload then let the right repeater handle it.
        return self in repeaters

    def get_payload(self, repeat_record):
        # Payload is the JSON-serialized form; parse it back into a dict.
        payload = super(OpenmrsRepeater, self).get_payload(repeat_record)
        return json.loads(payload)

    def send_request(self, repeat_record, payload):
        """Extract the configured case updates from the form JSON and send
        them to OpenMRS."""
        # Every value source the case config reads; their case properties
        # must be pulled out of the form JSON below.
        value_sources = chain(
            self.openmrs_config.case_config.patient_identifiers.values(),
            self.openmrs_config.case_config.person_properties.values(),
            self.openmrs_config.case_config.person_preferred_name.values(),
            self.openmrs_config.case_config.person_preferred_address.values(),
            self.openmrs_config.case_config.person_attributes.values(),
        )

        case_trigger_infos = get_relevant_case_updates_from_form_json(
            self.domain,
            payload,
            case_types=self.white_listed_case_types,
            extra_fields=[
                vs.case_property for vs in value_sources
                if hasattr(vs, 'case_property')
            ])
        form_question_values = get_form_question_values(payload)

        return send_openmrs_data(self.requests, self.domain, payload,
                                 self.openmrs_config, case_trigger_infos,
                                 form_question_values)
class Spam(DocumentSchema): ham = Ham(required=False) ham_prop = SchemaProperty(Ham, required=False) ham_dict_prop = SchemaDictProperty(Ham, required=False)
class Domain(QuickCachedDocumentMixin, Document, SnapshotMixin):
    """
    Domain is the highest level collection of people/stuff in the system.
    Pretty much everything happens at the domain-level, including user
    membership, permission to see data, reports, charts, etc.
    """

    name = StringProperty()
    is_active = BooleanProperty()
    date_created = DateTimeProperty()
    default_timezone = StringProperty(default=getattr(settings, "TIME_ZONE", "UTC"))
    case_sharing = BooleanProperty(default=False)
    secure_submissions = BooleanProperty(default=False)
    cloudcare_releases = StringProperty(choices=['stars', 'nostars', 'default'], default='default')
    organization = StringProperty()
    hr_name = StringProperty()  # the human-readable name for this project
    creating_user = StringProperty()  # username of the user who created this domain

    # domain metadata
    project_type = StringProperty()  # e.g. MCH, HIV
    customer_type = StringProperty()  # plus, full, etc.
    is_test = StringProperty(choices=["true", "false", "none"], default="none")
    description = StringProperty()
    short_description = StringProperty()
    is_shared = BooleanProperty(default=False)
    commtrack_enabled = BooleanProperty(default=False)
    call_center_config = SchemaProperty(CallCenterProperties)
    has_careplan = BooleanProperty(default=False)
    restrict_superusers = BooleanProperty(default=False)
    allow_domain_requests = BooleanProperty(default=False)
    location_restriction_for_users = BooleanProperty(default=False)
    usercase_enabled = BooleanProperty(default=False)
    hipaa_compliant = BooleanProperty(default=False)
    use_sql_backend = BooleanProperty(default=False)

    case_display = SchemaProperty(CaseDisplaySettings)

    # CommConnect settings
    commconnect_enabled = BooleanProperty(default=False)
    survey_management_enabled = BooleanProperty(default=False)
    # Whether or not a case can register via sms
    sms_case_registration_enabled = BooleanProperty(default=False)
    # Case type to apply to cases registered via sms
    sms_case_registration_type = StringProperty()
    # Owner to apply to cases registered via sms
    sms_case_registration_owner_id = StringProperty()
    # Submitting user to apply to cases registered via sms
    sms_case_registration_user_id = StringProperty()
    # Whether or not a mobile worker can register via sms
    sms_mobile_worker_registration_enabled = BooleanProperty(default=False)
    use_default_sms_response = BooleanProperty(default=False)
    default_sms_response = StringProperty()
    chat_message_count_threshold = IntegerProperty()
    custom_chat_template = StringProperty()  # See settings.CUSTOM_CHAT_TEMPLATES
    custom_case_username = StringProperty()  # Case property to use when showing the case's name in a chat window
    # If empty, sms can be sent at any time. Otherwise, only send during
    # these windows of time. SMS_QUEUE_ENABLED must be True in localsettings
    # for this to be considered.
    restricted_sms_times = SchemaListProperty(DayTimeWindow)
    # If empty, this is ignored. Otherwise, the framework will make sure
    # that during these days/times, no automated outbound sms will be sent
    # to someone if they have sent in an sms within sms_conversation_length
    # minutes. Outbound sms sent from a user in a chat window, however, will
    # still be sent. This is meant to prevent chat conversations from being
    # interrupted by automated sms reminders.
    # SMS_QUEUE_ENABLED must be True in localsettings for this to be
    # considered.
    sms_conversation_times = SchemaListProperty(DayTimeWindow)
    # In minutes, see above.
    sms_conversation_length = IntegerProperty(default=10)
    # Set to True to prevent survey questions and answers from being seen in
    # SMS chat windows.
    filter_surveys_from_chat = BooleanProperty(default=False)
    # The below option only matters if filter_surveys_from_chat = True.
    # If set to True, invalid survey responses will still be shown in the chat
    # window, while questions and valid responses will be filtered out.
    show_invalid_survey_responses_in_chat = BooleanProperty(default=False)
    # If set to True, if a message is read by anyone it counts as being read by
    # everyone. Set to False so that a message is only counted as being read
    # for a user if only that user has read it.
    count_messages_as_read_by_anyone = BooleanProperty(default=False)
    # Set to True to allow sending sms and all-label surveys to cases whose
    # phone number is duplicated with another contact
    send_to_duplicated_case_numbers = BooleanProperty(default=True)
    enable_registration_welcome_sms_for_case = BooleanProperty(default=False)
    enable_registration_welcome_sms_for_mobile_worker = BooleanProperty(default=False)
    sms_survey_date_format = StringProperty()

    # exchange/domain copying stuff
    is_snapshot = BooleanProperty(default=False)
    is_approved = BooleanProperty(default=False)
    snapshot_time = DateTimeProperty()
    published = BooleanProperty(default=False)
    license = StringProperty(choices=LICENSES, default='cc')
    title = StringProperty()
    cda = SchemaProperty(LicenseAgreement)
    multimedia_included = BooleanProperty(default=True)
    downloads = IntegerProperty(default=0)  # number of downloads for this specific snapshot
    full_downloads = IntegerProperty(default=0)  # number of downloads for all snapshots from this domain
    author = StringProperty()
    phone_model = StringProperty()
    attribution_notes = StringProperty()
    publisher = StringProperty(choices=["organization", "user"], default="user")
    yt_id = StringProperty()
    snapshot_head = BooleanProperty(default=False)

    deployment = SchemaProperty(Deployment)

    image_path = StringProperty()
    image_type = StringProperty()

    cached_properties = DictProperty()

    internal = SchemaProperty(InternalProperties)

    dynamic_reports = SchemaListProperty(DynamicReportSet)

    # extra user specified properties
    tags = StringListProperty()
    area = StringProperty(choices=AREA_CHOICES)
    sub_area = StringProperty(choices=SUB_AREA_CHOICES)
    # NOTE(review): missing parentheses? This assigns the DateTimeProperty
    # class itself rather than an instance — verify whether launch_date is
    # ever read/written as a document property.
    launch_date = DateTimeProperty

    # to be eliminated from projects and related documents when they are
    # copied for the exchange
    _dirty_fields = ('admin_password', 'admin_password_charset', 'city', 'countries', 'region', 'customer_type')

    last_modified = DateTimeProperty(default=datetime(2015, 1, 1))

    # when turned on, use SECURE_TIMEOUT for sessions of users who are
    # members of this domain
    secure_sessions = BooleanProperty(default=False)

    two_factor_auth = BooleanProperty(default=False)
    strong_mobile_passwords = BooleanProperty(default=False)

    # There is no longer a way to request a report builder trial, so this
    # property should be removed in the near future. (Keeping it for now in
    # case a user has requested a trial and but has not yet been granted it)
    requested_report_builder_trial = StringListProperty()
    requested_report_builder_subscription = StringListProperty()

    @classmethod
    def wrap(cls, data):
        # Migrates legacy document shapes on load; may save() the migrated doc.
        # for domains that still use original_doc
        should_save = False
        if 'original_doc' in data:
            original_doc = data['original_doc']
            del data['original_doc']
            should_save = True
            if original_doc:
                original_doc = Domain.get_by_name(original_doc)
                data['copy_history'] = [original_doc._id]

        # for domains that have a public domain license
        if 'license' in data:
            if data.get("license", None) == "public":
                data["license"] = "cc"
                should_save = True

        if 'slug' in data and data["slug"]:
            data["hr_name"] = data["slug"]
            del data["slug"]

        if 'is_test' in data and isinstance(data["is_test"], bool):
            # is_test used to be a bool; it is now a tri-state string
            data["is_test"] = "true" if data["is_test"] else "false"
            should_save = True

        if 'cloudcare_releases' not in data:
            data['cloudcare_releases'] = 'nostars'  # legacy default setting

        # Don't actually remove location_types yet. We can migrate fully and
        # remove this after everything's hunky-dory in production. 2015-03-06
        if 'location_types' in data:
            data['obsolete_location_types'] = data.pop('location_types')

        self = super(Domain, cls).wrap(data)
        if self.deployment is None:
            self.deployment = Deployment()
        if should_save:
            self.save()
        return self

    def get_default_timezone(self):
        """return a timezone object from self.default_timezone"""
        import pytz
        return pytz.timezone(self.default_timezone)

    @staticmethod
    @quickcache(['name'], timeout=24 * 60 * 60)
    def is_secure_session_required(name):
        # Cached for a day; cleared in clear_caches() below.
        domain = Domain.get_by_name(name)
        return domain and domain.secure_sessions

    @staticmethod
    @skippable_quickcache(['couch_user._id', 'is_active'], skip_arg='strict', timeout=5*60, memoize_timeout=10)
    def active_for_couch_user(couch_user, is_active=True, strict=False):
        # strict=True skips the cache and the stale Couch view.
        domain_names = couch_user.get_domains()
        return Domain.view(
            "domain/by_status",
            keys=[[is_active, d] for d in domain_names],
            reduce=False,
            include_docs=True,
            stale=settings.COUCH_STALE_QUERY if not strict else None,
        ).all()

    @staticmethod
    def active_for_user(user, is_active=True, strict=False):
        # Accepts either a Django user or a CouchUser; anonymous users
        # belong to no domains.
        if isinstance(user, AnonymousUser):
            return []
        from corehq.apps.users.models import CouchUser
        if isinstance(user, CouchUser):
            couch_user = user
        else:
            couch_user = CouchUser.from_django_user(user)
        if couch_user:
            return Domain.active_for_couch_user(
                couch_user, is_active=is_active, strict=strict)
        else:
            return []

    @classmethod
    def field_by_prefix(cls, field, prefix=''):
        # Returns (value, count) pairs for a field, restricted to values
        # starting with `prefix`, most frequent first.
        # unichr(0xfff8) is something close to the highest character available
        res = cls.view("domain/fields_by_prefix",
                       group=True,
                       startkey=[field, True, prefix],
                       endkey=[field, True, "%s%c" % (prefix, unichr(0xfff8)), {}])
        vals = [(d['value'], d['key'][2]) for d in res]
        vals.sort(reverse=True)
        return [(v[1], v[0]) for v in vals]

    def add(self, model_instance, is_active=True):
        """
        Add something to this domain, through the generic relation.
        Returns the created membership object
        """
        # Add membership info to Couch
        couch_user = model_instance.get_profile().get_couch_user()
        couch_user.add_domain_membership(self.name)
        couch_user.save()

    def applications(self):
        # Lightweight app documents (no full build data).
        return get_brief_apps_in_domain(self.name)

    def full_applications(self, include_builds=True):
        from corehq.apps.app_manager.models import Application, RemoteApp
        WRAPPERS = {'Application': Application, 'RemoteApp': RemoteApp}

        def wrap_application(a):
            # Wrap each view row with the class matching its doc_type.
            return WRAPPERS[a['doc']['doc_type']].wrap(a['doc'])

        if include_builds:
            startkey = [self.name]
            endkey = [self.name, {}]
        else:
            # The None key component excludes saved builds.
            startkey = [self.name, None]
            endkey = [self.name, None, {}]

        return Application.get_db().view('app_manager/applications',
            startkey=startkey,
            endkey=endkey,
            include_docs=True,
            wrapper=wrap_application).all()

    @cached_property
    def versions(self):
        # Distinct application_version values across this domain's apps.
        apps = self.applications()
        return list(set(a.application_version for a in apps))

    @cached_property
    def has_case_management(self):
        for app in self.full_applications():
            if app.doc_type == 'Application':
                if app.has_case_management():
                    return True
        return False

    @cached_property
    def has_media(self):
        for app in self.full_applications():
            if app.doc_type == 'Application' and app.has_media():
                return True
        return False

    @property
    def use_cloudcare_releases(self):
        return self.cloudcare_releases != 'nostars'

    def all_users(self):
        from corehq.apps.users.models import CouchUser
        return CouchUser.by_domain(self.name)

    def recent_submissions(self):
        return domain_has_submission_in_last_30_days(self.name)

    @cached_property
    def languages(self):
        # Union of the language codes of all apps in the domain.
        apps = self.applications()
        return set(chain.from_iterable([a.langs for a in apps]))

    def readable_languages(self):
        # NOTE(review): self.languages is a cached_property, but it is called
        # here as self.languages() — verify; this may rely on the property
        # returning a callable or be a latent bug.
        return ', '.join(lang_lookup[lang] or lang for lang in self.languages())

    def __unicode__(self):
        # Python 2 style string conversion (file also uses unichr/imap).
        return self.name

    @classmethod
    @skippable_quickcache(['name'], skip_arg='strict', timeout=30*60)
    def get_by_name(cls, name, strict=False):
        if not name:
            # get_by_name should never be called with name as None (or '', etc)
            # I fixed the code in such a way that if I raise a ValueError
            # all tests pass and basic pages load,
            # but in order not to break anything in the wild,
            # I'm opting to notify by email if/when this happens
            # but fall back to the previous behavior of returning None
            if settings.DEBUG:
                raise ValueError('%r is not a valid domain name' % name)
            else:
                _assert = soft_assert(notify_admins=True, exponential_backoff=False)
                _assert(False, '%r is not a valid domain name' % name)
                return None

        def _get_by_name(stale=False):
            extra_args = {'stale': settings.COUCH_STALE_QUERY} if stale else {}
            result = cls.view("domain/domains", key=name, reduce=False, include_docs=True, **extra_args).first()
            if not isinstance(result, Domain):
                # A stale view may return a result with no doc if the doc has just been deleted.
                # In this case couchdbkit just returns the raw view result as a dict
                return None
            else:
                return result

        domain = _get_by_name(stale=(not strict))
        if domain is None and not strict:
            # on the off chance this is a brand new domain, try with strict
            domain = _get_by_name(stale=False)
        return domain

    @classmethod
    def get_or_create_with_name(cls, name, is_active=False, secure_submissions=True, use_sql_backend=False):
        result = cls.view("domain/domains", key=name, reduce=False, include_docs=True).first()
        if result:
            return result
        else:
            new_domain = Domain(
                name=name,
                is_active=is_active,
                date_created=datetime.utcnow(),
                secure_submissions=secure_submissions,
                use_sql_backend=use_sql_backend,
            )
            new_domain.save(**get_safe_write_kwargs())
            return new_domain

    @classmethod
    def generate_name(cls, hr_name, max_length=25):
        '''
        Generate a URL-friendly name based on a given human-readable name.
        Normalizes given name, then looks for conflicting domains, addressing
        conflicts by adding "-1", "-2", etc. May return None if it fails to
        generate a new, unique name. Throws exception if it can't figure out
        a name, which shouldn't happen unless max_length is absurdly short.
        '''
        from corehq.apps.domain.utils import get_domain_url_slug
        name = get_domain_url_slug(hr_name, max_length=max_length)
        if not name:
            raise NameUnavailableException
        if Domain.get_by_name(name):
            # Conflict: keep shortening the prefix until a numbered suffix fits.
            prefix = name
            while len(prefix):
                name = next_available_name(prefix, Domain.get_names_by_prefix(prefix + '-'))
                if Domain.get_by_name(name):
                    # should never happen
                    raise NameUnavailableException
                if len(name) <= max_length:
                    return name
                prefix = prefix[:-1]
            raise NameUnavailableException
        return name

    @classmethod
    def get_all(cls, include_docs=True):
        domains = Domain.view("domain/not_snapshots", include_docs=False).all()
        if not include_docs:
            return domains
        else:
            # Lazily wraps docs; imap is the Python 2 itertools iterator map.
            return imap(cls.wrap, iter_docs(cls.get_db(), [d['id'] for d in domains]))

    @classmethod
    def get_all_names(cls):
        return [d['key'] for d in cls.get_all(include_docs=False)]

    @classmethod
    def get_all_ids(cls):
        return [d['id'] for d in cls.get_all(include_docs=False)]

    @classmethod
    def get_names_by_prefix(cls, prefix):
        return [d['key'] for d in Domain.view(
            "domain/domains",
            startkey=prefix,
            endkey=prefix + u"zzz",
            reduce=False,
            include_docs=False
        ).all()]

    def case_sharing_included(self):
        # True if the domain itself, or any of its apps, enable case sharing.
        return self.case_sharing or reduce(lambda x, y: x or y, [getattr(app, 'case_sharing', False) for app in self.applications()], False)

    def save(self, **params):
        # Bumps last_modified on every save and fires the post-save signal;
        # signal errors are reported but do not abort the save.
        self.last_modified = datetime.utcnow()
        if not self._rev:
            # mark any new domain as timezone migration complete
            set_migration_complete(self.name)
        super(Domain, self).save(**params)

        from corehq.apps.domain.signals import commcare_domain_post_save
        results = commcare_domain_post_save.send_robust(sender='domain', domain=self)
        for result in results:
            # Second argument is None if there was no error
            if result[1]:
                notify_exception(
                    None,
                    message="Error occured during domain post_save %s: %s" % (self.name, str(result[1]))
                )

    def save_copy(self, new_domain_name=None, new_hr_name=None, user=None,
                  copy_by_id=None, share_reminders=True,
                  share_user_roles=True):
        # Copies this domain document and its components (apps, fixtures,
        # optionally reminders and user roles) into a brand-new domain.
        from corehq.apps.app_manager.dbaccessors import get_app
        from corehq.apps.reminders.models import CaseReminderHandler
        from corehq.apps.fixtures.models import FixtureDataItem
        from corehq.apps.app_manager.dbaccessors import get_brief_apps_in_domain
        from corehq.apps.domain.dbaccessors import get_doc_ids_in_domain_by_class
        from corehq.apps.fixtures.models import FixtureDataType
        from corehq.apps.users.models import UserRole

        db = Domain.get_db()
        new_id = db.copy_doc(self.get_id)['id']
        if new_domain_name is None:
            new_domain_name = new_id

        # Serialize name generation so two copies can't claim the same name.
        with CriticalSection(['request_domain_name_{}'.format(new_domain_name)]):
            new_domain_name = Domain.generate_name(new_domain_name)
            new_domain = Domain.get(new_id)
            new_domain.name = new_domain_name
            new_domain.hr_name = new_hr_name
            new_domain.copy_history = self.get_updated_history()
            new_domain.is_snapshot = False
            new_domain.snapshot_time = None
            new_domain.organization = None  # TODO: use current user's organization (?)

            # reset stuff
            new_domain.cda.signed = False
            new_domain.cda.date = None
            new_domain.cda.type = None
            new_domain.cda.user_id = None
            new_domain.cda.user_ip = None
            new_domain.is_test = "none"
            new_domain.internal = InternalProperties()
            new_domain.creating_user = user.username if user else None
            new_domain.date_created = datetime.utcnow()

            for field in self._dirty_fields:
                if hasattr(new_domain, field):
                    delattr(new_domain, field)

            # Saving the domain should happen before we import any apps since
            # importing apps can update the domain object (for example, if user
            # as a case needs to be enabled)
            new_domain.save()

            new_app_components = {}  # a mapping of component's id to its copy

            def copy_data_items(old_type_id, new_type_id):
                # Copy every fixture data item of a type and point the copies
                # at the copied data type.
                for item in FixtureDataItem.by_data_type(self.name, old_type_id):
                    comp = self.copy_component(
                        item.doc_type, item._id, new_domain_name, user=user)
                    comp.data_type_id = new_type_id
                    comp.save()

            def get_latest_app_id(doc_id):
                app = get_app(self.name, doc_id).get_latest_saved()
                if app:
                    return app._id, app.doc_type

            for app in get_brief_apps_in_domain(self.name):
                doc_id, doc_type = app.get_id, app.doc_type
                original_doc_id = doc_id
                if copy_by_id and doc_id not in copy_by_id:
                    continue
                if not self.is_snapshot:
                    # Prefer the latest saved build over the current working copy.
                    doc_id, doc_type = get_latest_app_id(doc_id) or (doc_id, doc_type)
                component = self.copy_component(doc_type, doc_id, new_domain_name, user=user)
                if component:
                    new_app_components[original_doc_id] = component

            for doc_id in get_doc_ids_in_domain_by_class(self.name, FixtureDataType):
                if copy_by_id and doc_id not in copy_by_id:
                    continue
                component = self.copy_component(
                    'FixtureDataType', doc_id, new_domain_name, user=user)
                copy_data_items(doc_id, component._id)

            if share_reminders:
                for doc_id in get_doc_ids_in_domain_by_class(self.name, CaseReminderHandler):
                    self.copy_component(
                        'CaseReminderHandler', doc_id, new_domain_name, user=user)
            if share_user_roles:
                for doc_id in get_doc_ids_in_domain_by_class(self.name, UserRole):
                    self.copy_component('UserRole', doc_id, new_domain_name, user=user)

        if user:
            def add_dom_to_user(user):
                user.add_domain_membership(new_domain_name, is_admin=True)
            apply_update(user, add_dom_to_user)

        def update_events(handler):
            """
            Change the form_unique_id to the proper form for each event in
            a newly copied CaseReminderHandler
            """
            from corehq.apps.app_manager.models import FormBase
            for event in handler.events:
                if not event.form_unique_id:
                    continue
                form = FormBase.get_form(event.form_unique_id)
                form_app = form.get_app()
                m_index, f_index = form_app.get_form_location(form.unique_id)
                form_copy = new_app_components[form_app._id].get_module(m_index).get_form(f_index)
                event.form_unique_id = form_copy.unique_id

        def update_for_copy(handler):
            # Copied reminders start out deactivated.
            handler.active = False
            update_events(handler)

        if share_reminders:
            for handler in CaseReminderHandler.get_handlers(new_domain_name):
                apply_update(handler, update_for_copy)

        return new_domain

    def reminder_should_be_copied(self, handler):
        from corehq.apps.reminders.models import ON_DATETIME
        # Skip one-off datetime reminders and group-targeted reminders.
        return (handler.start_condition_type != ON_DATETIME and
                handler.user_group_id is None)

    def copy_component(self, doc_type, id, new_domain_name, user=None):
        # Copies a single component document (app, role, reminder, fixture)
        # into new_domain_name; returns the copy, or None if skipped.
        from corehq.apps.app_manager.models import import_app
        from corehq.apps.users.models import UserRole
        from corehq.apps.reminders.models import CaseReminderHandler
        from corehq.apps.fixtures.models import FixtureDataType, FixtureDataItem

        str_to_cls = {
            'UserRole': UserRole,
            'CaseReminderHandler': CaseReminderHandler,
            'FixtureDataType': FixtureDataType,
            'FixtureDataItem': FixtureDataItem,
        }
        if doc_type in ('Application', 'RemoteApp'):
            new_doc = import_app(id, new_domain_name)
            new_doc.copy_history.append(id)
            new_doc.case_sharing = False
            # when copying from app-docs that don't have
            # unique_id attribute on Modules
            new_doc.ensure_module_unique_ids(should_save=False)
        else:
            cls = str_to_cls[doc_type]
            db = cls.get_db()
            if doc_type == 'CaseReminderHandler':
                cur_doc = cls.get(id)
                if not self.reminder_should_be_copied(cur_doc):
                    return None

            new_id = db.copy_doc(id)['id']

            new_doc = cls.get(new_id)

            for field in self._dirty_fields:
                if hasattr(new_doc, field):
                    delattr(new_doc, field)

            if hasattr(cls, '_meta_fields'):
                for field in cls._meta_fields:
                    if not field.startswith('_') and hasattr(new_doc, field):
                        delattr(new_doc, field)

            new_doc.domain = new_domain_name

            if doc_type == 'FixtureDataType':
                new_doc.copy_from = id
                new_doc.is_global = True

        if self.is_snapshot and doc_type == 'Application':
            new_doc.prepare_multimedia_for_exchange()

        new_doc.save()
        return new_doc

    def save_snapshot(self, share_reminders, copy_by_id=None):
        # Creates a new snapshot copy of this domain and makes it the
        # snapshot head; returns None if a name could not be generated.
        if self.is_snapshot:
            return self
        else:
            try:
                copy = self.save_copy(
                    copy_by_id=copy_by_id, share_reminders=share_reminders,
                    share_user_roles=False)
            except NameUnavailableException:
                return None
            copy.is_snapshot = True
            head = self.snapshots(limit=1).first()
            if head and head.snapshot_head:
                head.snapshot_head = False
                head.save()
            copy.snapshot_head = True
            copy.snapshot_time = datetime.utcnow()
            del copy.deployment
            copy.save()
            return copy

    def snapshots(self, **view_kwargs):
        # Snapshots of this domain, newest first (descending view).
        return Domain.view('domain/snapshots',
            startkey=[self._id, {}],
            endkey=[self._id],
            include_docs=True,
            reduce=False,
            descending=True,
            **view_kwargs
        )

    @memoized
    def published_snapshot(self):
        snapshots = self.snapshots().all()
        for snapshot in snapshots:
            if snapshot.published:
                return snapshot
        return None

    def update_deployment(self, **kwargs):
        self.deployment.update(kwargs)
        self.save()

    def update_internal(self, **kwargs):
        self.internal.update(kwargs)
        self.save()

    def display_name(self):
        if self.is_snapshot:
            return "Snapshot of %s" % self.copied_from.display_name()
        return self.hr_name or self.name

    def long_display_name(self):
        if self.is_snapshot:
            return format_html("Snapshot of {}", self.copied_from.display_name())
        return self.hr_name or self.name

    # str() of a Domain is its (HTML-escaped) long display name.
    __str__ = long_display_name

    def get_license_display(self):
        return LICENSES.get(self.license)

    def get_license_url(self):
        return LICENSE_LINKS.get(self.license)

    def copies(self):
        # Domains copied from this snapshot.
        return Domain.view('domain/copied_from_snapshot', key=self._id, include_docs=True)

    def copies_of_parent(self):
        # Domains copied from any snapshot of the domain this was copied from.
        return Domain.view('domain/copied_from_snapshot', keys=[s._id for s in self.copied_from.snapshots()], include_docs=True)

    def delete(self):
        self._pre_delete()
        super(Domain, self).delete()

    def _pre_delete(self):
        # Gives listeners a chance to veto (by raising) and to contribute
        # extra deletion operations, then bulk-deletes all associated docs.
        from corehq.apps.domain.signals import commcare_domain_pre_delete
        from corehq.apps.domain.deletion import apply_deletion_operations

        dynamic_deletion_operations = []
        results = commcare_domain_pre_delete.send_robust(sender='domain', domain=self)
        for result in results:
            response = result[1]
            if isinstance(response, Exception):
                raise DomainDeleteException(u"Error occurred during domain pre_delete {}: {}".format(self.name, str(response)))
            elif response:
                assert isinstance(response, list)
                dynamic_deletion_operations.extend(response)

        # delete all associated objects
        for db, related_doc_ids in get_all_doc_ids_for_domain_grouped_by_db(self.name):
            iter_bulk_delete(db, related_doc_ids, chunksize=500)

        apply_deletion_operations(self.name, dynamic_deletion_operations)

    def all_media(self, from_apps=None):
        # todo add documentation or refactor
        from corehq.apps.hqmedia.models import CommCareMultimedia
        # For snapshots, read media from the original (copied-from) domain.
        dom_with_media = self if not self.is_snapshot else self.copied_from

        if self.is_snapshot:
            app_ids = [app.copied_from.get_id for app in self.full_applications()]
            if from_apps:
                from_apps = set([a_id for a_id in app_ids if a_id in from_apps])
            else:
                from_apps = app_ids

        if from_apps:
            media = []
            media_ids = set()
            apps = [app for app in dom_with_media.full_applications() if app.get_id in from_apps]
            for app in apps:
                if app.doc_type != 'Application':
                    continue
                for _, m in app.get_media_objects():
                    if m.get_id not in media_ids:
                        media.append(m)
                        media_ids.add(m.get_id)
            return media

        return CommCareMultimedia.view('hqmedia/by_domain', key=dom_with_media.name, include_docs=True).all()

    def most_restrictive_licenses(self, apps_to_check=None):
        from corehq.apps.hqmedia.utils import most_restrictive
        licenses = [m.license['type'] for m in self.all_media(from_apps=apps_to_check) if m.license]
        return most_restrictive(licenses)

    @classmethod
    def get_module_by_name(cls, domain_name):
        """
        import and return the python module corresponding to domain_name, or
        None if it doesn't exist.
        """
        from corehq.apps.domain.utils import get_domain_module_map
        module_name = get_domain_module_map().get(domain_name, domain_name)

        try:
            return import_module(module_name) if module_name else None
        except ImportError:
            return None

    @property
    @memoized
    def commtrack_settings(self):
        # this import causes some dependency issues so lives in here
        from corehq.apps.commtrack.models import CommtrackConfig
        if self.commtrack_enabled:
            return CommtrackConfig.for_domain(self.name)
        else:
            return None

    @property
    def has_custom_logo(self):
        return (self['_attachments'] and
                LOGO_ATTACHMENT in self['_attachments'])

    def get_custom_logo(self):
        # Returns (logo bytes, content type) or None if no logo is attached.
        if not self.has_custom_logo:
            return None

        return (
            self.fetch_attachment(LOGO_ATTACHMENT),
            self['_attachments'][LOGO_ATTACHMENT]['content_type']
        )

    def get_case_display(self, case):
        """Get the properties display definition for a given case"""
        return self.case_display.case_details.get(case.type)

    def get_form_display(self, form):
        """Get the properties display definition for a given XFormInstance"""
        return self.case_display.form_details.get(form.xmlns)

    @property
    def total_downloads(self):
        """
        Returns the total number of downloads from every snapshot created
        from this domain
        """
        from corehq.apps.domain.dbaccessors import count_downloads_for_all_snapshots
        return count_downloads_for_all_snapshots(self.get_id)

    @property
    @memoized
    def download_count(self):
        """
        Updates and returns the total number of downloads from every sister
        snapshot.
        """
        if self.is_snapshot:
            self.full_downloads = self.copied_from.total_downloads
        return self.full_downloads

    @property
    @memoized
    def published_by(self):
        from corehq.apps.users.models import CouchUser
        pb_id = self.cda.user_id
        return CouchUser.get_by_user_id(pb_id) if pb_id else None

    @property
    def name_of_publisher(self):
        return self.published_by.human_friendly_name if self.published_by else ""

    @property
    def location_types(self):
        from corehq.apps.locations.models import LocationType
        return LocationType.objects.filter(domain=self.name).all()

    @memoized
    def has_privilege(self, privilege):
        from corehq.apps.accounting.utils import domain_has_privilege
        return domain_has_privilege(self, privilege)

    @property
    @memoized
    def uses_locations(self):
        from corehq import privileges
        from corehq.apps.locations.models import LocationType
        return (self.has_privilege(privileges.LOCATIONS)
                and (self.commtrack_enabled
                     or LocationType.objects.filter(domain=self.name).exists()))

    @property
    def supports_multiple_locations_per_user(self):
        """
        This method is a wrapper around the toggle that
        enables multiple location functionality. Callers of this
        method should know that this is special functionality
        left around for special applications, and not a feature
        flag that should be set normally.
        """
        return toggles.MULTIPLE_LOCATIONS_PER_USER.enabled(self.name)

    def convert_to_commtrack(self):
        """
        One-stop-shop to make a domain CommTrack
        """
        from corehq.apps.commtrack.util import make_domain_commtrack
        make_domain_commtrack(self)

    def clear_caches(self):
        # Invalidate every quickcache entry keyed on this domain's name.
        from .utils import domain_restricts_superusers
        super(Domain, self).clear_caches()
        self.get_by_name.clear(self.__class__, self.name)
        self.is_secure_session_required.clear(self.name)
        domain_restricts_superusers.clear(self.name)
class DataSourceMeta(DocumentSchema): build = SchemaProperty(DataSourceBuildInformation) # If this is a linked datasource, this is the ID of the datasource this pulls from master_id = StringProperty()
class OpenmrsRepeater(CaseRepeater):
    """
    ``OpenmrsRepeater`` is responsible for updating OpenMRS patients
    with changes made to cases in CommCare. It is also responsible for
    creating OpenMRS "visits", "encounters" and "observations" when a
    corresponding visit form is submitted in CommCare.

    The ``OpenmrsRepeater`` class is different from most repeater
    classes in three details:

    1. It has a case type and it updates the OpenMRS equivalent of
       cases like the ``CaseRepeater`` class, but it reads forms like
       the ``FormRepeater`` class. So it subclasses ``CaseRepeater``
       but its payload format is ``form_json``.

    2. It makes many API calls for each payload.

    3. It can have a location.
    """
    class Meta(object):
        app_label = 'repeaters'

    include_app_id_param = False
    friendly_name = _("Forward to OpenMRS")
    payload_generator_classes = (FormRepeaterJsonPayloadGenerator,)

    location_id = StringProperty(default='')
    openmrs_config = SchemaProperty(OpenmrsConfig)

    _has_config = True

    # self.white_listed_case_types must have exactly one case type set
    # for Atom feed integration to add cases for OpenMRS patients.
    # self.location_id must be set to determine their case owner. The
    # owner is set to the first CommCareUser instance found at that
    # location.
    atom_feed_enabled = BooleanProperty(default=False)
    atom_feed_status = SchemaDictProperty(AtomFeedStatus)

    # (A redundant ``__init__`` that only delegated to super() was removed;
    # the inherited constructor is used directly.)

    def __eq__(self, other):
        # Two repeaters are equal when they are the same Couch document.
        return (
            isinstance(other, self.__class__) and
            self.get_id == other.get_id
        )

    def __hash__(self):
        # Defining __eq__ sets __hash__ to None in Python 3, which would make
        # instances unhashable; restore a hash consistent with __eq__
        # (equal doc ids => equal hashes).
        return hash(self.get_id)

    def __str__(self):
        return Repeater.__str__(self)

    @classmethod
    def wrap(cls, data):
        # Migrate the legacy single-feed polling attributes into the
        # per-feed ``atom_feed_status`` mapping before wrapping.
        if 'atom_feed_last_polled_at' in data:
            data['atom_feed_status'] = {
                ATOM_FEED_NAME_PATIENT: {
                    'last_polled_at': data.pop('atom_feed_last_polled_at'),
                    'last_page': data.pop('atom_feed_last_page', None),
                }
            }
        return super(OpenmrsRepeater, cls).wrap(data)

    @cached_property
    def requests(self):
        # Used by atom_feed module and views that don't have a payload
        # associated with the request
        return get_requests(self)

    @cached_property
    def first_user(self):
        return get_one_commcare_user_at_location(self.domain, self.location_id)

    @memoized
    def payload_doc(self, repeat_record):
        return FormAccessors(repeat_record.domain).get_form(
            repeat_record.payload_id)

    @property
    def form_class_name(self):
        """
        The class name used to determine which edit form to use
        """
        return self.__class__.__name__

    @classmethod
    def available_for_domain(cls, domain):
        # Gated behind the OPENMRS_INTEGRATION feature toggle.
        return OPENMRS_INTEGRATION.enabled(domain)

    def allowed_to_forward(self, payload):
        """
        Forward the payload if ...

        * it did not come from OpenMRS, and
        * CaseRepeater says it's OK for the case types and users of any
          of the payload's cases, and
        * this repeater forwards to the right OpenMRS server for any of
          the payload's cases.

        :param payload: An XFormInstance (not a case)
        """
        if payload.xmlns == XMLNS_OPENMRS:
            # payload came from OpenMRS. Don't send it back.
            return False

        case_blocks = extract_case_blocks(payload)
        case_ids = [case_block['@case_id'] for case_block in case_blocks]
        cases = CaseAccessors(payload.domain).get_cases(case_ids, ordered=True)
        if not any(CaseRepeater.allowed_to_forward(self, case) for case in cases):
            # If none of the case updates in the payload are allowed to
            # be forwarded, drop it.
            return False

        if not self.location_id:
            # If this repeater does not have a location, all payloads
            # should go to it.
            return True

        repeaters = [
            repeater
            for case in cases
            for repeater in get_case_location_ancestor_repeaters(case)
        ]
        # If this repeater points to the wrong OpenMRS server for this
        # payload then let the right repeater handle it.
        return self in repeaters

    def get_payload(self, repeat_record):
        # The superclass serializes form JSON to a string; parse it back
        # into a dict for send_request().
        payload = super(OpenmrsRepeater, self).get_payload(repeat_record)
        return json.loads(payload)

    def send_request(self, repeat_record, payload):
        # Gather every configured value source so the case properties they
        # reference are included in the case trigger info.
        value_source_configs: Iterable[JsonDict] = chain(
            self.openmrs_config.case_config.patient_identifiers.values(),
            self.openmrs_config.case_config.person_properties.values(),
            self.openmrs_config.case_config.person_preferred_name.values(),
            self.openmrs_config.case_config.person_preferred_address.values(),
            self.openmrs_config.case_config.person_attributes.values(),
        )
        case_trigger_infos = get_relevant_case_updates_from_form_json(
            self.domain, payload,
            case_types=self.white_listed_case_types,
            extra_fields=[
                conf["case_property"]
                for conf in value_source_configs
                if "case_property" in conf
            ],
            form_question_values=get_form_question_values(payload),
        )
        requests = get_requests(self, repeat_record.payload_id)
        try:
            response = send_openmrs_data(
                requests,
                self.domain,
                payload,
                self.openmrs_config,
                case_trigger_infos,
            )
        except Exception as err:
            # Report the failure to the remote-logging wrapper and surface a
            # 400 so the repeat record is marked failed rather than crashing.
            requests.notify_exception(str(err))
            return RepeaterResponse(400, 'Bad Request', pformat_json(str(err)))
        return response
class DataSourceConfiguration(UnicodeMixIn, CachedCouchDocumentMixin, Document):
    """
    A data source configuration. These map 1:1 with database tables that get created.
    Each data source can back an arbitrary number of reports.
    """
    domain = StringProperty(required=True)
    engine_id = StringProperty(default=UCR_ENGINE_ID)
    es_index_settings = SchemaProperty(ElasticSearchIndexSettings)
    backend_id = StringProperty(default=UCR_SQL_BACKEND)
    # Doc type this data source is built from, e.g. 'CommCareCase'.
    referenced_doc_type = StringProperty(required=True)
    table_id = StringProperty(required=True)
    display_name = StringProperty()
    # Optional expression that turns one document into many items (rows).
    base_item_expression = DictProperty()
    # User-configured filter spec applied on top of the built-in filters.
    configured_filter = DictProperty()
    configured_indicators = ListProperty()
    # Reusable named expression/filter specs referenced by the config.
    named_expressions = DictProperty()
    named_filters = DictProperty()
    meta = SchemaProperty(DataSourceMeta)
    is_deactivated = BooleanProperty(default=False)
    last_modified = DateTimeProperty()
    asynchronous = BooleanProperty(default=False)
    sql_column_indexes = SchemaListProperty(SQLColumnIndexes)

    class Meta(object):
        # prevent JsonObject from auto-converting dates etc.
        string_conversions = ()

    def __unicode__(self):
        return u'{} - {}'.format(self.domain, self.display_name)

    def save(self, **params):
        # Stamp modification time on every save.
        self.last_modified = datetime.utcnow()
        super(DataSourceConfiguration, self).save(**params)

    def filter(self, document):
        """Return True if `document` belongs in this data source."""
        filter_fn = self._get_main_filter()
        return filter_fn(document, EvaluationContext(document, 0))

    def deleted_filter(self, document):
        """Return truthy if `document` is a deleted doc of the referenced type."""
        filter_fn = self._get_deleted_filter()
        return filter_fn and filter_fn(document, EvaluationContext(document, 0))

    @memoized
    def _get_main_filter(self):
        return self._get_filter([self.referenced_doc_type])

    @memoized
    def _get_deleted_filter(self):
        return self._get_filter(get_deleted_doc_types(self.referenced_doc_type),
                                include_configured=False)

    def _get_filter(self, doc_types, include_configured=True):
        """
        Build a compiled filter matching this domain plus any of `doc_types`,
        AND-ed with the configured filter when `include_configured`.
        Returns None when `doc_types` is empty.
        """
        if not doc_types:
            return None

        extras = (
            [self.configured_filter]
            if include_configured and self.configured_filter else []
        )
        built_in_filters = [
            self._get_domain_filter_spec(),
            {
                'type': 'or',
                'filters': [
                    {
                        'type': 'property_match',
                        'property_name': 'doc_type',
                        'property_value': doc_type,
                    }
                    for doc_type in doc_types
                ],
            },
        ]
        return FilterFactory.from_spec(
            {
                'type': 'and',
                'filters': built_in_filters + extras,
            },
            context=self.get_factory_context(),
        )

    def _get_domain_filter_spec(self):
        return {
            'type': 'property_match',
            'property_name': 'domain',
            'property_value': self.domain,
        }

    @property
    @memoized
    def named_expression_objects(self):
        """
        Compile `named_expressions` into expression objects.

        Named expressions may reference each other, so compile in passes:
        each pass compiles whatever now resolves, and we fail only when a
        full pass makes no progress (a genuinely broken or circular spec).
        """
        named_expression_specs = deepcopy(self.named_expressions)
        named_expressions = {}
        spec_error = None
        while named_expression_specs:
            number_generated = 0
            # Snapshot items() into a list: we delete keys while looping,
            # which would otherwise raise RuntimeError in Python 3.
            for name, expression in list(named_expression_specs.items()):
                try:
                    named_expressions[name] = ExpressionFactory.from_spec(
                        expression,
                        FactoryContext(named_expressions=named_expressions,
                                       named_filters={})
                    )
                    number_generated += 1
                    del named_expression_specs[name]
                except BadSpecError as bad_spec_error:
                    # maybe a nested name resolution issue, try again on the
                    # next pass. Capture into an outer variable because the
                    # `as` name is unbound after the except block in Python 3.
                    spec_error = bad_spec_error
            if number_generated == 0 and named_expression_specs:
                # we unsuccessfully generated anything on this pass and there
                # are still unresolved references. we have to fail.
                assert spec_error is not None
                raise spec_error
        return named_expressions

    @property
    @memoized
    def named_filter_objects(self):
        return {
            name: FilterFactory.from_spec(
                filter, FactoryContext(self.named_expression_objects, {}))
            for name, filter in self.named_filters.items()
        }

    def get_factory_context(self):
        return FactoryContext(self.named_expression_objects,
                              self.named_filter_objects)

    @property
    @memoized
    def default_indicators(self):
        """Indicators every data source gets: doc_id, inserted_at, and
        repeat_iteration when a base item expression is configured."""
        default_indicators = [IndicatorFactory.from_spec({
            "column_id": "doc_id",
            "type": "expression",
            "display_name": "document id",
            "datatype": "string",
            "is_nullable": False,
            "is_primary_key": True,
            "expression": {
                "type": "root_doc",
                "expression": {
                    "type": "property_name",
                    "property_name": "_id"
                }
            }
        }, self.get_factory_context())]

        default_indicators.append(IndicatorFactory.from_spec({
            "type": "inserted_at",
        }, self.get_factory_context()))

        if self.base_item_expression:
            default_indicators.append(IndicatorFactory.from_spec({
                "type": "repeat_iteration",
            }, self.get_factory_context()))

        return default_indicators

    @property
    @memoized
    def indicators(self):
        """All indicators (default + configured) as one CompoundIndicator."""
        return CompoundIndicator(
            self.display_name,
            self.default_indicators + [
                IndicatorFactory.from_spec(indicator, self.get_factory_context())
                for indicator in self.configured_indicators
            ],
            None,
        )

    @property
    @memoized
    def parsed_expression(self):
        if self.base_item_expression:
            return ExpressionFactory.from_spec(
                self.base_item_expression, context=self.get_factory_context())
        return None

    def get_columns(self):
        return self.indicators.get_columns()

    def get_items(self, document, eval_context=None):
        """
        Return the list of items (rows-to-be) this document contributes:
        the document itself, or whatever base_item_expression yields.
        Returns [] when the document doesn't match the filter.
        """
        if self.filter(document):
            if not self.base_item_expression:
                return [document]
            else:
                result = self.parsed_expression(document, eval_context)
                if result is None:
                    return []
                elif isinstance(result, list):
                    return result
                else:
                    return [result]
        else:
            return []

    def get_all_values(self, doc, eval_context=None):
        """Evaluate all indicators for every item of `doc`; one row per item."""
        if not eval_context:
            eval_context = EvaluationContext(doc)
        rows = []
        for item in self.get_items(doc, eval_context):
            indicators = self.indicators.get_values(item, eval_context)
            rows.append(indicators)
            eval_context.increment_iteration()
        return rows

    def get_report_count(self):
        """
        Return the number of ReportConfigurations that reference this data source.
        """
        return ReportConfiguration.count_by_data_source(self.domain, self._id)

    def validate(self, required=True):
        """Validate the document; raises BadSpecError for duplicate column
        ids or an invalid referenced_doc_type."""
        super(DataSourceConfiguration, self).validate(required)
        # these two properties implicitly call other validation
        self._get_main_filter()
        self._get_deleted_filter()

        # validate indicators and column uniqueness
        columns = [c.id for c in self.indicators.get_columns()]
        unique_columns = set(columns)
        if len(columns) != len(unique_columns):
            # Remove one occurrence of each id; what remains are the dupes.
            for column in set(columns):
                columns.remove(column)
            raise BadSpecError(
                _('Report contains duplicate column ids: {}').format(
                    ', '.join(set(columns))))

        if self.referenced_doc_type not in VALID_REFERENCED_DOC_TYPES:
            raise BadSpecError(
                _('Report contains invalid referenced_doc_type: {}').format(
                    self.referenced_doc_type))

        # Implicitly validates base_item_expression too.
        self.parsed_expression

    @classmethod
    def by_domain(cls, domain):
        return get_datasources_for_domain(domain)

    @classmethod
    def all_ids(cls):
        return [
            res['id'] for res in cls.get_db().view(
                'userreports/data_sources_by_build_info',
                reduce=False, include_docs=False)
        ]

    @classmethod
    def all(cls):
        for result in iter_docs(cls.get_db(), cls.all_ids()):
            yield cls.wrap(result)

    @property
    def is_static(self):
        return id_is_static(self._id)

    def deactivate(self):
        # Static data sources are defined in code and cannot be deactivated.
        if not self.is_static:
            self.is_deactivated = True
            self.save()
            get_indicator_adapter(self).drop_table()

    def get_es_index_settings(self):
        es_index_settings = self.es_index_settings.to_json()
        # doc_type is JsonObject bookkeeping, not an ES setting.
        es_index_settings.pop('doc_type')
        return {"settings": es_index_settings}

    def get_case_type_or_xmlns_filter(self):
        """Returns a list of case types or xmlns from the filter of this data source.

        If this can't figure out the case types or xmlns's that filter, then returns [None]
        """
        def _get_property_value(config_filter, prop_name):
            # Only simple eq/in boolean_expression filters on `prop_name`
            # are recognized; anything else is opaque -> [None].
            if config_filter.get('type') != 'boolean_expression':
                return [None]
            if config_filter['operator'] not in ('eq', 'in'):
                return [None]
            expression = config_filter['expression']
            if (expression['type'] == 'property_name'
                    and expression['property_name'] == prop_name):
                prop_value = config_filter['property_value']
                if not isinstance(prop_value, list):
                    prop_value = [prop_value]
                return prop_value
            return [None]

        if self.referenced_doc_type == 'CommCareCase':
            prop_value = _get_property_value(self.configured_filter, 'type')
            if prop_value:
                return prop_value
        elif self.referenced_doc_type == 'XFormInstance':
            prop_value = _get_property_value(self.configured_filter, 'xmlns')
            if prop_value:
                return prop_value

        return [None]
class OpenmrsConfig(DocumentSchema): openmrs_provider = StringProperty(required=False) case_config = SchemaProperty(OpenmrsCaseConfig) form_configs = ListProperty(OpenmrsFormConfig)
class ReportConfiguration(UnicodeMixIn, CachedCouchDocumentMixin, Document):
    """
    A report configuration. These map 1:1 with reports that show up in the UI.
    """
    domain = StringProperty(required=True)
    # Whether the report is shown in the UI.
    visible = BooleanProperty(default=True)
    # ID of the DataSourceConfiguration backing this report.
    config_id = StringProperty(required=True)
    title = StringProperty()
    description = StringProperty()
    aggregation_columns = StringListProperty()
    # Filter specs (dicts) compiled lazily by `ui_filters`.
    filters = ListProperty()
    # Column specs (dicts) compiled lazily by `report_columns`.
    columns = ListProperty()
    configured_charts = ListProperty()
    sort_expression = ListProperty()
    report_meta = SchemaProperty(ReportMeta)

    def __unicode__(self):
        return u'{} - {}'.format(self.domain, self.title)

    @property
    @memoized
    def config(self):
        """The backing data source; raises BadSpecError if it's gone."""
        try:
            return DataSourceConfiguration.get(self.config_id)
        except ResourceNotFound:
            raise BadSpecError(
                _('The data source referenced by this report could not be found.')
            )

    @property
    @memoized
    def report_columns(self):
        return [ReportColumnFactory.from_spec(c) for c in self.columns]

    @property
    @memoized
    def ui_filters(self):
        return [ReportFilterFactory.from_spec(f) for f in self.filters]

    @property
    @memoized
    def charts(self):
        # `_obj` is the raw dict behind the JsonObject wrapper — the chart
        # factory wants the plain spec.
        return [ChartFactory.from_spec(g._obj) for g in self.configured_charts]

    @property
    @memoized
    def sort_order(self):
        return [ReportOrderByFactory.from_spec(e) for e in self.sort_expression]

    @property
    def table_id(self):
        return self.config.table_id

    def get_ui_filter(self, filter_slug):
        # Return the compiled filter with this slug, or None if absent.
        for filter in self.ui_filters:
            if filter.name == filter_slug:
                return filter
        return None

    def get_languages(self):
        """
        Return the languages used in this report's column and filter display properties.
        Note that only explicitly identified languages are returned. So, if the
        display properties are all strings, "en" would not be returned.
        """
        langs = set()
        for item in self.columns + self.filters:
            if isinstance(item['display'], dict):
                langs |= set(item['display'].keys())
        return langs

    def validate(self, required=True):
        """Validate the report spec; raises BadSpecError on duplicate
        filter slugs or column ids, or on specs the factories reject."""
        def _check_for_duplicates(supposedly_unique_list, error_msg):
            # http://stackoverflow.com/questions/9835762/find-and-list-duplicates-in-python-list
            duplicate_items = set(
                [item for item in supposedly_unique_list
                 if supposedly_unique_list.count(item) > 1]
            )
            if len(duplicate_items) > 0:
                raise BadSpecError(
                    _(error_msg).format(', '.join(sorted(duplicate_items)))
                )

        super(ReportConfiguration, self).validate(required)

        # check duplicates before passing to factory since it chokes on them
        _check_for_duplicates(
            [FilterSpec.wrap(f).slug for f in self.filters],
            'Filters cannot contain duplicate slugs: {}',
        )
        _check_for_duplicates(
            [column_id
             for c in self.report_columns
             for column_id in c.get_column_ids()],
            'Columns cannot contain duplicate column_ids: {}',
        )

        # these calls all implicitly do validation
        ReportFactory.from_spec(self)
        self.ui_filters
        self.charts
        self.sort_order

    @classmethod
    def by_domain(cls, domain):
        return get_report_configs_for_domain(domain)

    @classmethod
    def all(cls):
        return get_all_report_configs()