Example 1
def _log_failed_periodic_data(email, message):
    soft_assert(to='{}@{}'.format('bbuczyk', 'dimagi.com'))(
        False, "ANALYTICS - Failed to sync periodic data", {
            'user_email': email,
            'message': message,
        }
    )
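All of these snippets share one pattern: soft_assert(...) returns a callable that, when passed a falsy condition, emails the configured recipients instead of raising, and returns the condition's truthiness (Examples 14, 21, 23, and 44 branch on that return value; Example 4 invokes the returned object through a .call method). A minimal sketch of that interface, inferred only from the call sites in this collection (the real implementation is corehq.util.soft_assert; the parameter defaults and internals below are assumptions):

# Sketch of the soft_assert interface inferred from the call sites in these
# examples; the real implementation lives in corehq.util.soft_assert.
# Parameter defaults and the notification behavior here are assumptions.
def soft_assert(to=None, notify_admins=False, send_to_ops=True,
                exponential_backoff=True, fail_if_debug=False, skip_frames=0):
    def _assert(condition, msg=None, extra=None):
        if not condition:
            pass  # hypothetical: notify `to`, admins, and/or ops with msg/extra
        return bool(condition)
    _assert.call = _assert  # Example 4 invokes the returned object via .call()
    return _assert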
Example 2
    def _rebuild_sql_tables(self, adapters):
        tables_by_engine = defaultdict(dict)
        for adapter in adapters:
            sql_adapter = get_indicator_adapter(adapter.config)
            try:
                tables_by_engine[sql_adapter.engine_id][sql_adapter.get_table().name] = sql_adapter
            except BadSpecError:
                _soft_assert = soft_assert(to='{}@{}'.format('jemord', 'dimagi.com'))
                _soft_assert(False, "Broken data source {}".format(adapter.config.get_id))

        _assert = soft_assert(notify_admins=True)
        _notify_rebuild = lambda msg, obj: _assert(False, msg, obj)

        for engine_id, table_map in tables_by_engine.items():
            engine = connection_manager.get_engine(engine_id)
            table_names = list(table_map)
            with engine.begin() as connection:
                migration_context = get_migration_context(connection, table_names)
                raw_diffs = compare_metadata(migration_context, metadata)
                diffs = reformat_alembic_diffs(raw_diffs)

            tables_to_rebuild = get_tables_to_rebuild(diffs, table_names)
            for table_name in tables_to_rebuild:
                sql_adapter = table_map[table_name]
                if not sql_adapter.config.is_static:
                    try:
                        self.rebuild_table(sql_adapter)
                    except TableRebuildError as e:
                        _notify_rebuild(six.text_type(e), sql_adapter.config.to_json())
                else:
                    self.rebuild_table(sql_adapter)

            tables_to_migrate = get_tables_to_migrate(diffs, table_names)
            tables_to_migrate -= tables_to_rebuild
            migrate_tables(engine, raw_diffs, tables_to_migrate)
Example 3
def JSON(obj):
    # json.dumps does not properly convert QueryDict array parameter to json
    if isinstance(obj, QueryDict):
        obj = dict(obj)
    try:
        return mark_safe(escape_script_tags(json.dumps(obj, default=json_handler)))
    except TypeError as e:
        msg = ("Unserializable data was sent to the `|JSON` template tag.  "
               "If DEBUG is off, Django will silently swallow this error.  "
               "{}".format(e.message))
        soft_assert(notify_admins=True)(False, msg)
        raise e
Example 4
def view_app(request, domain, app_id=None):
    from corehq.apps.app_manager.views.view_generic import view_generic
    # redirect old m=&f= urls
    module_id = request.GET.get('m', None)
    form_id = request.GET.get('f', None)
    if module_id or form_id:
        soft_assert('{}@{}'.format('skelly', 'dimagi.com')).call(
            False, 'old m=&f= url still in use'
        )
        return back_to_main(request, domain, app_id=app_id, module_id=module_id,
                            form_id=form_id)

    return view_generic(request, domain, app_id)
Example 5
def _log_media_deletion(app, deleted_media):
    # https://dimagi-dev.atlassian.net/browse/ICDS-2
    formatted_media = [
        {'path': path, 'map_item': map_item.to_json(), 'media': media.as_dict() if media else None}
        for path, map_item, media in deleted_media
    ]
    soft_assert(to='{}@{}'.format('skelly', 'dimagi.com'))(
        False, "path deleted from multimedia map", json.dumps({
            'domain': app.domain,
            'app_id': app._id,
            'deleted_media': list(formatted_media),
        }, indent=4)
    )
Example 6
    def finalize(self):
        # When serving a cached initial payload we should still generate a new sync log
        # This is to avoid issues with multiple devices ending up syncing to the same
        # sync log, which causes all kinds of assertion errors when the two devices
        # touch the same cases
        if self and self.is_initial:
            try:
                file_reference = copy_payload_and_synclog_and_get_new_file(self.payload)
                self.payload = file_reference.file
                self.payload_path = file_reference.path
            except Exception as e:
                # don't fail hard if anything goes wrong since this is an edge case optimization
                soft_assert(to=['czue' + '@' + 'dimagi.com'])(False, u'Error finalizing cached log: {}'.format(e))
Example 7
def validate_phone_datetime(datetime_string, none_ok=False):
    if none_ok:
        if datetime_string is None:
            return None
        if datetime_string == '':
            soft_assert('@'.join(['droberts', 'dimagi.com']))(
                False,
                'phone datetime should never be empty'
            )
            return None
    try:
        return iso8601.parse_date(datetime_string)
    except iso8601.ParseError:
        raise PhoneDateValueError('{!r}'.format(datetime_string))
Example 8
    def _rebuild_sql_tables(self, adapters):
        # todo move this code to sql adapter rebuild_if_necessary
        tables_by_engine = defaultdict(dict)
        for adapter in adapters:
            sql_adapter = get_indicator_adapter(adapter.config)
            tables_by_engine[sql_adapter.engine_id][sql_adapter.get_table().name] = sql_adapter

        _assert = soft_assert(to='@'.join(['czue', 'dimagi.com']))
        _notify_cory = lambda msg, obj: _assert(False, msg, obj)

        for engine_id, table_map in tables_by_engine.items():
            engine = connection_manager.get_engine(engine_id)
            with engine.begin() as connection:
                migration_context = get_migration_context(connection, table_map.keys())
                raw_diffs = compare_metadata(migration_context, metadata)
                diffs = reformat_alembic_diffs(raw_diffs)

            tables_to_rebuild = get_tables_to_rebuild(diffs, table_map.keys())
            for table_name in tables_to_rebuild:
                sql_adapter = table_map[table_name]
                if not sql_adapter.config.is_static:
                    try:
                        rev_before_rebuild = sql_adapter.config.get_db().get_rev(sql_adapter.config._id)
                        self.rebuild_table(sql_adapter)
                    except TableRebuildError as e:
                        _notify_cory(six.text_type(e), sql_adapter.config.to_json())
                else:
                    self.rebuild_table(sql_adapter)
Example 9
def get_filtered_data_for_parsed_params(domain, parsed_params):
    # this code path has multiple forks:
    # 0. if status isn't set (which cory doesn't think is possible) it defaults to filtering by
    #    status "active". otherwise it will set status to be "active" or "deleted" depending
    #    on what's passed in.
    # 1. if status is set, but nothing else is, it will return all forms in apps of that status
    # 2. if status and app_id are set, but nothing else, it will return all forms in that app
    # 3. if status and app_id and module_id are set, it will return all forms in that module if
    #    the module is valid, otherwise it falls back to the app
    # 4. if status, app_id, module_id, and xmlns are set (which cory doesn't think is possible)
    #    it returns that form.
    deleted = parsed_params.status == PARAM_VALUE_STATUS_DELETED
    _assert = soft_assert(to='@'.join(['czue', 'dimagi.com']))
    if parsed_params.module is not None and parsed_params.get_module_int() is None:
        # todo: remove anytime in 2016
        _assert(False, "module set but not a valid number!")
        return get_form_details_for_app(domain, parsed_params.app_id, deleted=deleted)
    elif parsed_params.most_granular_filter == 'xmlns':
        # todo: remove anytime in 2016
        _assert(False, "got to form ID even though this shouldn't be possible")
        return get_form_details_for_app_and_xmlns(
            domain, parsed_params.app_id, parsed_params.xmlns, deleted=deleted)
    elif parsed_params.most_granular_filter == 'module':
        return get_form_details_for_app_and_module(
            domain, parsed_params.app_id, parsed_params.get_module_int(), deleted=deleted
        )
    elif parsed_params.most_granular_filter == 'app_id':
        return get_form_details_for_app(domain, parsed_params.app_id, deleted=deleted)
    elif parsed_params.most_granular_filter == 'status':
        return get_all_form_details(domain, deleted=deleted)
    else:
        # todo: remove anytime in 2016
        _assert(False, 'most granular filter was a surprising value ({}).'.format(
            parsed_params.most_granular_filter))
        return get_all_form_details(domain)
Example 10
def copy_data_to_backup():
    if settings.SERVER_ENVIRONMENT == 'production':
        # https://<your_username>:<your_password>@commcarehq.cloudant.com/commcarehq
        prod_couchdb_connection = 'https://{username}:{password}@commcarehq.cloudant.com/{database}'.format(
            username=settings.COUCH_USERNAME,
            password=settings.COUCH_PASSWORD,
            database=settings.COUCH_DATABASE_NAME,
        )
        guinea_couchdb_connection = 'https://{username}:{password}@commcarehq.cloudant.com/{database}'.format(
            username=settings.COUCH_USERNAME,
            password=settings.COUCH_PASSWORD,
            database=GUINEA_CONTACT_TRACING_DATABASE,
        )
        last_update = BackupRecord.objects.order_by('last_update')[0].last_update

        call_command('copy_domain',
                     prod_couchdb_connection,
                     GUINEA_CONTACT_TRACING_DOMAIN,
                     guinea_couchdb_connection,
                     **{'since': str(last_update),
                        'run_multi_process': False})

        # A dumb soft assert to make sure I see this working
        _assert = soft_assert(to='{}@{}'.format('tsheffels', 'dimagi.com'),
                              notify_admins=False,
                              exponential_backoff=False)
        _assert(False)

        successful_insert = BackupRecord(last_update=datetime.now())
        successful_insert.save()
Example 11
def send_to_kafka(producer, topic, change_meta):
    def _send_to_kafka():
        producer.send_messages(
            bytes(topic),
            bytes(change_meta.domain.encode('utf-8') if change_meta.domain is not None else None),
            bytes(json.dumps(change_meta.to_json())),
        )

    try:
        try:
            _send_to_kafka()
        except FailedPayloadsError:
            # this is typically an inactivity timeout - which can happen if the feed is low volume
            # just do a simple retry
            _send_to_kafka()
    except LeaderNotAvailableError:
        # kafka seems to be down. sleep a bit to avoid crazy amounts of error spam
        time.sleep(15)
        raise
    except Exception as e:
        _assert = soft_assert(to='@'.join(['czue', 'dimagi.com']))
        _assert(False, 'Problem sending change to kafka {}: {} ({})'.format(
            change_meta.to_json(), e, type(e)
        ))
        raise
Example 12
    def update_user(self, existing_user=None, save=True, **kwargs):
        is_update_successful = False

        # From what I can tell, everything that invokes this method invokes it
        # with a value for existing_user. It also looks like the code below is
        # not behaving properly for mobile workers when existing_user is None.
        # If the soft asserts confirm this isn't ever being passed existing_user=None,
        # I propose making existing_user a required arg and removing the code below
        # that creates the user.
        _assert = soft_assert('@'.join(['gcapalbo', 'dimagi.com']), exponential_backoff=False)
        _assert(existing_user is not None, "existing_user is None")

        if not existing_user and 'email' in self.cleaned_data:
            from django.contrib.auth.models import User
            django_user = User()
            django_user.username = self.cleaned_data['email']
            django_user.save()
            existing_user = CouchUser.from_django_user(django_user)
            existing_user.save()
            is_update_successful = True

        for prop in self.direct_properties:
            setattr(existing_user, prop, self.cleaned_data[prop])
            is_update_successful = True

        if is_update_successful and save:
            existing_user.save()
        return is_update_successful
Example 13
def _migrate_linked_apps(apps, schema_editor):
    app_db = LinkedApplication.get_db()
    linked_apps = get_all_docs_with_doc_types(
        app_db, ['LinkedApplication', 'LinkedApplication-Deleted']
    )
    errors = []
    for app_doc in linked_apps:
        remote_details = None
        remote_url = app_doc.pop('remote_url_base', None)
        if remote_url:
            auth = app_doc.pop('remote_auth', {})
            remote_details = RemoteLinkDetails(
                remote_url,
                auth.get('username'),
                auth.get('api_key'),
            )

        master_domain = app_doc.pop('master_domain', None)
        if not master_domain and not remote_url:
            master_domain = get_app(None, app_doc['master']).domain
        try:
            DomainLink.link_domains(app_doc['domain'], master_domain, remote_details)
        except DomainLinkError as e:
            errors.append(str(e))
        else:
            app_db.save_doc(app_doc)

    _assert = soft_assert('{}@dimagi.com'.format('skelly'), exponential_backoff=False)
    _assert(not errors, 'Errors migrating linked apps to linked domain', {
        'errors': errors
    })
Example 14
def should_sync(domain, last_sync, utcnow=None):
    # definitely sync if we haven't synced before
    if not last_sync or not last_sync.date:
        return True

    # utcnow only used in tests to mock other times
    utcnow = utcnow or datetime.utcnow()

    try:
        timezone = domain.get_default_timezone()
    except pytz.UnknownTimeZoneError:
        timezone = utc

    _assert = soft_assert(to=['droberts' + '@' + 'dimagi.com'])

    last_sync_utc = last_sync.date

    if not _assert(last_sync_utc.tzinfo is None,
                   'last_sync.date should be an offset-naive dt'):
        last_sync_utc = UserTime(last_sync_utc).server_time().done()

    # check if user has already synced today (in local timezone).
    # Indicators only change daily.
    last_sync_local = ServerTime(last_sync_utc).user_time(timezone).done()
    current_date_local = ServerTime(utcnow).user_time(timezone).done()

    if current_date_local.date() != last_sync_local.date():
        return True

    return False
Example 15
def get_indicator_table(indicator_config, metadata, override_table_name=None):
    sql_columns = [column_to_sql(col) for col in indicator_config.get_columns()]
    table_name = override_table_name or get_table_name(indicator_config.domain, indicator_config.table_id)
    columns_by_col_id = {col.database_column_name.decode('utf-8') for col in indicator_config.get_columns()}
    extra_indices = []
    for index in indicator_config.sql_column_indexes:
        if set(index.column_ids).issubset(columns_by_col_id):
            extra_indices.append(Index(
                _custom_index_name(table_name, index.column_ids),
                *index.column_ids
            ))
        else:
            _assert = soft_assert('{}@{}'.format('jemord', 'dimagi.com'))
            _assert(False, "Invalid index specified on {}".format(table_name))
            break
    constraints = [PrimaryKeyConstraint(*indicator_config.pk_columns)]
    columns_and_indices = sql_columns + extra_indices + constraints
    # todo: needed to add extend_existing=True to support multiple calls to this function for the same table.
    # is that valid?
    return sqlalchemy.Table(
        table_name,
        metadata,
        extend_existing=True,
        *columns_and_indices
    )
Example 16
def send_to_kafka(producer, topic, change_meta):
    def _send_to_kafka():
        producer.send_messages(
            bytes(topic),
            bytes(change_meta.domain.encode('utf-8') if change_meta.domain is not None else None),
            bytes(json.dumps(change_meta.to_json())),
        )

    try:
        tries = 3
        for i in range(tries):
            # try a few times because the python kafka libraries can trigger timeouts
            # if they are idle for a while.
            try:
                _send_to_kafka()
                break
            except (FailedPayloadsError, KafkaUnavailableError, LeaderNotAvailableError):
                if i == (tries - 1):
                    # if it's the last try, fail hard
                    raise
    except LeaderNotAvailableError:
        # kafka seems to be down. sleep a bit to avoid crazy amounts of error spam
        time.sleep(15)
        raise
    except Exception as e:
        _assert = soft_assert(notify_admins=True)
        _assert(False, 'Problem sending change to kafka {}: {} ({})'.format(
            change_meta.to_json(), e, type(e)
        ))
        raise
Example 17
    def validate_state(self):
        check_version(self.params.version)
        if self.last_sync_log:
            if self.params.state_hash:
                parsed_hash = CaseStateHash.parse(self.params.state_hash)
                computed_hash = self.last_sync_log.get_state_hash()
                if computed_hash != parsed_hash:
                    # log state error on the sync log
                    self.last_sync_log.had_state_error = True
                    self.last_sync_log.error_date = datetime.utcnow()
                    self.last_sync_log.error_hash = str(parsed_hash)
                    self.last_sync_log.save()

                    exception = BadStateException(
                        server_hash=computed_hash,
                        phone_hash=parsed_hash,
                        case_ids=self.last_sync_log.get_footprint_of_cases_on_phone()
                    )
                    if self.last_sync_log.log_format == LOG_FORMAT_SIMPLIFIED:
                        from corehq.apps.reports.standard.deployments import SyncHistoryReport
                        _assert = soft_assert(to=['czue' + '@' + 'dimagi.com'])
                        sync_history_url = '{}?individual={}'.format(
                            SyncHistoryReport.get_url(self.domain),
                            self.user.user_id
                        )
                        _assert(False, '{}, sync history report: {}'.format(exception, sync_history_url))
                    raise exception
Example 18
    def get_by_name(cls, name, strict=False):
        if not name:
            # get_by_name should never be called with name as None (or '', etc)
            # I fixed the code in such a way that if I raise a ValueError
            # all tests pass and basic pages load,
            # but in order not to break anything in the wild,
            # I'm opting to notify by email if/when this happens
            # but fall back to the previous behavior of returning None
            if settings.DEBUG:
                raise ValueError('%r is not a valid domain name' % name)
            else:
                _assert = soft_assert(notify_admins=True, exponential_backoff=False)
                _assert(False, '%r is not a valid domain name' % name)
                return None

        def _get_by_name(stale=False):
            extra_args = {'stale': settings.COUCH_STALE_QUERY} if stale else {}
            result = cls.view("domain/domains", key=name, reduce=False, include_docs=True, **extra_args).first()
            if not isinstance(result, Domain):
                # A stale view may return a result with no doc if the doc has just been deleted.
                # In this case couchdbkit just returns the raw view result as a dict
                return None
            else:
                return result

        domain = _get_by_name(stale=(not strict))
        if domain is None and not strict:
            # on the off chance this is a brand new domain, try with strict
            domain = _get_by_name(stale=False)
        return domain
Example 19
    def __call__(self, restore_user, version, last_sync=None, app=None):
        assert isinstance(restore_user, OTARestoreUser)

        domain = restore_user.project
        fixtures = []

        if self._should_return_no_fixtures(domain, last_sync):
            return fixtures

        config = None
        if app:
            try:
                config = get_call_center_config_from_app(app)
            except:
                notify_exception(None, "Error getting call center config from app", details={
                    'domain': app.domain,
                    'app_id': app.get_id
                })

        if config:
            _assert = soft_assert(['skelly_at_dimagi_dot_com'.replace('_at_', '@').replace('_dot_', '.')])
            _assert(not config.includes_legacy(), 'Domain still using legacy call center indicators', {
                'domain': domain,
                'config': config.to_json()
            })

        try:
            fixtures.append(gen_fixture(restore_user, restore_user.get_call_center_indicators(config)))
        except Exception:  # blanket exception catching intended
            notify_exception(None, 'problem generating callcenter fixture', details={
                'user_id': restore_user.user_id,
                'domain': restore_user.domain
            })

        return fixtures
Example 20
        def _remove_case(to_remove):
            # uses closures for assertions
            logger.debug('removing: {}'.format(to_remove))
            assert to_remove in self.dependent_case_ids_on_phone
            indices = self.index_tree.indices.pop(to_remove, {})
            if to_remove != case_id:
                # if the case had indexes they better also be in our removal list (except for ourselves)
                for index in indices.values():
                    assert index in candidates_to_remove, \
                        "expected {} in {} but wasn't".format(index, candidates_to_remove)
            try:
                self.case_ids_on_phone.remove(to_remove)
            except KeyError:
                # todo: this here to avoid having to manually clean up after
                # http://manage.dimagi.com/default.asp?179664
                # it should be removed when there are no longer any instances of the assertion
                if self.date < datetime(2015, 8, 25):
                    _assert = soft_assert(to=['czue' + '@' + 'dimagi.com'], exponential_backoff=False)
                    _assert(False, 'patching sync log {} to remove missing case ID {}!'.format(
                        self._id, to_remove)
                    )
                else:
                    raise

            self.dependent_case_ids_on_phone.remove(to_remove)
Example 21
def _soft_assert_tz_not_string(tz):
    _assert = soft_assert(to=["*****@*****.**"], skip_frames=1)

    if not _assert(hasattr(tz, "localize"), "Timezone should be a tzinfo object, not a string"):
        # tz is a string, or at least string-like
        return pytz.timezone(smart_str(tz))
    else:
        return tz
Example 22
    def xpath(self, path):
        """
        Evaluates an xpath expression like: path/to/node and returns the value
        of that element, or None if there is no value.
        """
        _soft_assert = soft_assert(to='{}@{}'.format('brudolph', 'dimagi.com'))
        _soft_assert(False, "Reference to xpath instead of get_data")
        return safe_index(self, path.split("/"))
Example 23
def get_timezone_for_domain(domain):
    current_domain = Domain.get_by_name(domain)
    _assert = soft_assert('@'.join(['droberts', 'dimagi.com']))
    if _assert(current_domain, "get_timezone_for_domain passed fake domain",
               {'domain': domain}):
        return coerce_timezone_value(current_domain.default_timezone)
    else:
        return pytz.UTC
Example 24
    def by_user(cls, user, wrap=True, domain=None):
        group_ids = Group.by_user(user, wrap=False)

        if isinstance(user, dict):
            # Added 2015-07-31, feel free to remove eventually.
            _assert = soft_assert('@'.join(['esoergel', 'dimagi.com']))
            _assert(False, "This apparently IS called with a user dict. How?")

            user_id = user.get('user_id')
            user_domain = domain
            location = CommCareUser.get(user_id).sql_location
        else:
            user_id = user.user_id
            user_domain = user.domain
            location = user.sql_location

        loc_ids = location.path if location else []

        def make_keys(owner_type, ids):
            return [[user_domain, 'data_item by {}'.format(owner_type), id_]
                    for id_ in ids]

        fixture_ids = set(
            FixtureOwnership.get_db().view('fixtures/ownership',
                keys=(make_keys('user', [user_id]) +
                      make_keys('group', group_ids) +
                      make_keys('location', loc_ids)),
                reduce=False,
                wrapper=lambda r: r['value'],
            )
        )
        if wrap:
            results = cls.get_db().view('_all_docs', keys=list(fixture_ids), include_docs=True)

            # sort the results into those corresponding to real documents
            # and those corresponding to deleted or non-existent documents
            docs = []
            deleted_fixture_ids = set()

            for result in results:
                if result.get('doc'):
                    docs.append(cls.wrap(result['doc']))
                elif result.get('error'):
                    assert result['error'] == 'not_found'
                    deleted_fixture_ids.add(result['key'])
                else:
                    assert result['value']['deleted'] is True
                    deleted_fixture_ids.add(result['id'])

            # fetch and delete ownership documents pointing
            # to deleted or non-existent fixture documents
            # this cleanup is necessary since we used to not do this
            bad_ownerships = FixtureOwnership.for_all_item_ids(deleted_fixture_ids, user_domain)
            FixtureOwnership.get_db().bulk_delete(bad_ownerships)

            return docs
        else:
            return fixture_ids
Example 25
    def kafka(self):
        # load everything lazily to avoid doing this work if not needed
        if self._kafka is None and not self._has_error:
            self._kafka = get_kafka_client_or_none()
            if self._kafka is None:
                _assert = soft_assert(notify_admins=True)
                _assert(settings.DEBUG, 'Kafka is not available! Change producer is doing nothing.')
                self._has_error = True
        return self._kafka
Example 26
def set_cleanliness_flags(domain, owner_id, force_full=False):
    """
    For a given owner ID, manually sets the cleanliness flag on that ID.
    """
    if not domain or len(domain) > 100:
        raise InvalidDomainError(u'Domain {} must be a non-empty string less than 100 characters'.format(domain))
    if not owner_id or len(owner_id) > 100:
        raise InvalidOwnerIdError(
            u'Owner ID {} must be a non-empty string less than 100 characters'.format(owner_id)
        )
    cleanliness_object = OwnershipCleanlinessFlag.objects.get_or_create(
        owner_id=owner_id,
        domain=domain,
        defaults={'is_clean': False}
    )[0]

    def needs_full_check(domain, cleanliness_obj):
        # if it already is clean we don't need to do anything since that gets invalidated on submission
        return (
            # if clean, only check if the toggle is not enabled since then it won't be properly invalidated
            # on submission
            cleanliness_obj.is_clean and not OWNERSHIP_CLEANLINESS.enabled(domain)
        ) or (
            # if dirty, first check the hint and only do a full check if it's not valid
            not cleanliness_obj.is_clean and (
                not cleanliness_obj.hint or not hint_still_valid(domain, owner_id, cleanliness_obj.hint)
            )
        )

    needs_check = needs_full_check(domain, cleanliness_object)
    previous_clean_flag = cleanliness_object.is_clean
    if force_full or needs_check:
        # either the hint wasn't set, wasn't valid or we're forcing a rebuild - rebuild from scratch
        cleanliness_flag = get_cleanliness_flag_from_scratch(domain, owner_id)
        cleanliness_object.is_clean = cleanliness_flag.is_clean
        cleanliness_object.hint = cleanliness_flag.hint

    if force_full and not needs_check and previous_clean_flag and not cleanliness_object.is_clean:
        # we went from clean to dirty and would not have checked except that we forced it
        # this seems to indicate a problem in the logic that invalidates the flag, unless the feature
        # flag was turned off for the domain. either way cory probably wants to know.
        try:
            document = get_db().get(owner_id)
        except ResourceNotFound:
            document = {'doc_type': 'unknown'}

        owner_doc_type = document.get('doc_type', None)
        # filter out docs where we expect this to be broken (currently just web users)
        if owner_doc_type != 'WebUser':
            _assert = soft_assert(to=['czue' + '@' + 'dimagi.com'], exponential_backoff=False, fail_if_debug=False)
            _assert(False, 'Cleanliness flags out of sync for a {} with id {} in domain {}!'.format(
                owner_doc_type, owner_id, domain
            ))

    else:
        cleanliness_object.last_checked = datetime.utcnow()
        cleanliness_object.save()
Example 27
def force_to_change(dict_or_change):
    if not isinstance(dict_or_change, Change):
        if not settings.UNIT_TESTING:
            from corehq.util.soft_assert import soft_assert
            _assert = soft_assert(to=['czue' + '@' + 'dimagi.com'], exponential_backoff=True)
            _assert(False, u"Change wasn't a Change object!", dict_or_change)
        assert isinstance(dict_or_change, dict)
        return change_from_couch_row(dict_or_change)
    return dict_or_change
Example 28
def _notify_on_change(static_toggle, added_entries, username):
    is_deprecated_toggle = (static_toggle.tag in (TAG_DEPRECATED, TAG_CUSTOM, TAG_INTERNAL))
    if added_entries and (static_toggle.notification_emails or is_deprecated_toggle):
        subject = "User {} added {} on {} in environment {}".format(
            username, static_toggle.slug,
            added_entries, settings.SERVER_ENVIRONMENT
        )

        if static_toggle.notification_emails:
            emails = [
                "{}@{}.com".format(email, "dimagi")
                for email in static_toggle.notification_emails
            ]
            _assert = soft_assert(to=emails, send_to_ops=is_deprecated_toggle)
        else:
            _assert = soft_assert(send_to_ops=is_deprecated_toggle)

        _assert(False, subject)
Example 29
    def cases(self):
        _assert = soft_assert('@'.join(['droberts', 'dimagi.com']))
        _assert(False, "I'm surprised GroupReferenceMixIn ever gets called!")
        case_ids = get_case_ids_in_domain_by_owner(
            self.domain, owner_id__in=self.all_owner_ids, closed=False)
        # really inefficient, but can't find where it's called
        # and this is what it was doing before
        return [CommCareCase.wrap(doc)
                for doc in iter_docs(CommCareCase.get_db(), case_ids)]
Example 30
        def _remove_case(to_remove):
            # uses closures for assertions
            logger.debug("removing: {}".format(to_remove))
            assert to_remove in self.dependent_case_ids_on_phone
            indices = self.index_tree.indices.pop(to_remove, {})
            if to_remove != case_id:
                # if the case had indexes they better also be in our removal list (except for ourselves)
                for index in indices.values():
                    if not _domain_has_legacy_toggle_set():
                        # unblocking http://manage.dimagi.com/default.asp?185850#1039475
                        _assert = soft_assert(
                            to=["czue" + "@" + "dimagi.com"], exponential_backoff=False, fail_if_debug=True
                        )
                        _assert(
                            index in candidates_to_remove,
                            "expected {} in {} but wasn't".format(index, candidates_to_remove),
                        )
            try:
                self.case_ids_on_phone.remove(to_remove)
            except KeyError:
                _assert = soft_assert(to=["czue" + "@" + "dimagi.com"], exponential_backoff=False)

                def _should_fail_softly():
                    def _sync_log_was_old():
                        # todo: this here to avoid having to manually clean up after
                        # http://manage.dimagi.com/default.asp?179664
                        # it should be removed when there are no longer any instances of the assertion
                        if self.date < datetime(2015, 8, 25):
                            _assert(
                                False, "patching sync log {} to remove missing case ID {}!".format(self._id, to_remove)
                            )
                            return True
                        return False

                    return _domain_has_legacy_toggle_set() or _sync_log_was_old()

                if _should_fail_softly():
                    pass
                else:
                    # this is only a soft assert for now because of http://manage.dimagi.com/default.asp?181443
                    # we should convert back to a real Exception when we stop getting any of these
                    _assert(False, "case {} already removed from sync log {}".format(to_remove, self._id))

            self.dependent_case_ids_on_phone.remove(to_remove)
Example 31
def send_email_to_dev_more(domain, user, query, total_results):
    """Dev wanted an email with every query that is performed on the CLE.

    ¯\_(ツ)_/¯
    """
    _assert = soft_assert(["@".join(['dmore', 'dimagi.com'])],
                          exponential_backoff=False,
                          send_to_ops=False)
    _assert(
        False, "Case List Explorer Query Performed", {
            'Note':
            "Hi Dev! Someone just performed a query with the case list explorer. Cool!  -CLEBOT",
            'Domain': domain,
            'User': user,
            'Query': query if query else "Empty Query",
            'Total Results': total_results,
        })
Example 32
def get_cloudcare_session_data(domain_name, form, couch_user):
    from corehq.apps.app_manager.suite_xml.sections.entries import EntriesHelper

    datums = EntriesHelper.get_new_case_id_datums_meta(form)
    session_data = {datum.datum.id: uuid.uuid4().hex for datum in datums}
    if couch_user.doc_type == 'CommCareUser':  # smsforms.app.start_session could pass a CommCareCase
        try:
            extra_datums = EntriesHelper.get_extra_case_id_datums(form)
        except SuiteError as err:
            _assert = soft_assert(['nhooper_at_dimagi_dot_com'.replace('_at_', '@').replace('_dot_', '.')])
            _assert(False, 'Domain "%s": %s' % (domain_name, err))
        else:
            if EntriesHelper.any_usercase_datums(extra_datums):
                usercase_id = couch_user.get_usercase_id()
                if usercase_id:
                    session_data[USERCASE_ID] = usercase_id
    return session_data
Example 33
    def _get_config(self):
        try:
            config = (CaseSearchConfig.objects.prefetch_related(
                'fuzzy_properties').prefetch_related('ignore_patterns').get(
                    domain=self.domain))
        except CaseSearchConfig.DoesNotExist as e:
            from corehq.util.soft_assert import soft_assert
            _soft_assert = soft_assert(
                to="{}@{}.com".format('frener', 'dimagi'),
                notify_admins=False,
                send_to_ops=False,
            )
            _soft_assert(
                False,
                "Someone in domain: {} tried accessing case search without a config"
                .format(self.domain), e)
            config = CaseSearchConfig(domain=self.domain)
        return config
Example 34
    def __call__(self, restore_state):
        restore_user = restore_state.restore_user
        domain = restore_user.project
        fixtures = []

        if self._should_return_no_fixtures(domain,
                                           restore_state.last_sync_log):
            return fixtures

        config = None
        app = restore_state.params.app
        if app:
            try:
                config = get_call_center_config_from_app(app)
            except:
                notify_exception(None,
                                 "Error getting call center config from app",
                                 details={
                                     'domain': app.domain,
                                     'app_id': app.get_id
                                 })

        if config:
            _assert = soft_assert([
                'skelly_at_dimagi_dot_com'.replace('_at_', '@').replace('_dot_', '.')
            ])
            _assert(not config.includes_legacy(),
                    'Domain still using legacy call center indicators', {
                        'domain': domain.name,
                        'config': config.to_json()
                    })

        indicator_set = restore_user.get_call_center_indicators(config)
        if indicator_set:
            try:
                fixtures.append(gen_fixture(restore_user, indicator_set))
            except Exception:  # blanket exception catching intended
                notify_exception(None,
                                 'problem generating callcenter fixture',
                                 details={
                                     'user_id': restore_user.user_id,
                                     'domain': restore_user.domain
                                 })

        return fixtures
Example 35
def set_cleanliness_flags(domain, owner_id, force_full=False, raise_soft_assertions=True):
    """
    For a given owner ID, manually sets the cleanliness flag on that ID.
    """
    if not domain or len(domain) > 100:
        raise InvalidDomainError(u'Domain {} must be a non-empty string less than 100 characters'.format(domain))
    if not owner_id or len(owner_id) > 100:
        raise InvalidOwnerIdError(
            u'Owner ID {} must be a non-empty string less than 100 characters'.format(owner_id)
        )
    cleanliness_object = OwnershipCleanlinessFlag.objects.get_or_create(
        owner_id=owner_id,
        domain=domain,
        defaults={'is_clean': False}
    )[0]

    def needs_full_check(domain, cleanliness_obj):
        # if it already is clean we don't need to do anything since that gets invalidated on submission
        # if dirty, first check the hint and only do a full check if it's not valid
        return not cleanliness_obj.is_clean and (
            not cleanliness_obj.hint or not hint_still_valid(domain, cleanliness_obj.hint)
        )

    needs_check = needs_full_check(domain, cleanliness_object)
    previous_clean_flag = cleanliness_object.is_clean
    if force_full or needs_check:
        # either the hint wasn't set, wasn't valid or we're forcing a rebuild - rebuild from scratch
        cleanliness_flag = get_cleanliness_flag_from_scratch(domain, owner_id)
        cleanliness_object.is_clean = cleanliness_flag.is_clean
        cleanliness_object.hint = cleanliness_flag.hint

    if force_full and not needs_check and previous_clean_flag and not cleanliness_object.is_clean:
        # we went from clean to dirty and would not have checked except that we forced it
        # this seems to indicate a problem in the logic that invalidates the flag, unless the feature
        # flag was turned off for the domain. either way cory probably wants to know.

        # filter out docs where we expect this to be broken (currently just web users)
        if not _is_web_user(owner_id) and raise_soft_assertions:
            _assert = soft_assert(notify_admins=True, exponential_backoff=False, fail_if_debug=False)
            _assert(False, 'Cleanliness flags out of sync for user {} in domain {}!'.format(
                owner_id, domain
            ))

    cleanliness_object.last_checked = datetime.utcnow()
    cleanliness_object.save()
Example 36
def rebuild_sql_tables(adapters):
    tables_by_engine = defaultdict(dict)
    all_adapters = []
    for adapter in adapters:
        if getattr(adapter, 'all_adapters', None):
            all_adapters.extend(adapter.all_adapters)
        else:
            all_adapters.append(adapter)
    for adapter in all_adapters:
        tables_by_engine[adapter.engine_id][adapter.get_table().name] = adapter

    _assert = soft_assert(notify_admins=True)
    _notify_rebuild = lambda msg, obj: _assert(False, msg, obj)

    for engine_id, table_map in tables_by_engine.items():
        table_names = list(table_map)
        engine = connection_manager.get_engine(engine_id)

        diffs = get_table_diffs(engine, table_names, get_metadata(engine_id))

        tables_to_act_on = get_tables_rebuild_migrate(diffs)
        for table_name in tables_to_act_on.rebuild:
            sql_adapter = table_map[table_name]
            pillow_logging.info(
                "[rebuild] Rebuilding table: %s, from config %s at rev %s",
                table_name, sql_adapter.config._id, sql_adapter.config._rev)
            pillow_logging.info("[rebuild] Using config: %r",
                                sql_adapter.config)
            pillow_logging.info("[rebuild] sqlalchemy metadata: %r",
                                get_metadata(engine_id).tables[table_name])
            pillow_logging.info("[rebuild] sqlalchemy table: %r",
                                sql_adapter.get_table())
            table_diffs = [
                diff for diff in diffs if diff.table_name == table_name
            ]
            if not sql_adapter.config.is_static:
                try:
                    rebuild_table(sql_adapter, table_diffs)
                except TableRebuildError as e:
                    _notify_rebuild(str(e), sql_adapter.config.to_json())
            else:
                rebuild_table(sql_adapter, table_diffs)

        migrate_tables_with_logging(engine, diffs, tables_to_act_on.migrate,
                                    table_map)
Example 37
def search(request, domain):
    """
    Accepts search criteria as GET params, e.g. "https://www.commcarehq.org/a/domain/phone/search/?a=b&c=d"
    Returns results as a fixture with the same structure as a casedb instance.
    """
    criteria = request.GET.dict()
    try:
        case_type = criteria.pop('case_type')
    except KeyError:
        return HttpResponse('Search request must specify case type',
                            status=400)
    try:
        include_closed = criteria.pop('include_closed')
    except KeyError:
        include_closed = False

    search_es = (CaseSearchES().domain(domain).case_type(case_type).size(
        CASE_SEARCH_MAX_RESULTS))

    if include_closed != 'True':
        search_es = search_es.is_closed(False)

    try:
        config = CaseSearchConfig.objects.get(domain=domain)
    except CaseSearchConfig.DoesNotExist as e:
        from corehq.util.soft_assert import soft_assert
        _soft_assert = soft_assert(to="{}@{}.com".format('frener', 'dimagi'),
                                   notify_admins=False,
                                   send_to_ops=False)
        _soft_assert(
            False,
            "Someone in domain: {} tried accessing case search without a config"
            .format(domain), e)
        config = CaseSearchConfig(domain=domain)

    fuzzies = config.config.get_fuzzy_properties_for_case_type(case_type)
    for key, value in criteria.items():
        search_es = search_es.case_property_query(key,
                                                  value,
                                                  fuzzy=(key in fuzzies))
    results = search_es.values()
    # Even if it's a SQL domain, we just need to render the results as cases, so CommCareCase.wrap will be fine
    cases = [CommCareCase.wrap(flatten_result(result)) for result in results]
    fixtures = CaseDBFixture(cases).fixture
    return HttpResponse(fixtures, content_type="text/xml")
Example 38
def get_cloudcare_session_data(domain_name, form, couch_user):
    from corehq.apps.hqcase.utils import get_case_by_domain_hq_user_id
    from corehq.apps.app_manager.suite_xml import SuiteGenerator

    datums = SuiteGenerator.get_new_case_id_datums_meta(form)
    session_data = {datum['datum'].id: uuid.uuid4().hex for datum in datums}
    if couch_user.doc_type == 'CommCareUser':  # smsforms.app.start_session could pass a CommCareCase
        try:
            extra_datums = SuiteGenerator.get_extra_case_id_datums(form)
        except SuiteError as err:
            _assert = soft_assert(['nhooper_at_dimagi_dot_com'.replace('_at_', '@').replace('_dot_', '.')])
            _assert(False, 'Domain "%s": %s' % (domain_name, err))
        else:
            if SuiteGenerator.any_usercase_datums(extra_datums):
                usercase = get_case_by_domain_hq_user_id(domain_name, couch_user.get_id, USERCASE_TYPE)
                if usercase:
                    session_data[USERCASE_ID] = usercase.get_id
    return session_data
Example 39
    def _get_fixture_node(self, fixture_id, restore_user, locations_queryset,
                          location_type_attrs, data_fields):
        root_node = Element('fixture', {'id': fixture_id,
                                        'user_id': restore_user.user_id,
                                        'indexed': 'true'})
        outer_node = Element('locations')
        root_node.append(outer_node)
        all_locations = list(locations_queryset.order_by('site_code'))
        locations_by_id = {location.pk: location for location in all_locations}
        for location in all_locations:
            attrs = {
                'type': location.location_type.code,
                'id': location.location_id,
            }
            attrs.update({attr: '' for attr in location_type_attrs})
            attrs['{}_id'.format(location.location_type.code)] = location.location_id

            current_location = location
            while current_location.parent_id:
                try:
                    current_location = locations_by_id[current_location.parent_id]
                except KeyError:
                    current_location = current_location.parent

                    # For some reason this wasn't included in the locations we already fetched
                    from corehq.util.soft_assert import soft_assert
                    _soft_assert = soft_assert('{}@{}.com'.format('frener', 'dimagi'))
                    message = (
                        "The flat location fixture didn't prefetch all parent "
                        "locations: {domain}: {location_id}. User id: {user_id}"
                    ).format(
                        domain=current_location.domain,
                        location_id=current_location.location_id,
                        user_id=restore_user.user_id,
                    )
                    _soft_assert(False, msg=message)

                attrs['{}_id'.format(current_location.location_type.code)] = current_location.location_id

            location_node = Element('location', attrs)
            _fill_in_location_element(location_node, location, data_fields)
            outer_node.append(location_node)

        return root_node
Example 40
def apply_index_changes(engine, raw_diffs, table_names):
    indexes = _get_indexes_to_change(raw_diffs, table_names)
    remove_indexes = [
        index.index for index in indexes
        if index.action == DiffTypes.REMOVE_INDEX
    ]
    add_indexes = [
        index.index for index in indexes if index.action == DiffTypes.ADD_INDEX
    ]

    with engine.begin() as conn:
        for index in add_indexes:
            index.create(conn)

    # don't remove indexes automatically because we want to be able to add them
    # concurrently without the code removing them
    _assert = soft_assert(to="@".join(["jemord", "dimagi.com"]))
    for index in remove_indexes:
        _assert(False, "Index {} can be removed".format(index.name))
Example 41
    def _submit_caseblocks(domain, case_type, caseblocks):
        err = False
        if caseblocks:
            try:
                form, cases = submit_case_blocks(
                    [cb.case.as_string() for cb in caseblocks],
                    domain,
                    username,
                    user_id,
                )

                if form.is_error:
                    errors.add(
                        error=ImportErrors.ImportErrorMessage,
                        row_number=form.problem
                    )
            except Exception:
                err = True
                for row_number, case in caseblocks:
                    errors.add(
                        error=ImportErrors.ImportErrorMessage,
                        row_number=row_number
                    )
            else:
                if record_form_callback:
                    record_form_callback(form.form_id)
                properties = set().union(*map(lambda c: set(c.dynamic_case_properties().keys()), cases))
                if case_type and len(properties):
                    add_inferred_export_properties.delay(
                        'CaseImporter',
                        domain,
                        case_type,
                        properties,
                    )
                else:
                    _soft_assert = soft_assert(notify_admins=True)
                    _soft_assert(
                        len(properties) == 0,
                        'error adding inferred export properties in domain '
                        '({}): {}'.format(domain, ", ".join(properties))
                    )
        return err
Example 42
def get_topojson_for_district(state, district):
    path = get_topojson_directory()
    district_topojson_data = get_district_topojson_data()
    # if we have the state name already use that
    filename = None
    if state in district_topojson_data:
        filename = district_topojson_data[state]['file_name']
    else:
        # legacy support - missing state name so look for the district by name across all states
        _assert = soft_assert('@'.join(['czue', 'dimagi.com']),
                              fail_if_debug=True)
        _assert(False, f"State {state} not found in district topojosn file!")
        for state, data in district_topojson_data.items():
            if district in data['districts']:
                filename = data['file_name']
                break
    if filename:
        with open(os.path.join(path, 'blocks/' + filename),
                  encoding='utf-8') as f:
            return json.loads(f.read())
Example 43
    def submit_caseblocks(self, caseblocks):
        if not caseblocks:
            return

        try:
            form, cases = submit_case_blocks(
                [cb.case.as_text() for cb in caseblocks],
                self.domain,
                self.user.username,
                self.user.user_id,
                device_id=__name__ + ".do_import",
            )
            if form.is_error:
                raise Exception("Form error during case import: {}".format(
                    form.problem))
        except Exception:
            notify_exception(
                None, "Case Importer: Uncaught failure submitting caseblocks")
            for row_number, case in caseblocks:
                self.results.add_error(row_number,
                                       exceptions.ImportErrorMessage())
        else:
            if self.record_form_callback:
                self.record_form_callback(form.form_id)
            properties = {
                p
                for c in cases for p in c.dynamic_case_properties().keys()
            }
            if self.config.case_type and len(properties):
                add_inferred_export_properties.delay(
                    'CaseImporter',
                    self.domain,
                    self.config.case_type,
                    properties,
                )
            else:
                _soft_assert = soft_assert(notify_admins=True)
                _soft_assert(
                    len(properties) == 0,
                    'error adding inferred export properties in domain '
                    '({}): {}'.format(self.domain, ", ".join(properties)))
Example 44
def track_domain_request(calculated_prop):
    """
    Use this decorator to audit requests by domain.
    """
    norman = ''.join(reversed('moc.igamid@repoohn'))
    _soft_assert = soft_assert(to=norman)

    def _dec(view_func):

        @wraps(view_func)
        def _wrapped(*args, **kwargs):
            if 'domain' in kwargs:
                domain = kwargs['domain']
            elif (
                len(args) > 2
                and isinstance(args[0], View)
                and isinstance(args[1], HttpRequest)
                and isinstance(args[2], str)
            ):
                # class-based view; args == (self, request, domain, ...)
                domain = args[2]
            elif (
                len(args) > 1
                and isinstance(args[0], HttpRequest)
                and isinstance(args[1], str)
            ):
                # view function; args == (request, domain, ...)
                domain = args[1]
            else:
                domain = None
            if _soft_assert(
                    domain,
                    'Unable to track_domain_request("{prop}") on view "{view}". Unable to determine domain from '
                    'args {args}.'.format(prop=calculated_prop, view=view_func.__name__, args=args)
            ):
                DomainAuditRecordEntry.update_calculations(domain, calculated_prop)
            return view_func(*args, **kwargs)

        return _wrapped

    return _dec
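A hypothetical application of the decorator above (the view and the calculated property name are illustrative, not taken from the codebase):

# Hypothetical usage of track_domain_request; the property name and the
# view are made up for illustration.
@track_domain_request(calculated_prop='cp_n_example_requests')
def example_download_view(request, domain):
    ...  # any domain-scoped view; each request updates the domain's audit record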
Example 45
    def get_adherence_image_key(self, adherence_cases, date):
        primary_adherence_case = self.get_primary_adherence_case(adherence_cases)
        adherence_value = self.get_adherence_value(primary_adherence_case)
        if len(adherence_cases) == 0 or adherence_value == "missing_data":
            return self.unknown_img_holder(date)
        if adherence_value == "unobserved_dose":
            if self.get_adherence_source(primary_adherence_case) == "99DOTS":
                return "unobserved_dose_dot"
            return "unobserved_dose"
        if adherence_value not in ("dose_unknown_expected", "unobserved_dose_dot",
                                   "self_administered_dose", "unobserved_dose",
                                   "missed_dose", "directly_observed_dose",
                                   None, ""):
            assert_ = soft_assert(to='ncarnahan' + '@' + 'dimagi' + '.com')
            assert_(
                False,
                "Got an unexpected adherence_value of {} for case {}".format(
                    adherence_value, primary_adherence_case.case_id))
        return adherence_value
Example 46
def is_aggregate_inactive_aww_data_fresh(send_email=False):
    # Heuristic to check if collect_inactive_awws task ran successfully today or yesterday
    #   This would return False if both today and yesterday's task failed
    #   or if the last-submission is older than a day due to pillow lag.
    last_submission = AggregateInactiveAWW.objects.filter(
        last_submission__isnull=False
    ).aggregate(Max('last_submission'))['last_submission__max']
    if not last_submission:
        return False
    is_fresh = last_submission >= (datetime.today() - timedelta(days=1)).date()
    SMS_TEAM = ['{}@{}'.format('icds-sms-rule', 'dimagi.com')]
    if not send_email:
        return is_fresh
    _soft_assert = soft_assert(to=SMS_TEAM, send_to_ops=False)
    if is_fresh:
        _soft_assert(False, "The weekly inactive SMS rule was successfully triggered for this week")
    else:
        _soft_assert(False,
            "The weekly inactive SMS rule was skipped for this week because the latest "
            "submission ({}) is more than one day old".format(last_submission)
        )
    return is_fresh
Esempio n. 47
0
    def _get_prescription_threshold_to_send(episode_case,
                                            check_already_sent=True):
        from custom.enikshay.integrations.bets.repeaters import BETSDrugRefillRepeater
        thresholds_to_send = [
            n for n in TOTAL_DAY_THRESHOLDS
            if BETSDrugRefillRepeater.prescription_total_days_threshold_in_trigger_state(
                episode_case.dynamic_case_properties(),
                n,
                check_already_sent=check_already_sent,
            )
        ]
        if check_already_sent:
            _assert = soft_assert('{}@{}.com'.format('frener', 'dimagi'))
            message = (
                "Repeater should not have allowed forwarding if there were "
                "more or fewer than one threshold to trigger. "
                "Episode case: {}".format(episode_case.case_id))
            _assert(len(thresholds_to_send) == 1, message)

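        # an empty list means no threshold is in a trigger state; fall back to 0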
        try:
            return thresholds_to_send[-1]
        except IndexError:
            return 0
Esempio n. 48
0
    def get_all(self):
        status = self.status or CASE_STATUS_ALL
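        # no explicit status filter means return all cases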
        if status == CASE_STATUS_ALL:
            case_ids = get_case_ids_in_domain(self.domain, type=self.case_type)
        elif status == CASE_STATUS_OPEN:
            case_ids = get_open_case_ids_in_domain(self.domain,
                                                   type=self.case_type)
        elif status == CASE_STATUS_CLOSED:
            _assert = soft_assert('@'.join(['droberts', 'dimagi.com']))
            _assert(
                False, "I'm surprised CaseAPIHelper "
                "ever gets called with status=closed")
            # this is rare so we don't care if it requires two calls to get
            # all the ids
            case_ids = (
                set(get_case_ids_in_domain(self.domain, type=self.case_type))
                - set(get_open_case_ids_in_domain(self.domain, type=self.case_type))
            )
        else:
            raise ValueError("Invalid value for 'status': '%s'" % status)

        return self._case_results(case_ids)
Esempio n. 49
0
    def handle(self, **options):
        workers = options['workers']
        batch_by = options['batch_by']
        batch_size = options['batch_size']
        util = AuditCareMigrationUtil()
        batches = []
        try:
            while True:
                if options['only_errored']:
                    batches = util.get_errored_keys(5)
                    if not batches:
                        print("No errored keys present")
                        return
                elif options['cancelled']:
                    batches = util.get_cancelled_keys(5)
                    if not batches:
                        print("No cancelled keys present")
                        return
                else:
                    batches = util.generate_batches(workers, batch_by)
                if not batches:
                    print("No batches to process")
                    return
                batched_processes = [
                    gevent.spawn(copy_events_to_sql, *batch, batch_size=batch_size)
                    for batch in batches
                ]
                gevent.joinall(batched_processes)
        except MissingStartTimeError as e:
            message = f"Error in copy_events_to_sql while generating batches\n{e}"
            notify_exception(None, message=message)
            _soft_assert = soft_assert(
                to='{}@{}.com'.format('aphulera', 'dimagi'),
                notify_admins=False,
            )
            _soft_assert(False, message)
            return
Esempio n. 50
0
def store_billable(self, msg):
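    # presumably a bound Celery task (note the self.retry() call below)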
    if not isinstance(msg, SMS):
        raise Exception("Expected msg to be an SMS")

    if msg.couch_id and not SmsBillable.objects.filter(log_id=msg.couch_id).exists():
        try:
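            # message is encodable as Latin-1, so the full 160-character
            # single-message length applies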
            msg.text.encode('iso-8859-1')
            msg_length = 160
        except UnicodeEncodeError:
            # This string contains unicode characters, so the allowed
            # per-sms message length is shortened
            msg_length = 70
        try:
            SmsBillable.create(
                msg,
                multipart_count=int(math.ceil(len(msg.text) / msg_length)),
            )
        except RetryBillableTaskException as e:
            self.retry(exc=e)
        except DataError:
            from corehq.util.soft_assert import soft_assert
            _soft_assert = soft_assert(to='{}@{}'.format('jemord', 'dimagi.com'))
            _soft_assert(len(msg.domain) < 25, "Domain name too long: " + msg.domain)
            raise
Esempio n. 51
0
    def _rebuild_sql_tables(self, adapters):
        # todo move this code to sql adapter rebuild_if_necessary
        tables_by_engine = defaultdict(dict)
        for adapter in adapters:
            sql_adapter = get_indicator_adapter(adapter.config)
            tables_by_engine[sql_adapter.engine_id][sql_adapter.get_table().name] = sql_adapter

        _assert = soft_assert(notify_admins=True)
        _notify_rebuild = lambda msg, obj: _assert(False, msg, obj)

        for engine_id, table_map in tables_by_engine.items():
            engine = connection_manager.get_engine(engine_id)
            table_names = list(table_map)
            with engine.begin() as connection:
                migration_context = get_migration_context(connection, table_names)
                raw_diffs = compare_metadata(migration_context, metadata)
                diffs = reformat_alembic_diffs(raw_diffs)

            tables_to_rebuild = get_tables_to_rebuild(diffs, table_names)
            for table_name in tables_to_rebuild:
                sql_adapter = table_map[table_name]
                if not sql_adapter.config.is_static:
                    try:
                        self.rebuild_table(sql_adapter)
                    except TableRebuildError as e:
                        _notify_rebuild(six.text_type(e), sql_adapter.config.to_json())
                else:
                    self.rebuild_table(sql_adapter)

            tables_with_index_changes = get_tables_with_index_changes(
                diffs, table_names)
            tables_with_index_changes -= tables_to_rebuild
            apply_index_changes(engine, raw_diffs, tables_with_index_changes)
Esempio n. 52
0
def get_indicator_table(indicator_config, custom_metadata=None):
    sql_columns = [column_to_sql(col) for col in indicator_config.get_columns()]
    table_name = get_table_name(indicator_config.domain, indicator_config.table_id)
    columns_by_col_id = {col.database_column_name for col in indicator_config.get_columns()}
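    # the set of column names on this table, used to validate user-specified indexes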
    extra_indices = []
    for index in indicator_config.sql_column_indexes:
        if set(index.column_ids).issubset(columns_by_col_id):
            extra_indices.append(Index(
                _custom_index_name(table_name, index.column_ids),
                *index.column_ids
            ))
        else:
            _assert = soft_assert('{}@{}'.format('jemord', 'dimagi.com'))
            _assert(False, "Invalid index specified on {}".format(table_name))
            break
    columns_and_indices = sql_columns + extra_indices
    # todo: needed to add extend_existing=True to support multiple calls to this function for the same table.
    # is that valid?
    return sqlalchemy.Table(
        table_name,
        custom_metadata or metadata,
        extend_existing=True,
        *columns_and_indices
    )
Esempio n. 53
0
from corehq.apps.accounting.models import (
    DefaultProductPlan,
    ProBonoStatus,
    SoftwarePlanEdition,
    SoftwarePlanVisibility,
    Subscription,
    SubscriptionType,
)
from corehq.apps.accounting.signals import subscription_upgrade_or_downgrade
from corehq.apps.domain.signals import commcare_domain_post_save
from corehq.apps.users.signals import couch_user_post_save
from corehq.apps.analytics.utils import get_instance_string

_no_cookie_soft_assert = soft_assert(
    to=[
        '{}@{}'.format('cellowitz', 'dimagi.com'),
        '{}@{}'.format('biyeun', 'dimagi.com'),
        '{}@{}'.format('jschweers', 'dimagi.com'),
    ],
    send_to_ops=False,
)


@receiver(couch_user_post_save)
def user_save_callback(sender, **kwargs):
    couch_user = kwargs.get("couch_user", None)
    if couch_user and couch_user.is_web_user():
        properties = {}
        properties.update(get_subscription_properties_by_user(couch_user))
        properties.update(get_domain_membership_properties(couch_user))
        identify_v2.delay(couch_user.username, properties)
        update_hubspot_properties_v2(couch_user, properties)

Esempio n. 54
0
from django.contrib.auth import get_user_model
from django.db.models import Q
from django.template import Context, Template
from django.template.loader import render_to_string

from corehq.apps.es.users import UserES
from corehq.apps.hqadmin.models import HistoricalPillowCheckpoint
from corehq.apps.hqwebapp.tasks import send_html_email_async
from corehq.util.soft_assert import soft_assert
from dimagi.utils.logging import notify_error
from dimagi.utils.django.email import send_HTML_email
from dimagi.utils.web import get_site_domain
from pillowtop.utils import get_couch_pillow_instances
from .utils import check_for_rewind

_soft_assert_superusers = soft_assert(notify_admins=True)


@periodic_task(run_every=crontab(hour=0, minute=0), queue='background_queue')
def check_pillows_for_rewind():
    for pillow in get_couch_pillow_instances():
        checkpoint = pillow.checkpoint
        has_rewound, historical_seq = check_for_rewind(checkpoint)
        if has_rewound:
            notify_error(
                message='Found seq number lower than previous for {}. '
                'This could mean we are in a rewind state'.format(
                    checkpoint.checkpoint_id),
                details={
                    'pillow checkpoint seq':
                    checkpoint.get_current_sequence_id(),
Esempio n. 55
0
from corehq.apps.hqwebapp.decorators import (
    use_jquery_ui,
    use_ko_validation,
)
from corehq.apps.users.models import WebUser, CouchUser
from corehq.apps.users.landing_pages import get_cloudcare_urlname
from django.contrib.auth.models import User

from corehq.util.soft_assert import soft_assert
from dimagi.utils.couch import CriticalSection
from dimagi.utils.couch.resource_conflict import retry_resource
from memoized import memoized
from dimagi.utils.web import get_ip
from corehq.util.context_processors import get_per_domain_context


_domainless_new_user_soft_assert = soft_assert(
    to=['{}@{}'.format('biyeun', 'dimagi.com')],
    send_to_ops=False,
    fail_if_debug=False,
)


def get_domain_context():
    return get_per_domain_context(Domain())


def registration_default(request):
    return redirect(UserRegistrationView.urlname)


def track_domainless_new_user(request):
    if settings.UNIT_TESTING:
        # don't trigger soft assert in a test
        return
Esempio n. 56
0
    SQLRepeater,
    domain_can_forward,
    get_payload,
    send_request,
)

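# timing buckets, from 10 seconds up to 10 hours, for check_repeaters metrics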
_check_repeaters_buckets = make_buckets_from_timedeltas(
    timedelta(seconds=10),
    timedelta(minutes=1),
    timedelta(minutes=5),
    timedelta(hours=1),
    timedelta(hours=5),
    timedelta(hours=10),
)
MOTECH_DEV = '@'.join(('nhooper', 'dimagi.com'))
_soft_assert = soft_assert(to=MOTECH_DEV)
logging = get_task_logger(__name__)

DELETE_CHUNK_SIZE = 5000


@periodic_task(
    run_every=crontab(hour=6, minute=0),
    queue=settings.CELERY_PERIODIC_QUEUE,
)
def delete_old_request_logs():
    """
    Delete RequestLogs older than 90 days.
    """
    ninety_days_ago = datetime.utcnow() - timedelta(days=90)
    while True:
Esempio n. 57
0
from calendar import monthrange
from datetime import datetime, timedelta

from django.db import transaction

from corehq.apps.saved_reports.models import (
    ReportNotification,
    ScheduledReportsCheckpoint,
)
from corehq.util.soft_assert import soft_assert

_soft_assert = soft_assert(
    to='{}@{}'.format('supportteam', 'dimagi.com'),
    exponential_backoff=False,
)


def _make_all_notification_view_keys(period, target):
    """

    :param period: 'hourly', 'daily', 'weekly', or 'monthly'
    :param target: The 15-minute-aligned point in time we are targeting for a match
    :return: generator of couch view kwargs to use in view query for 'reportconfig/all_notifications'
    """
    assert target.minute % 15 == 0
    assert target.second == 0
    assert target.microsecond == 0

    if target.minute == 0:
        # for legacy purposes, on the hour also include reports that didn't have a minute set
        minutes = (None, target.minute)
Esempio n. 58
0
def view_generic(request,
                 domain,
                 app_id=None,
                 module_id=None,
                 form_id=None,
                 copy_app_form=None,
                 release_manager=False):
    """
    This is the main view for the app. All other views redirect to here.

    """
    if form_id and not module_id:
        return bail(request, domain, app_id)

    app = module = form = None
    try:
        if app_id:
            app = get_app(domain, app_id)
        if module_id:
            try:
                module = app.get_module(module_id)
            except ModuleNotFoundException:
                raise Http404()
            if not module.unique_id:
                module.get_or_create_unique_id()
                app.save()
        if form_id:
            try:
                form = module.get_form(form_id)
            except IndexError:
                raise Http404()
    except ModuleNotFoundException:
        return bail(request, domain, app_id)

    # Application states that should no longer exist
    if app:
        if app.application_version == APP_V1:
            _assert = soft_assert()
            _assert(False, 'App version 1.0', {
                'domain': domain,
                'app_id': app_id
            })
            template = get_app_manager_template(
                request.user,
                'app_manager/v1/no_longer_supported.html',
                'app_manager/v2/no_longer_supported.html',
            )
            return render(request, template, {
                'domain': domain,
                'app': app,
            })
        if not app.vellum_case_management and not app.is_remote_app():
            # Soft assert but then continue rendering; template will contain a user-facing warning
            _assert = soft_assert(['jschweers' + '@' + 'dimagi.com'])
            _assert(False, 'vellum_case_management=False', {
                'domain': domain,
                'app_id': app_id
            })
        if (form is not None
                and toggles.USER_PROPERTY_EASY_REFS.enabled(domain)
                and "usercase_preload" in form.actions
                and form.actions.usercase_preload.preload):
            _assert = soft_assert(['dmiller' + '@' + 'dimagi.com'])
            _assert(
                False, 'User property easy refs + old-style config = bad', {
                    'domain': domain,
                    'app_id': app_id,
                    'module_id': module_id,
                    'form_id': form_id,
                })

    context = get_apps_base_context(request, domain, app)
    if app and app.copy_of:
        # don't fail hard.
        return HttpResponseRedirect(
            reverse(
                "view_app",
                args=[domain, app.copy_of]  # TODO - is this right?
            ))

    # grandfather in people who set commcare sense earlier
    if app and 'use_commcare_sense' in app:
        if app['use_commcare_sense']:
            if 'features' not in app.profile:
                app.profile['features'] = {}
            app.profile['features']['sense'] = 'true'
        del app['use_commcare_sense']
        app.save()

    context.update({
        'module': module,
        'form': form,
    })

    lang = context['lang']
    if app and not module and hasattr(app, 'translations'):
        context.update({"translations": app.translations.get(lang, {})})

    if form:
        template, form_context = get_form_view_context_and_template(
            request, domain, form, context['langs'])
        context.update(form_context)
    elif module:
        template = get_module_template(request.user, module)
        # make sure all modules have unique ids
        app.ensure_module_unique_ids(should_save=True)
        module_context = get_module_view_context(app, module, lang)
        context.update(module_context)
    elif app:
        context.update(get_app_view_context(request, app))

        v2_template = ('app_manager/v2/app_view_release_manager.html'
                       if release_manager else
                       'app_manager/v2/app_view_settings.html')

        template = get_app_manager_template(request.user,
                                            'app_manager/v1/app_view.html',
                                            v2_template)

        if release_manager:
            context.update(get_releases_context(request, domain, app_id))
        context.update({
            'is_app_settings_page': not release_manager,
        })
    else:
        if toggles.APP_MANAGER_V2.enabled(request.user.username):
            from corehq.apps.dashboard.views import DomainDashboardView
            return HttpResponseRedirect(
                reverse(DomainDashboardView.urlname, args=[domain]))
        else:
            from corehq.apps.dashboard.views import NewUserDashboardView
            return HttpResponseRedirect(
                reverse(NewUserDashboardView.urlname, args=[domain]))

    # update multimedia context for forms and modules.
    menu_host = form or module
    if menu_host:
        default_file_name = 'module%s' % module_id
        if form_id:
            default_file_name = '%s_form%s' % (default_file_name, form_id)

        specific_media = {
            'menu': {
                'menu_refs': app.get_menu_media(
                    module, module_id, form=form, form_index=form_id,
                    to_language=lang),
                'default_file_name': '{name}_{lang}'.format(
                    name=default_file_name, lang=lang),
            }
        }

        if module and module.uses_media():

            def _make_name(suffix):
                return "{default_name}_{suffix}_{lang}".format(
                    default_name=default_file_name,
                    suffix=suffix,
                    lang=lang,
                )

            specific_media['case_list_form'] = {
                'menu_refs': app.get_case_list_form_media(
                    module, module_id, to_language=lang),
                'default_file_name': _make_name('case_list_form'),
            }
            specific_media['case_list_menu_item'] = {
                'menu_refs': app.get_case_list_menu_item_media(
                    module, module_id, to_language=lang),
                'default_file_name': _make_name('case_list_menu_item'),
            }
            specific_media['case_list_lookup'] = {
                'menu_refs': app.get_case_list_lookup_image(module, module_id),
                'default_file_name': '{}_case_list_lookup'.format(default_file_name),
            }

            if hasattr(module, 'product_details'):
                specific_media['product_list_lookup'] = {
                    'menu_refs': app.get_case_list_lookup_image(
                        module, module_id, type='product'),
                    'default_file_name': '{}_product_list_lookup'.format(default_file_name),
                }

        context.update({
            'multimedia': {
                "object_map": app.get_object_map(),
                'upload_managers': {
                    'icon': MultimediaImageUploadController(
                        "hqimage",
                        reverse(ProcessImageFileUploadView.name,
                                args=[app.domain, app.get_id])),
                    'audio': MultimediaAudioUploadController(
                        "hqaudio",
                        reverse(ProcessAudioFileUploadView.name,
                                args=[app.domain, app.get_id])),
                },
            }
        })
        try:
            context['multimedia']['references'] = app.get_references()
        except ReportConfigurationNotFoundError:
            pass
        context['multimedia'].update(specific_media)

    error = request.GET.get('error', '')

    context.update({
        'error': error,
        'app': app,
    })

    # Pass form for Copy Application to template
    domain_names = [d.name for d in Domain.active_for_user(request.couch_user)]
    domain_names.sort()
    if app and copy_app_form is None:
        toggle_enabled = toggles.EXPORT_ZIPPED_APPS.enabled(
            request.user.username)
        copy_app_form = CopyApplicationForm(
            domain, app, export_zipped_apps_enabled=toggle_enabled)
        context.update({
            'domain_names': domain_names,
        })
    linked_apps_enabled = toggles.LINKED_APPS.enabled(domain)
    context.update({
        'copy_app_form': copy_app_form,
        'linked_apps_enabled': linked_apps_enabled,
    })

    context['latest_commcare_version'] = get_commcare_versions(
        request.user)[-1]

    if app and app.doc_type == 'Application' and has_privilege(
            request, privileges.COMMCARE_LOGO_UPLOADER):
        uploader_slugs = ANDROID_LOGO_PROPERTY_MAPPING.keys()
        from corehq.apps.hqmedia.controller import MultimediaLogoUploadController
        from corehq.apps.hqmedia.views import ProcessLogoFileUploadView
        uploaders = [
            MultimediaLogoUploadController(
                slug,
                reverse(
                    ProcessLogoFileUploadView.name,
                    args=[domain, app_id, slug],
                )) for slug in uploader_slugs
        ]
        context.update({
            "sessionid": request.COOKIES.get('sessionid'),
            "uploaders": uploaders,
            "uploaders_js": [u.js_options for u in uploaders],
            "refs": {
                slug: ApplicationMediaReference(
                    app.logo_refs.get(slug, {}).get("path", slug),
                    media_class=CommCareImage,
                    module_id=app.logo_refs.get(slug, {}).get("m_id"),
                ).as_dict()
                for slug in uploader_slugs
            },
            "media_info": {
                slug: app.logo_refs.get(slug)
                for slug in uploader_slugs if app.logo_refs.get(slug)
            },
        })

    domain_obj = Domain.get_by_name(domain)
    context.update({
        'show_live_preview': app and should_show_preview_app(
            request, app, request.couch_user.username),
        'can_preview_form': request.couch_user.has_permission(domain, 'edit_data'),
    })

    response = render(request, template, context)

    response.set_cookie('lang', encode_if_unicode(lang))
    return response
Esempio n. 59
0
    get_change_status,
    get_dimagi_from_email,
    log_accounting_error,
    log_accounting_info,
)
from corehq.apps.domain.models import Domain
from corehq.apps.hqwebapp.tasks import send_html_email_async
from corehq.apps.notifications.models import Notification
from corehq.apps.users.models import FakeUser, WebUser
from corehq.const import USER_DATE_FORMAT, USER_MONTH_FORMAT
from corehq.util.view_utils import absolute_reverse
from corehq.util.dates import get_previous_month_date_range
from corehq.util.soft_assert import soft_assert

_invoicing_complete_soft_assert = soft_assert(
    to='{}@{}'.format('npellegrino', 'dimagi.com'),
    exponential_backoff=False,
)

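# a subscription's start date must precede its end date, unless it is open-ended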
CONSISTENT_DATES_CHECK = (
    Q(date_start__lt=F('date_end')) | Q(date_end__isnull=True)
)


@transaction.atomic
def _activate_subscription(subscription):
    subscription.is_active = True
    subscription.save()
    upgraded_privs = get_change_status(
        None, subscription.plan_version).upgraded_privs
    subscription.subscriber.activate_subscription(
        upgraded_privileges=upgraded_privs,
        subscription=subscription,
Esempio n. 60
0
from corehq.apps.accounting.utils.subscription import ensure_community_or_paused_subscription
from corehq.apps.analytics.tasks import (
    HUBSPOT_CREATED_NEW_PROJECT_SPACE_FORM_ID,
    send_hubspot_form,
)
from corehq.apps.domain.models import Domain
from corehq.apps.hqwebapp.tasks import send_html_email_async, send_mail_async
from corehq.apps.registration.models import RegistrationRequest
from corehq.apps.registration.tasks import send_domain_registration_email
from corehq.apps.users.models import CouchUser, UserRole, WebUser
from corehq.util.view_utils import absolute_reverse

APPCUES_APP_SLUGS = ['health', 'agriculture', 'wash']

_soft_assert_registration_issues = soft_assert(
    to=['{}@{}'.format(name, 'dimagi.com') for name in ['biyeun']],
    exponential_backoff=False,
)


def activate_new_user(form, is_domain_admin=True, domain=None, ip=None):
    username = form.cleaned_data['email']
    password = form.cleaned_data['password']
    full_name = form.cleaned_data['full_name']
    now = datetime.utcnow()

    new_user = WebUser.create(domain,
                              username,
                              password,
                              is_admin=is_domain_admin)
    new_user.first_name = full_name[0]
    new_user.last_name = full_name[1]