# Example 1
                repeat_record.doc_type += DELETED_SUFFIX
                repeat_record.save()
        elif repeater.paused:
            # postpone repeat record by MAX_RETRY_WAIT so that these don't get picked in each cycle and
            # thus clogging the queue with repeat records with paused repeater
            repeat_record.postpone_by(MAX_RETRY_WAIT)
        elif repeat_record.state == RECORD_PENDING_STATE or repeat_record.state == RECORD_FAILURE_STATE:
            repeat_record.fire()
    except Exception:
        logging.exception('Failed to process repeat record: {}'.format(
            repeat_record._id))


# Register a periodic gauge reporting the number of overdue repeat records,
# sampled every minute (bare crontab() default).  MPM_MAX presumably takes the
# max across worker processes — confirm against the metrics helper.
metrics_gauge_task(
    'commcare.repeaters.overdue',
    get_overdue_repeat_record_count,
    run_every=crontab(),  # every minute
    multiprocess_mode=MPM_MAX)


@task(queue=settings.CELERY_REPEAT_RECORD_QUEUE)
def process_repeater(repeater_id: int):
    """
    Worker task to send SQLRepeatRecords in chronological order.

    This function assumes that ``repeater`` checks have already
    been performed. Call via ``models.attempt_forward_now()``.
    """
    repeater = SQLRepeater.objects.get(id=repeater_id)
    with CriticalSection(
        [f'process-repeater-{repeater.repeater_id}'],
# Example 2
        # Delete entries that should not exist
        for phone_number in phone_entries.keys():
            if phone_number not in numbers_that_should_exist:
                phone_entries[phone_number].delete()

        # Create entries that should exist but do not exist
        for phone_number in numbers_that_should_exist:
            if phone_number not in phone_entries:
                try:
                    couch_user.create_phone_entry(phone_number)
                except InvalidFormatException:
                    pass


@no_result_task(queue='background_queue', acks_late=True,
                default_retry_delay=5 * 60, max_retries=10, bind=True)
def publish_sms_change(self, sms_id):
    """Publish the saved-SMS change-feed event for the given SMS record.

    :param sms_id: identifier of the SMS row to publish
    Retries up to 10 times, 5 minutes apart, if publishing fails.
    """
    # Bug fix: the original called SMS.objects.get(sms_id) with a positional
    # argument, which Django's QuerySet.get() rejects (positional args must be
    # Q expressions) and would raise TypeError on every invocation.  Look the
    # record up by couch_id, which is what sms_id holds here — TODO(review):
    # confirm against the caller that enqueues this task.
    sms = SMS.objects.get(couch_id=sms_id)
    try:
        publish_sms_saved(sms)
    except Exception as e:
        self.retry(exc=e)


def queued_sms():
    """Gauge callback: number of SMS messages currently sitting in the queue."""
    return QueuedSMS.objects.all().count()


# Gauge: current size of the outbound SMS queue, sampled every minute.
# MPM_MAX presumably keeps the max across worker processes — confirm.
metrics_gauge_task('commcare.sms.queued', queued_sms, run_every=crontab(),
                   multiprocess_mode=MPM_MAX)
# Example 3
        numbers_that_should_exist = [apply_leniency(phone_number) for phone_number in couch_user.phone_numbers]

        # Delete entries that should not exist
        for phone_number in phone_entries.keys():
            if phone_number not in numbers_that_should_exist:
                phone_entries[phone_number].delete()

        # Create entries that should exist but do not exist
        for phone_number in numbers_that_should_exist:
            if phone_number not in phone_entries:
                try:
                    couch_user.create_phone_entry(phone_number)
                except InvalidFormatException:
                    pass


@no_result_task(serializer='pickle', queue='background_queue', acks_late=True,
                default_retry_delay=5 * 60, max_retries=10, bind=True)
def publish_sms_change(self, sms):
    """Publish a saved-SMS change-feed event, retrying on any failure.

    Retries up to 10 times with a 5-minute delay between attempts.
    """
    # NOTE(review): serializer='pickle' means task payloads are unpickled on
    # the worker, which executes arbitrary code if the broker is ever
    # compromised — prefer passing an id with the default JSON serializer.
    try:
        publish_sms_saved(sms)
    except Exception as e:
        self.retry(exc=e)


def queued_sms():
    """Gauge callback: how many SMS messages are currently queued."""
    return QueuedSMS.objects.all().count()


metrics_gauge_task('commcare.sms.queued', queued_sms, run_every=crontab())
# Example 4

@periodic_task(run_every=crontab(minute=0, hour=3), queue='background_queue')
def clean_expired_transient_emails():
    """Nightly (03:00) sweep deleting transient bounce records past expiry.

    Failures are reported via notify_exception rather than raised, so the
    periodic schedule is never interrupted.
    """
    try:
        TransientBounceEmail.delete_expired_bounces()
    except Exception as err:
        notify_exception(None,
                         message="Encountered error while deleting expired "
                         "transient bounce emails",
                         details={'error': err})


def get_maintenance_alert_active():
    """Gauge callback: 1 when a maintenance alert is currently live, else 0."""
    from corehq.apps.hqwebapp.models import MaintenanceAlert
    return int(bool(MaintenanceAlert.get_latest_alert()))


# Gauge: 1 while a maintenance alert is active, 0 otherwise; sampled hourly
# at minute 1.  MPM_MAX presumably reports the max across worker processes —
# confirm against the metrics helper.
metrics_gauge_task('commcare.maintenance_alerts.active',
                   get_maintenance_alert_active,
                   run_every=crontab(minute=1),
                   multiprocess_mode=MPM_MAX)


@periodic_task(run_every=crontab(minute=0, hour=4))
def clear_expired_oauth_tokens():
    """Daily (04:00) cleanup of expired OAuth tokens via django-oauth-toolkit.

    See https://django-oauth-toolkit.readthedocs.io/en/latest/management_commands.html#cleartokens
    """
    call_command('cleartokens')
# Example 5
        return exit_celery_with_error_message(bulk_import_async,
                                              get_importer_error_message(e))
    finally:
        store_task_result.delay(excel_id)


@task(serializer='pickle', queue='case_import_queue')
def store_task_result(upload_id):
    """Persist the finished import task's result onto its CaseUpload record."""
    CaseUpload.get(upload_id).store_task_result()


def _alert_on_result(result, domain):
    """Check import result and send internal alerts based on result

    :param result: dict that should include key "created_count" pointing to an int
    """
    created_count = result['created_count']
    # Only unusually large imports (>10k new cases) are worth an alert.
    if created_count <= 10000:
        return
    message = "A case import just uploaded {num} new cases to HQ. {domain} might be scaling operations".format(
        num=created_count, domain=domain)
    send_abnormal_usage_alert.delay(
        AbnormalUsageAlert(source="case importer",
                           domain=domain,
                           message=message))


# Gauge: total bytes of stored case-import upload files, sampled hourly
# (minute 0).  The returned task handle is bound to a module-level name,
# presumably for introspection/tests — confirm.
total_bytes = metrics_gauge_task('commcare.case_importer.files.total_bytes',
                                 get_case_upload_files_total_bytes,
                                 run_every=crontab(minute=0))
# Example 6
        return

    repeater = repeat_record.repeater
    if not repeater:
        repeat_record.cancel()
        repeat_record.save()
        return

    try:
        if repeater.paused:
            # postpone repeat record by 1 day so that these don't get picked in each cycle and
            # thus clogging the queue with repeat records with paused repeater
            repeat_record.postpone_by(timedelta(days=1))
            return
        if repeater.doc_type.endswith(DELETED_SUFFIX):
            if not repeat_record.doc_type.endswith(DELETED_SUFFIX):
                repeat_record.doc_type += DELETED_SUFFIX
                repeat_record.save()
        elif repeat_record.state == RECORD_PENDING_STATE or repeat_record.state == RECORD_FAILURE_STATE:
            repeat_record.fire()
    except Exception:
        logging.exception('Failed to process repeat record: {}'.format(
            repeat_record._id))


# Gauge: count of overdue repeat records, sampled every minute (the bare
# crontab() default).  Handle kept in a module-level name, presumably for
# introspection/tests — confirm.
repeaters_overdue = metrics_gauge_task(
    'commcare.repeaters.overdue',
    get_overdue_repeat_record_count,
    run_every=crontab()  # every minute
)
        )
        self.retry(exc=e)


@periodic_task(run_every=crontab(minute=0, hour=0), queue='background_queue')
def process_bounced_emails():
    """Nightly (00:00) job pulling bounce notifications from the return-path
    inbox.

    No-op unless both return-path credentials are configured in settings.
    Errors are reported via notify_exception instead of being raised.
    """
    if not (settings.RETURN_PATH_EMAIL and settings.RETURN_PATH_EMAIL_PASSWORD):
        return
    try:
        with BouncedEmailManager(delete_processed_messages=True) as bounced_manager, \
                metrics_track_errors('process_bounced_emails_task'):
            bounced_manager.process_aws_notifications()
            bounced_manager.process_daemon_messages()
    except Exception as err:
        notify_exception(
            None,
            message="Encountered error while processing bounced emails",
            details={
                'error': err,
            }
        )


def get_maintenance_alert_active():
    """Gauge callback: 1 if a maintenance alert is live, otherwise 0."""
    from corehq.apps.hqwebapp.models import MaintenanceAlert
    alert = MaintenanceAlert.get_latest_alert()
    return 1 if alert else 0


# Gauge: 1 while a maintenance alert is active, 0 otherwise; sampled hourly
# at minute 1.
metrics_gauge_task('commcare.maintenance_alerts.active', get_maintenance_alert_active,
                   run_every=crontab(minute=1))
# Example 8
    finally:
        if not result_stored:
            store_failed_task_result.delay(excel_id)


@task(queue='case_import_queue')
def store_failed_task_result(upload_id):
    """Record a failed import task's outcome on the matching CaseUpload."""
    CaseUpload.get(upload_id).store_failed_task_result()


def _alert_on_result(result, domain):
    """Check import result and send internal alerts based on result

    :param result: dict that should include key "created_count" pointing to an int
    """
    created_count = result['created_count']
    # Alert only on unusually large imports (more than 10k new cases).
    if created_count <= 10000:
        return
    message = "A case import just uploaded {num} new cases to HQ. {domain} might be scaling operations".format(
        num=created_count, domain=domain)
    alert = AbnormalUsageAlert(source="case importer",
                               domain=domain,
                               message=message)
    send_abnormal_usage_alert.delay(alert)


# Gauge: total bytes of stored case-import upload files, sampled hourly
# (minute 0).  MPM_MAX presumably takes the max across worker processes —
# confirm against the metrics helper.
metrics_gauge_task('commcare.case_importer.files.total_bytes',
                   get_case_upload_files_total_bytes,
                   run_every=crontab(minute=0),
                   multiprocess_mode=MPM_MAX)