Example #1
def _generate_incremental_export(incremental_export):
    """Export docs modified since the last valid checkpoint, save the file to
    the blob db, and return the new checkpoint (None if nothing was exported).
    """
    export_instance = incremental_export.export_instance
    export_instance.export_format = Format.UNZIPPED_CSV  # force to unzipped CSV
    checkpoint = incremental_export.last_valid_checkpoint

    # Remove the date period from the ExportInstance, since this is added automatically by Daily Saved exports
    export_instance.filters.date_period = None
    filters = export_instance.get_filters()
    if checkpoint:
        filters.append(
            ServerModifiedOnRangeFilter(gt=checkpoint.last_doc_date))

    class LastDocTracker:
        """Wrap a doc iterator, counting docs and remembering the last one."""
        def __init__(self, doc_iterator):
            self.doc_iterator = doc_iterator
            self.last_doc = None
            self.doc_count = 0

        def __iter__(self):
            for doc in self.doc_iterator:
                self.last_doc = doc
                self.doc_count += 1
                yield doc

    with TransientTempfile() as temp_path, metrics_track_errors(
            'generate_incremental_exports'):
        writer = get_export_writer([export_instance],
                                   temp_path,
                                   allow_pagination=False)
        with writer.open([export_instance]):
            query = _get_export_query(export_instance, filters)
            # sort by server_modified_on rather than the default opened_on
            query = query.sort('server_modified_on')
            docs = LastDocTracker(query.run().hits)
            write_export_instance(writer, export_instance, docs)

        export_file = ExportFile(writer.path, writer.format)

        if docs.doc_count <= 0:
            return  # no new docs since the last checkpoint; nothing to save

        new_checkpoint = incremental_export.checkpoint(
            docs.doc_count, docs.last_doc.get('server_modified_on'))

        with export_file as file_:
            db = get_blob_db()
            db.put(file_,
                   domain=incremental_export.domain,
                   parent_id=new_checkpoint.blob_parent_id,
                   type_code=CODES.data_export,
                   key=str(new_checkpoint.blob_key),
                   timeout=24 * 60)  # keep the blob for 24 hours (timeout is in minutes)
    return new_checkpoint
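
Note: the LastDocTracker pattern above is generic, and its count and last-doc fields are only meaningful once the wrapped iterator has been fully consumed (here by write_export_instance). A minimal standalone sketch of the same idea, outside the export machinery (the class and names below are illustrative, not part of the original code):

class LastItemTracker:
    """Pass items through unchanged while counting them and keeping the last one."""

    def __init__(self, iterator):
        self.iterator = iterator
        self.last_item = None
        self.item_count = 0

    def __iter__(self):
        for item in self.iterator:
            self.last_item = item
            self.item_count += 1
            yield item


tracker = LastItemTracker(iter([10, 20, 30]))
list(tracker)  # consume the iterator, as write_export_instance does above
assert tracker.item_count == 3 and tracker.last_item == 30
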
Example #2
def process_bounced_emails():
    """Process bounce messages from the return-path inbox, if configured."""
    if settings.RETURN_PATH_EMAIL and settings.RETURN_PATH_EMAIL_PASSWORD:
        try:
            with BouncedEmailManager(
                    delete_processed_messages=True
            ) as bounced_manager, metrics_track_errors(
                    'process_bounced_emails_task'):
                bounced_manager.process_daemon_messages()
        except Exception as e:
            notify_exception(
                None,
                message="Encountered error while processing bounced emails",
                details={
                    'error': e,
                })
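
All three examples wrap their work in metrics_track_errors, a context manager that records failures under a named metric before letting the exception propagate. A minimal sketch of how such a helper could be built with contextlib; the function names, metric-name suffix, and counter stub are illustrative assumptions, not the real implementation:

from contextlib import contextmanager


def metrics_counter(name, value=1):
    # Trivial stand-in for the real metrics backend.
    print('metric %s += %s' % (name, value))


@contextmanager
def track_errors(metric_name):
    # Hypothetical sketch of what metrics_track_errors might do: bump an
    # error counter when the wrapped block raises, then re-raise.
    try:
        yield
    except Exception:
        metrics_counter(metric_name + '.error')
        raise


with track_errors('process_bounced_emails_task'):
    pass  # the guarded work goes here
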
Example #3
def populate_export_download_task(domain,
                                  export_ids,
                                  exports_type,
                                  username,
                                  es_filters,
                                  download_id,
                                  owner_id,
                                  filename=None,
                                  expiry=10 * 60):
    """
    :param expiry: time, in minutes, for which the export is available for download
    """

    email_requests = EmailExportWhenDoneRequest.objects.filter(
        domain=domain, download_id=download_id)

    if (settings.STALE_EXPORT_THRESHOLD is not None
            and not email_requests.count()):
        delay = get_task_time_to_start(
            populate_export_download_task.request.id)
        if delay.total_seconds() > settings.STALE_EXPORT_THRESHOLD:
            metrics_counter('commcare.exports.rejected_unfresh_export')
            raise RejectedStaleExport()

    export_instances = [
        get_export(exports_type, domain, export_id, username)
        for export_id in export_ids
    ]
    with TransientTempfile() as temp_path, metrics_track_errors(
            'populate_export_download_task'):
        export_file = get_export_file(
            export_instances,
            es_filters,
            temp_path,
            # We don't have a great way to calculate progress for a bulk
            # download, so only track progress for single-instance exports.
            progress_tracker=(populate_export_download_task
                              if len(export_instances) == 1 else None))

        file_format = Format.from_format(export_file.format)
        filename = filename or export_instances[0].name

        with export_file as file_:
            db = get_blob_db()
            db.put(
                file_,
                domain=domain,
                parent_id=domain,
                type_code=CODES.data_export,
                key=download_id,
                timeout=expiry,  # minutes
            )

            expose_blob_download(
                download_id,
                expiry=expiry * 60,  # converted from minutes to seconds
                mimetype=file_format.mimetype,
                content_disposition=safe_filename_header(
                    filename, file_format.extension),
                download_id=download_id,
                owner_ids=[owner_id],
            )

    for email_request in email_requests:
        try:
            couch_user = CouchUser.get_by_user_id(email_request.user_id,
                                                  domain=domain)
        except CouchUser.AccountTypeError:
            pass  # couldn't load this user as a CouchUser; skip the request
        else:
            if couch_user is not None:
                process_email_request(domain, download_id,
                                      couch_user.get_email())
    email_requests.delete()
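
The staleness guard near the top of populate_export_download_task measures how long the task sat in the queue before starting and rejects it once the delay exceeds settings.STALE_EXPORT_THRESHOLD, which the total_seconds() comparison implies is expressed in seconds. A minimal sketch of that check in isolation, assuming (as the code suggests) that get_task_time_to_start returns a timedelta; the helper name and threshold value are illustrative:

from datetime import timedelta

STALE_EXPORT_THRESHOLD = 60 * 60  # illustrative threshold: one hour, in seconds


def is_stale(queue_delay, threshold_seconds=STALE_EXPORT_THRESHOLD):
    # True when the task waited in the queue longer than the threshold.
    return queue_delay.total_seconds() > threshold_seconds


assert is_stale(timedelta(hours=2))
assert not is_stale(timedelta(minutes=5))
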