def _test_success(self, auto_flush):
    """Send one change and verify the PRE_SEND and SENT audit log entries.

    When ``auto_flush`` is off, the producer must be flushed explicitly
    before the SENT entry shows up in the captured logs.
    """
    producer = ChangeProducer(auto_flush=auto_flush)
    with capture_log_output(KAFKA_AUDIT_LOGGER) as logs:
        change_meta = ChangeMeta(
            document_id=uuid.uuid4().hex,
            data_source_type='dummy-type',
            data_source_name='dummy-name',
        )
        producer.send_change(topics.CASE, change_meta)
        if not auto_flush:
            # batched mode: nothing is actually sent until we flush
            producer.flush()
    self._check_logs(logs, change_meta.document_id, [CHANGE_PRE_SEND, CHANGE_SENT])
예제 #2
0
    def handle(self, pillow, **options):
        """Reprocess errored changes for *pillow*, fanning work out to a pool.

        Sets up bookkeeping (count, start time) and a batching producer,
        then spawns one worker per batch of errors.
        """
        self.pillow = pillow
        self.count = 0
        self.start = time.time()
        # auto_flush disabled — sends are batched; presumably flushed by the
        # worker/teardown path, confirm against _process_errors
        self.producer = ChangeProducer(auto_flush=False)
        self.pool = Pool(10)

        for error_batch in self.get_next_errors():
            self.pool.spawn(self._process_errors, error_batch)
    def test_error_asynchronous(self):
        """A send whose future later fails must log PRE_SEND then ERROR."""
        producer = ChangeProducer(auto_flush=False)
        pending = Future()
        # stub out the underlying kafka send so we control the outcome
        producer.producer.send = Mock(return_value=pending)

        change_meta = ChangeMeta(
            document_id=uuid.uuid4().hex,
            data_source_type='dummy-type',
            data_source_name='dummy-name',
        )

        with capture_log_output(KAFKA_AUDIT_LOGGER) as logs:
            producer.send_change(topics.CASE, change_meta)
            # resolve the pending future as a failure after the send returned
            pending.failure(Exception())

        self._check_logs(logs, change_meta.document_id, [CHANGE_PRE_SEND, CHANGE_ERROR])
    def test_error_synchronous(self):
        """With auto-flush, a failing future raises and logs PRE_SEND + ERROR."""
        producer = ChangeProducer()
        failing_future = Future()
        # make the synchronous wait on the future blow up
        failing_future.get = Mock(side_effect=Exception())
        producer.producer.send = Mock(return_value=failing_future)

        change_meta = ChangeMeta(
            document_id=uuid.uuid4().hex,
            data_source_type='dummy-type',
            data_source_name='dummy-name',
        )

        with capture_log_output(KAFKA_AUDIT_LOGGER) as logs:
            with self.assertRaises(Exception):
                producer.send_change(topics.CASE, change_meta)

        self._check_logs(logs, change_meta.document_id, [CHANGE_PRE_SEND, CHANGE_ERROR])
    def handle(self, pillow_name, **options):
        """Reprocess errored changes for one Kafka-backed pillow in parallel.

        Looks the pillow up by name, validates that it is fed by Kafka, then
        spawns one pool worker per batch of errors.

        Raises:
            CommandError: if the name is unknown or the pillow is not
                backed by a ``KafkaChangeFeed``.
        """
        self.pool = Pool(10)
        self.pillow_name = pillow_name

        try:
            pillow = get_pillow_by_name(pillow_name)
        except PillowNotFoundError as err:
            # chain the cause so the original lookup failure stays visible
            raise CommandError(f"Unknown pillow: {pillow_name}") from err

        if not isinstance(pillow.get_change_feed(), KafkaChangeFeed):
            # plain string: the f-prefix here had no placeholders (ruff F541)
            raise CommandError("Only Kafka pillows are supported")

        self.count = 0
        self.start = time.time()
        # batch sends: flushed explicitly rather than per-message
        self.producer = ChangeProducer(auto_flush=False)

        for errors in self.get_next_errors():
            self.pool.spawn(self._process_errors, errors)
예제 #6
0
from django import db
from django.core.management import BaseCommand

import pytz
from psycopg2._psycopg import InterfaceError

from dimagi.utils.logging import notify_exception
from pillow_retry.api import process_pillow_retry
from pillow_retry.models import PillowError

from corehq.apps.change_feed.producer import ChangeProducer
from corehq.sql_db.util import handle_connection_failure

# Upper bound on items handled in one queue pass; also used as the
# "queue was full, don't sleep" threshold in the retry loop below.
BATCH_SIZE = 10000

# Module-level shared producer; auto_flush disabled so sends are batched
# and flushed explicitly by the consumers of this module.
producer = ChangeProducer(auto_flush=False)


class PillowRetryEnqueuingOperation(BaseCommand):
    """Management command that drains the pillow retry queue forever.

    Each iteration processes up to ``BATCH_SIZE`` items; when a pass handles
    fewer than that, the loop backs off for 10 seconds before retrying.
    """

    help = "Runs the Pillow Retry Queue"

    def handle(self, **options):
        # Fix: `sleep` was called below without ever being imported in this
        # file, which would raise NameError on the first loop iteration.
        from time import sleep

        while True:
            try:
                num_processed = self.process_queue()
            except Exception:
                num_processed = 0
                # NOTE(review): message looks copy-pasted from a survey-action
                # command — consider rewording for the retry-queue context.
                notify_exception(None,
                                 message="Could not fetch due survey actions")
            # back off only when the last pass did not fill a whole batch
            sleep_time = 10 if num_processed < BATCH_SIZE else 0
            sleep(sleep_time)
예제 #7
0
 def __init__(self, data_source_type, data_source_name, default_topic):
     """Record the data-source identity and default topic; build a producer."""
     self._producer = ChangeProducer()
     self._default_topic = default_topic
     self._data_source_type, self._data_source_name = (
         data_source_type,
         data_source_name,
     )
예제 #8
0
 def __init__(self, data_source_type, data_source_name):
     """Record the data-source identity and build a default change producer."""
     self._data_source_type, self._data_source_name = (
         data_source_type,
         data_source_name,
     )
     self._producer = ChangeProducer()
예제 #9
0
 def __init__(self, kafka, data_source_type, data_source_name):
     """Record the data-source identity and wrap *kafka* in a change producer."""
     self._kafka = kafka
     # unlike the no-arg variants, the producer is bound to the given client
     self._producer = ChangeProducer(self._kafka)
     self._data_source_type, self._data_source_name = (
         data_source_type,
         data_source_name,
     )