Example #1
    def test_archive_last_form(self):
        initial_amounts = [(p._id, float(100)) for p in self.products]
        self.submit_xml_form(
            balance_submission(initial_amounts),
            timestamp=datetime.utcnow() + timedelta(-30)
        )

        final_amounts = [(p._id, float(50)) for i, p in enumerate(self.products)]
        second_form_id = self.submit_xml_form(balance_submission(final_amounts))

        ledger_accessors = LedgerAccessors(self.domain.name)
        def _assert_initial_state():
            if should_use_sql_backend(self.domain.name):
                self.assertEqual(3, LedgerTransaction.objects.filter(form_id=second_form_id).count())
            else:
                self.assertEqual(1, StockReport.objects.filter(form_id=second_form_id).count())
                # 6 = 3 stockonhand and 3 inferred consumption txns
                self.assertEqual(6, StockTransaction.objects.filter(report__form_id=second_form_id).count())

            ledger_values = ledger_accessors.get_ledger_values_for_case(self.sp.case_id)
            self.assertEqual(3, len(ledger_values))
            for lv in ledger_values:
                self.assertEqual(50, lv.stock_on_hand)
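                # 100 on hand 30 days ago and 50 now: 50 consumed / 30 days ≈ 1.67 per day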
                self.assertEqual(
                    round(float(lv.daily_consumption), 2),
                    1.67
                )

        # check initial setup
        _assert_initial_state()

        # archive and confirm commtrack data is deleted
        form = FormAccessors(self.domain.name).get_form(second_form_id)
        with process_kafka_changes('LedgerToElasticsearchPillow', topics.LEDGER):
            form.archive()

        if should_use_sql_backend(self.domain.name):
            self.assertEqual(0, LedgerTransaction.objects.filter(form_id=second_form_id).count())
        else:
            self.assertEqual(0, StockReport.objects.filter(form_id=second_form_id).count())
            self.assertEqual(0, StockTransaction.objects.filter(report__form_id=second_form_id).count())

        ledger_values = ledger_accessors.get_ledger_values_for_case(self.sp.case_id)
        self.assertEqual(3, len(ledger_values))
        for state in ledger_values:
            # balance should be reverted to 100 in the StockState
            self.assertEqual(100, int(state.stock_on_hand))
            # consumption should be none since there will only be 1 data point
            self.assertIsNone(state.daily_consumption)

        # unarchive and confirm commtrack data is restored
        with process_kafka_changes('LedgerToElasticsearchPillow', topics.LEDGER):
            form.unarchive()
        _assert_initial_state()
Example #2
    def test_duplicate_form_published(self):
        form_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(form_id)
        orig_form = submit_form_locally(form_xml, domain=self.domain)[1]
        self.assertEqual(form_id, orig_form.form_id)
        self.assertEqual(1,
                         len(self.form_accessors.get_all_form_ids_in_domain()))

        with process_kafka_changes(self.form_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                # post an exact duplicate
                dupe_form = submit_form_locally(form_xml,
                                                domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)
                self.assertNotEqual(form_id, dupe_form.form_id)
                if should_use_sql_backend(self.domain):
                    self.assertEqual(form_id, dupe_form.orig_id)

        # make sure changes made it to kafka
        dupe_form_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(dupe_form.form_id, dupe_form_meta.document_id)
        self.assertEqual(dupe_form.domain, dupe_form_meta.domain)
        if should_use_sql_backend(self.domain):
            # sql domains also republish the original form to ensure that, if the server
            # crashed while processing the form the first time, it still gets sent to kafka
            orig_form_meta = self.processor.changes_seen[1].metadata
            self.assertEqual(orig_form.form_id, orig_form_meta.document_id)
            self.assertEqual(self.domain, orig_form_meta.domain)
            self.assertEqual(dupe_form.domain, orig_form_meta.domain)
Example #3
    def test_duplicate_form_published(self):
        form_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(form_id)
        orig_form = submit_form_locally(form_xml, domain=self.domain)[1]
        self.assertEqual(form_id, orig_form.form_id)
        self.assertEqual(1, len(self.form_accessors.get_all_form_ids_in_domain()))

        with process_kafka_changes(self.form_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                # post an exact duplicate
                dupe_form = submit_form_locally(form_xml, domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)
                self.assertNotEqual(form_id, dupe_form.form_id)
                if should_use_sql_backend(self.domain):
                    self.assertEqual(form_id, dupe_form.orig_id)

        # make sure changes made it to kafka
        dupe_form_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(dupe_form.form_id, dupe_form_meta.document_id)
        self.assertEqual(dupe_form.domain, dupe_form_meta.domain)
        if should_use_sql_backend(self.domain):
            # sql domains also republish the original form to ensure that, if the server
            # crashed while processing the form the first time, it still gets sent to kafka
            orig_form_meta = self.processor.changes_seen[1].metadata
            self.assertEqual(orig_form.form_id, orig_form_meta.document_id)
            self.assertEqual(self.domain, orig_form_meta.domain)
            self.assertEqual(dupe_form.domain, orig_form_meta.domain)
Example #4
 def _create_case_and_sync_to_es(self, domain):
     case_id = uuid.uuid4().hex
     case_name = "case-name-{}".format(uuid.uuid4().hex)
     with process_kafka_changes("ReportCaseToElasticsearchPillow"):
         with process_couch_changes("DefaultChangeFeedPillow"):
             create_and_save_a_case(domain, case_id, case_name)
     self.elasticsearch.indices.refresh(REPORT_CASE_INDEX_INFO.index)
     return case_id, case_name
Example #5
 def _create_case_and_sync_to_es(self, domain):
     case_id = uuid.uuid4().hex
     case_name = 'case-name-{}'.format(uuid.uuid4().hex)
     with process_kafka_changes('ReportCaseToElasticsearchPillow'):
         with process_couch_changes('DefaultChangeFeedPillow'):
             create_and_save_a_case(domain, case_id, case_name)
     self.elasticsearch.indices.refresh(REPORT_CASE_INDEX_INFO.index)
     return case_id, case_name
Example #6
 def _create_case_and_sync_to_es(self):
     case_id = uuid.uuid4().hex
     case_name = 'case-name-{}'.format(uuid.uuid4().hex)
     with process_kafka_changes('CaseToElasticsearchPillow'):
         with process_couch_changes('DefaultChangeFeedPillow'):
             create_and_save_a_case(self.domain, case_id, case_name)
     self.elasticsearch.indices.refresh(CASE_INDEX_INFO.index)
     return case_id, case_name
Example #7
 def _create_form_and_sync_to_es(self, domain):
     with process_kafka_changes('ReportXFormToElasticsearchPillow'):
         with process_couch_changes('DefaultChangeFeedPillow'):
             metadata = TestFormMetadata(domain=domain)
             form = get_form_ready_to_save(metadata)
             FormProcessorInterface(domain=domain).save_processed_models([form])
     self.elasticsearch.indices.refresh(REPORT_XFORM_INDEX_INFO.index)
     return form, metadata
Example #8
 def _create_form_and_sync_to_es(self):
     with process_kafka_changes('XFormToElasticsearchPillow'):
         with process_couch_changes('DefaultChangeFeedPillow'):
             metadata = TestFormMetadata(domain=self.domain)
             form = get_form_ready_to_save(metadata, is_db_test=True)
             form_processor = FormProcessorInterface(domain=self.domain)
             form_processor.save_processed_models([form])
     self.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)
     return form, metadata
Example #9
    def test_form_is_published(self):
        with process_kafka_changes(self.form_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                form = create_and_save_a_form(self.domain)

        self.assertEqual(1, len(self.processor.changes_seen))
        change_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(form.form_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)
Example #10
 def _create_form_and_sync_to_es(self):
     with process_kafka_changes('XFormToElasticsearchPillow'):
         with process_couch_changes('DefaultChangeFeedPillow'):
             metadata = TestFormMetadata(domain=self.domain)
             form = get_form_ready_to_save(metadata, is_db_test=True)
             form_processor = FormProcessorInterface(domain=self.domain)
             form_processor.save_processed_models([form])
     self.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)
     return form, metadata
Example #11
 def _create_form_and_sync_to_es(self, domain):
     with process_kafka_changes('ReportXFormToElasticsearchPillow'):
         with process_couch_changes('DefaultChangeFeedPillow'):
             metadata = TestFormMetadata(domain=domain)
             form = get_form_ready_to_save(metadata)
             FormProcessorInterface(domain=domain).save_processed_models(
                 [form])
     self.elasticsearch.indices.refresh(REPORT_XFORM_INDEX_INFO.index)
     return form, metadata
Example #12
    def test_form_is_published(self):
        with process_kafka_changes(self.form_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                form = create_and_save_a_form(self.domain)

        self.assertEqual(1, len(self.processor.changes_seen))
        change_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(form.form_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)
Example #13
 def create_form_and_sync_to_es(received_on):
     with process_kafka_changes('XFormToElasticsearchPillow'):
         with process_couch_changes('DefaultChangeFeedPillow'):
             metadata = TestFormMetadata(domain=cls.domain, app_id=cls.app_id,
                                         xmlns=cls.xmlns, received_on=received_on)
             form = get_form_ready_to_save(metadata, is_db_test=True)
             form_processor = FormProcessorInterface(domain=cls.domain)
             form_processor.save_processed_models([form])
     return form
Example #14
    def test_case_is_published(self):
        with process_kafka_changes(self.case_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                case = create_and_save_a_case(self.domain, case_id=uuid.uuid4().hex, case_name='test case')

        self.assertEqual(1, len(self.processor.changes_seen))
        change_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(case.case_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)
Example #15
    def test_form_soft_deletions(self):
        form = create_and_save_a_form(self.domain)
        with process_kafka_changes(self.form_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                form.soft_delete()

        self.assertEqual(1, len(self.processor.changes_seen))
        change_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(form.form_id, change_meta.document_id)
        self.assertTrue(change_meta.is_deletion)
Example #16
    def test_form_soft_deletions(self):
        form = create_and_save_a_form(self.domain)
        with process_kafka_changes(self.form_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                form.soft_delete()

        self.assertEqual(1, len(self.processor.changes_seen))
        change_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(form.form_id, change_meta.document_id)
        self.assertTrue(change_meta.is_deletion)
Example #17
    def test_pre_set_defaults(self):
        set_default_monthly_consumption_for_domain(self.domain.name, 5 * 30)
        with process_kafka_changes('LedgerToElasticsearchPillow', topics.LEDGER):
            self.report(25, 0)
        state = StockState.objects.get(
            section_id='stock',
            case_id=self.sp.case_id,
            product_id=self.products[0]._id,
        )

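        # the default monthly consumption of 5 * 30 works out to 5 per day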
        self.assertEqual(5, float(state.get_daily_consumption()))
Example #18
    def test_pre_set_defaults(self):
        set_default_monthly_consumption_for_domain(self.domain.name, 5 * 30)
        with process_kafka_changes('LedgerToElasticsearchPillow'):
            self.report(25, 0)
        state = StockState.objects.get(
            section_id='stock',
            case_id=self.sp.case_id,
            product_id=self.products[0]._id,
        )

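        # the default monthly consumption of 5 * 30 works out to 5 per day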
        self.assertEqual(5, float(state.get_daily_consumption()))
Example #19
 def create_form_and_sync_to_es(received_on):
     with process_kafka_changes('XFormToElasticsearchPillow'):
         with process_couch_changes('DefaultChangeFeedPillow'):
             metadata = TestFormMetadata(domain=cls.domain,
                                         app_id=cls.app_id,
                                         xmlns=cls.xmlns,
                                         received_on=received_on)
             form = get_form_ready_to_save(metadata, is_db_test=True)
             form_processor = FormProcessorInterface(domain=cls.domain)
             form_processor.save_processed_models([form])
     return form
Example #20
    def test_none_with_no_defaults(self):
        # need to submit something to have a state initialized
        with process_kafka_changes('LedgerToElasticsearchPillow'):
            self.report(25, 0)

        state = StockState.objects.get(
            section_id='stock',
            case_id=self.sp.case_id,
            product_id=self.products[0]._id,
        )

        self.assertEqual(None, state.get_daily_consumption())
Example #21
    def test_none_with_no_defaults(self):
        # need to submit something to have a state initialized
        with process_kafka_changes('LedgerToElasticsearchPillow', topics.LEDGER):
            self.report(25, 0)

        state = StockState.objects.get(
            section_id='stock',
            case_id=self.sp.case_id,
            product_id=self.products[0]._id,
        )

        self.assertEqual(None, state.get_daily_consumption())
Example #22
    def test_stock_state(self):
        with process_kafka_changes('LedgerToElasticsearchPillow'):
            self.report(25, 5)
            self.report(10, 0)

        state = StockState.objects.get(
            section_id='stock',
            case_id=self.sp.case_id,
            product_id=self.products[0]._id,
        )

        self.assertEqual(10, state.stock_on_hand)
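        # assuming report()'s second argument is days in the past:
        # 25 on hand 5 days ago and 10 now gives 15 consumed / 5 days = 3 per day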
        self.assertEqual(3.0, state.get_daily_consumption())
Example #23
    def test_stock_state(self):
        with process_kafka_changes('LedgerToElasticsearchPillow', topics.LEDGER):
            self.report(25, 5)
            self.report(10, 0)

        state = StockState.objects.get(
            section_id='stock',
            case_id=self.sp.case_id,
            product_id=self.products[0]._id,
        )

        self.assertEqual(10, state.stock_on_hand)
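        # assuming report()'s second argument is days in the past:
        # 25 on hand 5 days ago and 10 now gives 15 consumed / 5 days = 3 per day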
        self.assertEqual(3.0, state.get_daily_consumption())
Example #24
    def test_form_soft_deletion(self):
        form, metadata = self._create_form_and_sync_to_es()

        # verify there
        results = FormES().run()
        self.assertEqual(1, results.total)

        # soft delete the form
        with process_kafka_changes('XFormToElasticsearchPillow'):
            with process_couch_changes('DefaultChangeFeedPillow'):
                FormAccessors(self.domain).soft_delete_forms([form.form_id])
        self.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)

        # ensure not there anymore
        results = FormES().run()
        self.assertEqual(0, results.total)
Example #25
    def test_form_soft_deletion(self):
        form, metadata = self._create_form_and_sync_to_es()

        # verify there
        results = FormES().run()
        self.assertEqual(1, results.total)

        # soft delete the form
        with process_kafka_changes('XFormToElasticsearchPillow'):
            with process_couch_changes('DefaultChangeFeedPillow'):
                FormAccessors(self.domain).soft_delete_forms([form.form_id])
        self.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)

        # ensure not there anymore
        results = FormES().run()
        self.assertEqual(0, results.total)
Example #26
    def test_case_soft_deletion(self):
        case_id, case_name = self._create_case_and_sync_to_es()

        # verify there
        results = CaseES().run()
        self.assertEqual(1, results.total)

        # soft delete the case
        with process_kafka_changes('CaseToElasticsearchPillow'):
            with process_couch_changes('DefaultChangeFeedPillow'):
                CaseAccessors(self.domain).soft_delete_cases([case_id])
        self.elasticsearch.indices.refresh(CASE_INDEX_INFO.index)

        # ensure not there anymore
        results = CaseES().run()
        self.assertEqual(0, results.total)
Example #27
    def test_duplicate_case_published(self):
        # this test only runs on sql because it's handling a sql-specific edge case where duplicate
        # form submissions should cause cases to be resubmitted.
        # see: http://manage.dimagi.com/default.asp?228463 for context
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]
        self.assertEqual(1, len(CaseAccessors(self.domain).get_case_ids_in_domain()))

        with process_kafka_changes(self.case_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                dupe_form = submit_form_locally(form_xml, domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)

        # check the case was republished
        self.assertEqual(1, len(self.processor.changes_seen))
        case_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(case_id, case_meta.document_id)
        self.assertEqual(self.domain, case_meta.domain)
Example #28
    def test_duplicate_ledger_published(self):
        # this test also only runs on the sql backend for reasons described in test_duplicate_case_published
        # setup products and case
        product_a = make_product(self.domain, 'A Product', 'prodcode_a')
        product_b = make_product(self.domain, 'B Product', 'prodcode_b')
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]

        # submit ledger data
        balances = (
            (product_a._id, 100),
            (product_b._id, 50),
        )
        ledger_blocks = [
            get_single_balance_block(case_id, prod_id, balance)
            for prod_id, balance in balances
        ]
        form = submit_case_blocks(ledger_blocks, self.domain)[0]

        # submit duplicate
        with process_kafka_changes(self.ledger_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                dupe_form = submit_form_locally(form.get_xml(),
                                                domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)

        # confirm republished
        ledger_meta_a = self.processor.changes_seen[0].metadata
        ledger_meta_b = self.processor.changes_seen[1].metadata
        format_id = lambda product_id: '{}/{}/{}'.format(
            case_id, 'stock', product_id)
        expected_ids = {format_id(product_a._id), format_id(product_b._id)}
        for meta in [ledger_meta_a, ledger_meta_b]:
            self.assertTrue(meta.document_id in expected_ids)
            expected_ids.remove(meta.document_id)
            self.assertEqual(self.domain, meta.domain)

        # cleanup
        product_a.delete()
        product_b.delete()
Example #29
    def test_duplicate_case_published(self):
        # this test only runs on sql because it's handling a sql-specific edge case where duplicate
        # form submissions should cause cases to be resubmitted.
        # see: http://manage.dimagi.com/default.asp?228463 for context
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]
        self.assertEqual(
            1, len(CaseAccessors(self.domain).get_case_ids_in_domain()))

        with process_kafka_changes(self.case_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                dupe_form = submit_form_locally(form_xml,
                                                domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)

        # check the case was republished
        self.assertEqual(1, len(self.processor.changes_seen))
        case_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(case_id, case_meta.document_id)
        self.assertEqual(self.domain, case_meta.domain)
Example #30
    def test_duplicate_ledger_published(self):
        # this test also only runs on the sql backend for reasons described in test_duplicate_case_published
        # setup products and case
        product_a = make_product(self.domain, 'A Product', 'prodcode_a')
        product_b = make_product(self.domain, 'B Product', 'prodcode_b')
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]

        # submit ledger data
        balances = (
            (product_a._id, 100),
            (product_b._id, 50),
        )
        ledger_blocks = [
            get_single_balance_block(case_id, prod_id, balance)
            for prod_id, balance in balances
        ]
        form = submit_case_blocks(ledger_blocks, self.domain)[0]

        # submit duplicate
        with process_kafka_changes(self.ledger_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                dupe_form = submit_form_locally(form.get_xml(), domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)


        # confirm republished
        ledger_meta_a = self.processor.changes_seen[0].metadata
        ledger_meta_b = self.processor.changes_seen[1].metadata
        format_id = lambda product_id: '{}/{}/{}'.format(case_id, 'stock', product_id)
        expected_ids = {format_id(product_a._id), format_id(product_b._id)}
        for meta in [ledger_meta_a, ledger_meta_b]:
            self.assertTrue(meta.document_id in expected_ids)
            expected_ids.remove(meta.document_id)
            self.assertEqual(self.domain, meta.domain)

        # cleanup
        product_a.delete()
        product_b.delete()
Example #31
    def test_archive_last_form(self):
        initial_amounts = [(p._id, float(100)) for p in self.products]
        self.submit_xml_form(balance_submission(initial_amounts),
                             timestamp=datetime.utcnow() + timedelta(-30))

        final_amounts = [(p._id, float(50))
                         for i, p in enumerate(self.products)]
        second_form_id = self.submit_xml_form(
            balance_submission(final_amounts))

        ledger_accessors = LedgerAccessors(self.domain.name)

        def _assert_initial_state():
            if should_use_sql_backend(self.domain):
                self.assertEqual(
                    3,
                    LedgerTransaction.objects.filter(
                        form_id=second_form_id).count())
            else:
                self.assertEqual(
                    1,
                    StockReport.objects.filter(form_id=second_form_id).count())
                # 6 = 3 stockonhand and 3 inferred consumption txns
                self.assertEqual(
                    6,
                    StockTransaction.objects.filter(
                        report__form_id=second_form_id).count())

            ledger_values = ledger_accessors.get_ledger_values_for_case(
                self.sp.case_id)
            self.assertEqual(3, len(ledger_values))
            for lv in ledger_values:
                self.assertEqual(50, lv.stock_on_hand)
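                # 100 on hand 30 days ago and 50 now: 50 consumed / 30 days ≈ 1.67 per day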
                self.assertEqual(round(float(lv.daily_consumption), 2), 1.67)

        # check initial setup
        _assert_initial_state()

        # archive and confirm commtrack data is deleted
        form = FormAccessors(self.domain.name).get_form(second_form_id)
        with process_kafka_changes('LedgerToElasticsearchPillow'):
            form.archive()

        if should_use_sql_backend(self.domain):
            self.assertEqual(
                0,
                LedgerTransaction.objects.filter(
                    form_id=second_form_id).count())
        else:
            self.assertEqual(
                0,
                StockReport.objects.filter(form_id=second_form_id).count())
            self.assertEqual(
                0,
                StockTransaction.objects.filter(
                    report__form_id=second_form_id).count())

        ledger_values = ledger_accessors.get_ledger_values_for_case(
            self.sp.case_id)
        self.assertEqual(3, len(ledger_values))
        for state in ledger_values:
            # balance should be reverted to 100 in the StockState
            self.assertEqual(100, int(state.stock_on_hand))
            # consumption should be none since there will only be 1 data point
            self.assertIsNone(state.daily_consumption)

        # unarchive and confirm commtrack data is restored
        with process_kafka_changes('LedgerToElasticsearchPillow'):
            form.unarchive()
        _assert_initial_state()
Example #32
            with process_couch_changes('DefaultChangeFeedPillow'):
                case = create_and_save_a_case(self.domain,
                                              case_id=uuid.uuid4().hex,
                                              case_name='test case')

        self.assertEqual(1, len(self.processor.changes_seen))
        change_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(case.case_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

    @run_with_all_backends
    def test_case_deletions(self):
        case = create_and_save_a_case(self.domain,
                                      case_id=uuid.uuid4().hex,
                                      case_name='test case')
        with process_kafka_changes(self.case_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                case.soft_delete()

        self.assertEqual(1, len(self.processor.changes_seen))
        change_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(case.case_id, change_meta.document_id)
        self.assertTrue(change_meta.is_deletion)

    def test_duplicate_case_published(self):
        # this test only runs on sql because it's handling a sql-specific edge case where duplicate
        # form submissions should cause cases to be resubmitted.
        # see: http://manage.dimagi.com/default.asp?228463 for context
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]
Example #33
 def test_case_is_published(self):
     with process_kafka_changes(self.case_pillow):
         with process_couch_changes('DefaultChangeFeedPillow'):
             case = create_and_save_a_case(self.domain,
                                           case_id=uuid.uuid4().hex,
                                           case_name='test case')