Ejemplo n.º 1
0
    def test_duplicate_form_published(self):
        """Submitting an exact duplicate form publishes the duplicate to
        kafka; on SQL backends the original form is republished as well.
        """
        form_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(form_id)
        orig_form = submit_form_locally(form_xml, domain=self.domain)[1]
        self.assertEqual(form_id, orig_form.form_id)
        self.assertEqual(1,
                         len(self.form_accessors.get_all_form_ids_in_domain()))

        with process_kafka_changes(self.form_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                # post an exact duplicate
                dupe_form = submit_form_locally(form_xml,
                                                domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)
                self.assertNotEqual(form_id, dupe_form.form_id)
                if should_use_sql_backend(self.domain):
                    self.assertEqual(form_id, dupe_form.orig_id)

        # make sure changes made it to kafka
        dupe_form_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(dupe_form.form_id, dupe_form_meta.document_id)
        # fixed: previously compared dupe_form.domain against itself,
        # which always passed; check the metadata's domain instead
        self.assertEqual(self.domain, dupe_form_meta.domain)
        if should_use_sql_backend(self.domain):
            # sql domains also republish the original form to ensure that if the server crashed
            # in the processing of the form the first time that it is still sent to kafka
            orig_form_meta = self.processor.changes_seen[1].metadata
            self.assertEqual(orig_form.form_id, orig_form_meta.document_id)
            self.assertEqual(self.domain, orig_form_meta.domain)
            # fixed: previously a tautological self-comparison
            self.assertEqual(self.domain, dupe_form.domain)
Ejemplo n.º 2
0
    def test_duplicate_form_published(self):
        """Submitting an exact duplicate form publishes the duplicate to
        kafka; on SQL backends the original form is republished as well.
        """
        form_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(form_id)
        orig_form = submit_form_locally(form_xml, domain=self.domain)[1]
        self.assertEqual(form_id, orig_form.form_id)
        self.assertEqual(1, len(self.form_accessors.get_all_form_ids_in_domain()))

        with process_kafka_changes(self.form_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                # post an exact duplicate
                dupe_form = submit_form_locally(form_xml, domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)
                self.assertNotEqual(form_id, dupe_form.form_id)
                if should_use_sql_backend(self.domain):
                    self.assertEqual(form_id, dupe_form.orig_id)

        # make sure changes made it to kafka
        dupe_form_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(dupe_form.form_id, dupe_form_meta.document_id)
        # fixed: previously compared dupe_form.domain against itself,
        # which always passed; check the metadata's domain instead
        self.assertEqual(self.domain, dupe_form_meta.domain)
        if should_use_sql_backend(self.domain):
            # sql domains also republish the original form to ensure that if the server crashed
            # in the processing of the form the first time that it is still sent to kafka
            orig_form_meta = self.processor.changes_seen[1].metadata
            self.assertEqual(orig_form.form_id, orig_form_meta.document_id)
            self.assertEqual(self.domain, orig_form_meta.domain)
            # fixed: previously a tautological self-comparison
            self.assertEqual(self.domain, dupe_form.domain)
Ejemplo n.º 3
0
 def _create_case_and_sync_to_es(self, domain):
     """Create a case in `domain`, sync it through the report-case pillow
     chain, and refresh the ES index so it is immediately queryable.

     Returns the (case_id, case_name) pair of the new case.
     """
     new_id = uuid.uuid4().hex
     new_name = 'case-name-{}'.format(uuid.uuid4().hex)
     with process_kafka_changes('ReportCaseToElasticsearchPillow'), \
             process_couch_changes('DefaultChangeFeedPillow'):
         create_and_save_a_case(domain, new_id, new_name)
     self.elasticsearch.indices.refresh(REPORT_CASE_INDEX_INFO.index)
     return new_id, new_name
Ejemplo n.º 4
0
 def _create_case_and_sync_to_es(self):
     """Create a case in self.domain, sync it through the case pillow
     chain, and refresh the ES index so it is immediately queryable.

     Returns the (case_id, case_name) pair of the new case.
     """
     new_id = uuid.uuid4().hex
     new_name = 'case-name-{}'.format(uuid.uuid4().hex)
     with process_kafka_changes('CaseToElasticsearchPillow'), \
             process_couch_changes('DefaultChangeFeedPillow'):
         create_and_save_a_case(self.domain, new_id, new_name)
     self.elasticsearch.indices.refresh(CASE_INDEX_INFO.index)
     return new_id, new_name
Ejemplo n.º 5
0
 def _create_form_and_sync_to_es(self, domain):
     """Save a form in `domain`, sync it through the report-form pillow
     chain, and refresh the ES index so it is immediately queryable.

     Returns the saved form and its metadata.
     """
     with process_kafka_changes('ReportXFormToElasticsearchPillow'), \
             process_couch_changes('DefaultChangeFeedPillow'):
         form_meta = TestFormMetadata(domain=domain)
         saved_form = get_form_ready_to_save(form_meta)
         interface = FormProcessorInterface(domain=domain)
         interface.save_processed_models([saved_form])
     self.elasticsearch.indices.refresh(REPORT_XFORM_INDEX_INFO.index)
     return saved_form, form_meta
Ejemplo n.º 6
0
 def _create_case_and_sync_to_es(self, domain):
     """Create a case in `domain`, sync it through the report-case pillow
     chain, and refresh the ES index so it is immediately queryable.

     Returns the (case_id, case_name) pair of the new case.
     """
     new_id = uuid.uuid4().hex
     new_name = "case-name-{}".format(uuid.uuid4().hex)
     with process_kafka_changes("ReportCaseToElasticsearchPillow"), \
             process_couch_changes("DefaultChangeFeedPillow"):
         create_and_save_a_case(domain, new_id, new_name)
     self.elasticsearch.indices.refresh(REPORT_CASE_INDEX_INFO.index)
     return new_id, new_name
Ejemplo n.º 7
0
    def test_form_is_published(self):
        """Saving a form should publish exactly one change to kafka with the
        form's id and domain in its metadata."""
        with process_kafka_changes(self.form_pillow), \
                process_couch_changes('DefaultChangeFeedPillow'):
            form = create_and_save_a_form(self.domain)

        seen = self.processor.changes_seen
        self.assertEqual(1, len(seen))
        meta = seen[0].metadata
        self.assertEqual(form.form_id, meta.document_id)
        self.assertEqual(self.domain, meta.domain)
Ejemplo n.º 8
0
    def test_case_is_published(self):
        """Creating a case should publish exactly one change to kafka with
        the case's id and domain in its metadata."""
        with process_kafka_changes(self.case_pillow), \
                process_couch_changes('DefaultChangeFeedPillow'):
            case = create_and_save_a_case(
                self.domain, case_id=uuid.uuid4().hex, case_name='test case')

        seen = self.processor.changes_seen
        self.assertEqual(1, len(seen))
        meta = seen[0].metadata
        self.assertEqual(case.case_id, meta.document_id)
        self.assertEqual(self.domain, meta.domain)
Ejemplo n.º 9
0
 def _create_form_and_sync_to_es(self, domain):
     """Save a form in `domain`, sync it through the report-form pillow
     chain, and refresh the ES index so it is immediately queryable.

     Returns the saved form and its metadata.
     """
     with process_kafka_changes('ReportXFormToElasticsearchPillow'), \
             process_couch_changes('DefaultChangeFeedPillow'):
         form_meta = TestFormMetadata(domain=domain)
         saved_form = get_form_ready_to_save(form_meta)
         interface = FormProcessorInterface(domain=domain)
         interface.save_processed_models([saved_form])
     self.elasticsearch.indices.refresh(REPORT_XFORM_INDEX_INFO.index)
     return saved_form, form_meta
Ejemplo n.º 10
0
 def _create_form_and_sync_to_es(self):
     """Save a form in self.domain, sync it through the form pillow chain,
     and refresh the ES index so it is immediately queryable.

     Returns the saved form and its metadata.
     """
     with process_kafka_changes('XFormToElasticsearchPillow'), \
             process_couch_changes('DefaultChangeFeedPillow'):
         form_meta = TestFormMetadata(domain=self.domain)
         saved_form = get_form_ready_to_save(form_meta, is_db_test=True)
         FormProcessorInterface(domain=self.domain).save_processed_models(
             [saved_form])
     self.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)
     return saved_form, form_meta
Ejemplo n.º 11
0
 def create_form_and_sync_to_es(received_on):
     """Save a form carrying the class's app/xmlns metadata and the given
     received_on timestamp, syncing it through the pillow chain.

     Returns the saved form. (Closes over `cls` from the enclosing scope.)
     """
     with process_kafka_changes('XFormToElasticsearchPillow'), \
             process_couch_changes('DefaultChangeFeedPillow'):
         form_meta = TestFormMetadata(domain=cls.domain, app_id=cls.app_id,
                                      xmlns=cls.xmlns,
                                      received_on=received_on)
         saved_form = get_form_ready_to_save(form_meta, is_db_test=True)
         interface = FormProcessorInterface(domain=cls.domain)
         interface.save_processed_models([saved_form])
     return saved_form
Ejemplo n.º 12
0
 def _create_form_and_sync_to_es(self):
     """Save a form in self.domain, sync it through the form pillow chain,
     and refresh the ES index so it is immediately queryable.

     Returns the saved form and its metadata.
     """
     with process_kafka_changes('XFormToElasticsearchPillow'), \
             process_couch_changes('DefaultChangeFeedPillow'):
         form_meta = TestFormMetadata(domain=self.domain)
         saved_form = get_form_ready_to_save(form_meta, is_db_test=True)
         FormProcessorInterface(domain=self.domain).save_processed_models(
             [saved_form])
     self.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)
     return saved_form, form_meta
Ejemplo n.º 13
0
    def test_form_is_published(self):
        """Saving a form should publish exactly one change to kafka with the
        form's id and domain in its metadata."""
        with process_kafka_changes(self.form_pillow), \
                process_couch_changes('DefaultChangeFeedPillow'):
            form = create_and_save_a_form(self.domain)

        seen = self.processor.changes_seen
        self.assertEqual(1, len(seen))
        meta = seen[0].metadata
        self.assertEqual(form.form_id, meta.document_id)
        self.assertEqual(self.domain, meta.domain)
Ejemplo n.º 14
0
    def test_form_soft_deletions(self):
        """Soft-deleting a form should publish one change to kafka marked as
        a deletion."""
        form = create_and_save_a_form(self.domain)
        with process_kafka_changes(self.form_pillow), \
                process_couch_changes('DefaultChangeFeedPillow'):
            form.soft_delete()

        seen = self.processor.changes_seen
        self.assertEqual(1, len(seen))
        meta = seen[0].metadata
        self.assertEqual(form.form_id, meta.document_id)
        self.assertTrue(meta.is_deletion)
Ejemplo n.º 15
0
    def test_form_soft_deletions(self):
        """Soft-deleting a form should publish one change to kafka marked as
        a deletion."""
        form = create_and_save_a_form(self.domain)
        with process_kafka_changes(self.form_pillow), \
                process_couch_changes('DefaultChangeFeedPillow'):
            form.soft_delete()

        seen = self.processor.changes_seen
        self.assertEqual(1, len(seen))
        meta = seen[0].metadata
        self.assertEqual(form.form_id, meta.document_id)
        self.assertTrue(meta.is_deletion)
Ejemplo n.º 16
0
 def create_form_and_sync_to_es(received_on):
     """Save a form carrying the class's app/xmlns metadata and the given
     received_on timestamp, syncing it through the pillow chain.

     Returns the saved form. (Closes over `cls` from the enclosing scope.)
     """
     with process_kafka_changes('XFormToElasticsearchPillow'), \
             process_couch_changes('DefaultChangeFeedPillow'):
         form_meta = TestFormMetadata(domain=cls.domain,
                                      app_id=cls.app_id,
                                      xmlns=cls.xmlns,
                                      received_on=received_on)
         saved_form = get_form_ready_to_save(form_meta, is_db_test=True)
         interface = FormProcessorInterface(domain=cls.domain)
         interface.save_processed_models([saved_form])
     return saved_form
Ejemplo n.º 17
0
    def test_case_soft_deletion(self):
        """Soft-deleting a case should remove its document from ES."""
        case_id, case_name = self._create_case_and_sync_to_es()

        # verify the case made it into ES
        self.assertEqual(1, CaseES().run().total)

        # soft delete the case and sync the change
        with process_kafka_changes('CaseToElasticsearchPillow'), \
                process_couch_changes('DefaultChangeFeedPillow'):
            CaseAccessors(self.domain).soft_delete_cases([case_id])
        self.elasticsearch.indices.refresh(CASE_INDEX_INFO.index)

        # the deletion should have propagated to ES
        self.assertEqual(0, CaseES().run().total)
Ejemplo n.º 18
0
    def test_form_soft_deletion(self):
        """Soft-deleting a form should remove its document from ES."""
        form, metadata = self._create_form_and_sync_to_es()

        # verify the form made it into ES
        self.assertEqual(1, FormES().run().total)

        # soft delete the form and sync the change
        with process_kafka_changes('XFormToElasticsearchPillow'), \
                process_couch_changes('DefaultChangeFeedPillow'):
            FormAccessors(self.domain).soft_delete_forms([form.form_id])
        self.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)

        # the deletion should have propagated to ES
        self.assertEqual(0, FormES().run().total)
Ejemplo n.º 19
0
    def test_form_soft_deletion(self):
        """Soft-deleting a form should remove its document from ES."""
        form, metadata = self._create_form_and_sync_to_es()

        # verify the form made it into ES
        self.assertEqual(1, FormES().run().total)

        # soft delete the form and sync the change
        with process_kafka_changes('XFormToElasticsearchPillow'), \
                process_couch_changes('DefaultChangeFeedPillow'):
            FormAccessors(self.domain).soft_delete_forms([form.form_id])
        self.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)

        # the deletion should have propagated to ES
        self.assertEqual(0, FormES().run().total)
Ejemplo n.º 20
0
    def test_duplicate_case_published(self):
        """A duplicate form submission should republish the case to kafka."""
        # this test only runs on sql because it's handling a sql-specific edge case where duplicate
        # form submissions should cause cases to be resubmitted.
        # see: http://manage.dimagi.com/default.asp?228463 for context
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]
        self.assertEqual(1, len(CaseAccessors(self.domain).get_case_ids_in_domain()))

        with process_kafka_changes(self.case_pillow), \
                process_couch_changes('DefaultChangeFeedPillow'):
            dupe_form = submit_form_locally(form_xml, domain=self.domain)[1]
            self.assertTrue(dupe_form.is_duplicate)

        # check the case was republished
        seen = self.processor.changes_seen
        self.assertEqual(1, len(seen))
        case_meta = seen[0].metadata
        self.assertEqual(case_id, case_meta.document_id)
        self.assertEqual(self.domain, case_meta.domain)
Ejemplo n.º 21
0
    def test_duplicate_ledger_published(self):
        """A duplicate form submission should republish the case's ledger
        values (one kafka change per product balance)."""
        # this test also only runs on the sql backend for reasons described in test_duplicate_case_published
        # setup products and case
        product_a = make_product(self.domain, 'A Product', 'prodcode_a')
        product_b = make_product(self.domain, 'B Product', 'prodcode_b')
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]

        # submit ledger data
        balances = (
            (product_a._id, 100),
            (product_b._id, 50),
        )
        ledger_blocks = [
            get_single_balance_block(case_id, prod_id, balance)
            for prod_id, balance in balances
        ]
        form = submit_case_blocks(ledger_blocks, self.domain)[0]

        # submit duplicate
        with process_kafka_changes(self.ledger_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                dupe_form = submit_form_locally(form.get_xml(),
                                                domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)

        # confirm republished
        ledger_meta_a = self.processor.changes_seen[0].metadata
        ledger_meta_b = self.processor.changes_seen[1].metadata

        def format_id(product_id):
            # ledger document ids have the shape '<case>/<section>/<product>'
            return '{}/{}/{}'.format(case_id, 'stock', product_id)

        expected_ids = {format_id(product_a._id), format_id(product_b._id)}
        for meta in [ledger_meta_a, ledger_meta_b]:
            # assertIn gives a useful failure message (unlike assertTrue(in))
            self.assertIn(meta.document_id, expected_ids)
            expected_ids.remove(meta.document_id)
            self.assertEqual(self.domain, meta.domain)

        # cleanup
        product_a.delete()
        product_b.delete()
Ejemplo n.º 22
0
    def test_duplicate_case_published(self):
        """A duplicate form submission should republish the case to kafka."""
        # this test only runs on sql because it's handling a sql-specific edge case where duplicate
        # form submissions should cause cases to be resubmitted.
        # see: http://manage.dimagi.com/default.asp?228463 for context
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]
        case_ids = CaseAccessors(self.domain).get_case_ids_in_domain()
        self.assertEqual(1, len(case_ids))

        with process_kafka_changes(self.case_pillow), \
                process_couch_changes('DefaultChangeFeedPillow'):
            dupe_form = submit_form_locally(form_xml, domain=self.domain)[1]
            self.assertTrue(dupe_form.is_duplicate)

        # check the case was republished
        seen = self.processor.changes_seen
        self.assertEqual(1, len(seen))
        case_meta = seen[0].metadata
        self.assertEqual(case_id, case_meta.document_id)
        self.assertEqual(self.domain, case_meta.domain)
Ejemplo n.º 23
0
    def test_duplicate_ledger_published(self):
        """A duplicate form submission should republish the case's ledger
        values (one kafka change per product balance)."""
        # this test also only runs on the sql backend for reasons described in test_duplicate_case_published
        # setup products and case
        product_a = make_product(self.domain, 'A Product', 'prodcode_a')
        product_b = make_product(self.domain, 'B Product', 'prodcode_b')
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]

        # submit ledger data
        balances = (
            (product_a._id, 100),
            (product_b._id, 50),
        )
        ledger_blocks = [
            get_single_balance_block(case_id, prod_id, balance)
            for prod_id, balance in balances
        ]
        form = submit_case_blocks(ledger_blocks, self.domain)[0]

        # submit duplicate
        with process_kafka_changes(self.ledger_pillow):
            with process_couch_changes('DefaultChangeFeedPillow'):
                dupe_form = submit_form_locally(form.get_xml(), domain=self.domain)[1]
                self.assertTrue(dupe_form.is_duplicate)

        # confirm republished
        ledger_meta_a = self.processor.changes_seen[0].metadata
        ledger_meta_b = self.processor.changes_seen[1].metadata

        def format_id(product_id):
            # ledger document ids have the shape '<case>/<section>/<product>'
            return '{}/{}/{}'.format(case_id, 'stock', product_id)

        expected_ids = {format_id(product_a._id), format_id(product_b._id)}
        for meta in [ledger_meta_a, ledger_meta_b]:
            # assertIn gives a useful failure message (unlike assertTrue(in))
            self.assertIn(meta.document_id, expected_ids)
            expected_ids.remove(meta.document_id)
            self.assertEqual(self.domain, meta.domain)

        # cleanup
        product_a.delete()
        product_b.delete()
Ejemplo n.º 24
0
 def test_case_is_published(self):
     # Creates a case while the case pillow and the couch change feed pillow
     # are processing, so the creation is pushed through kafka.
     # NOTE(review): this excerpt appears truncated — the assertions that
     # normally follow the case creation are not visible here.
     with process_kafka_changes(self.case_pillow):
         with process_couch_changes('DefaultChangeFeedPillow'):
             case = create_and_save_a_case(self.domain,
                                           case_id=uuid.uuid4().hex,
                                           case_name='test case')
Ejemplo n.º 25
0
                case = create_and_save_a_case(self.domain,
                                              case_id=uuid.uuid4().hex,
                                              case_name='test case')

        self.assertEqual(1, len(self.processor.changes_seen))
        change_meta = self.processor.changes_seen[0].metadata
        self.assertEqual(case.case_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

    @run_with_all_backends
    def test_case_deletions(self):
        """Soft-deleting a case should publish one change to kafka marked as
        a deletion."""
        case = create_and_save_a_case(self.domain,
                                      case_id=uuid.uuid4().hex,
                                      case_name='test case')
        with process_kafka_changes(self.case_pillow), \
                process_couch_changes('DefaultChangeFeedPillow'):
            case.soft_delete()

        seen = self.processor.changes_seen
        self.assertEqual(1, len(seen))
        meta = seen[0].metadata
        self.assertEqual(case.case_id, meta.document_id)
        self.assertTrue(meta.is_deletion)

    def test_duplicate_case_published(self):
        # this test only runs on sql because it's handling a sql-specific edge case where duplicate
        # form submissions should cause cases to be resubmitted.
        # see: http://manage.dimagi.com/default.asp?228463 for context
        case_id = uuid.uuid4().hex
        form_xml = get_simple_form_xml(uuid.uuid4().hex, case_id)
        submit_form_locally(form_xml, domain=self.domain)[1]
        self.assertEqual(