def testCacheInvalidation(self):
    """Posting a case against a sync token must invalidate that token's cached
    restore payload; a subsequent restore rebuilds the cache."""
    original_payload = RestoreConfig(
        self.user,
        version=V2,
        restore_id=self.sync_log._id,
    ).get_payload().as_string()
    # re-fetch the sync log: get_payload() caches onto the stored doc
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertTrue(self.sync_log.has_cached_payload(V2))

    # posting a case associated with this sync token should invalidate the cache
    case_id = "cache_invalidation"
    self._createCaseStubs([case_id])
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertFalse(self.sync_log.has_cached_payload(V2))

    # resyncing should recreate the cache
    next_payload = RestoreConfig(
        self.user,
        version=V2,
        restore_id=self.sync_log._id,
    ).get_payload().as_string()
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertTrue(self.sync_log.has_cached_payload(V2))
    self.assertNotEqual(original_payload, next_payload)
    self.assertFalse(case_id in original_payload)
    # since it was our own update, it shouldn't be in the new payload either
    self.assertFalse(case_id in next_payload)
    # we can be explicit about why this is the case
    self.assertTrue(self.sync_log.phone_has_case(case_id))
def check_user_has_case(testcase, user, case_blocks, should_have=True, line_by_line=True,
                        restore_id="", version=V2, purge_restore_cache=False,
                        return_single=False):
    """Run an OTA restore for ``user`` and check ``case_blocks`` against the payload.

    Delegates the actual assertions to ``check_payload_has_cases``; when
    ``purge_restore_cache`` is set, any cached payload for ``restore_id`` is
    dropped first so the restore below is computed fresh.
    """
    if restore_id and purge_restore_cache:
        SyncLog.get(restore_id).invalidate_cached_payloads()

    config = RestoreConfig(
        project=user.project,
        restore_user=user,
        params=RestoreParams(restore_id, version=version),
    )
    xml_payload = config.get_payload().as_string()
    return check_payload_has_cases(
        testcase=testcase,
        payload_string=xml_payload,
        username=user.username,
        case_blocks=case_blocks,
        should_have=should_have,
        line_by_line=line_by_line,
        version=version,
        return_single=return_single,
        restore_config=config,
    )
def testOtherUserUpdatesIndex(self):
    """A dependent (indexed-to) case reassigned to another user should still be
    held on the original phone, and updates to it from the other user should
    sync back down."""
    # create a parent and child case (with index) from one user
    parent_id = "other_updates_index_parent"
    case_id = "other_updates_index_child"
    self._createCaseStubs([parent_id])
    child = CaseBlock(
        create=True,
        case_id=case_id,
        user_id=USER_ID,
        owner_id=USER_ID,
        version=V2,
        index={'mother': ('mother', parent_id)}
    ).as_xml()
    self._postFakeWithSyncToken(child, self.sync_log.get_id)

    # the phone already submitted these cases, so a restore shouldn't resend them
    assert_user_doesnt_have_case(self, self.user, parent_id, restore_id=self.sync_log.get_id)
    assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)

    # assign the parent case away from same user
    parent_update = CaseBlock(
        create=False,
        case_id=parent_id,
        user_id=USER_ID,
        owner_id=OTHER_USER_ID,
        update={"greeting": "hello"},
        version=V2).as_xml()
    self._postFakeWithSyncToken(parent_update, self.sync_log.get_id)
    self.sync_log = SyncLog.get(self.sync_log.get_id)

    # these tests added to debug another issue revealed by this test
    self.assertTrue(self.sync_log.phone_has_case(case_id))
    self.assertTrue(self.sync_log.phone_has_dependent_case(parent_id))
    self.assertTrue(self.sync_log.phone_is_holding_case(case_id))
    self.assertTrue(self.sync_log.phone_is_holding_case(parent_id))

    # original user syncs again
    # make sure there are no new changes
    assert_user_doesnt_have_case(
        self, self.user, parent_id, restore_id=self.sync_log.get_id, purge_restore_cache=True)
    assert_user_doesnt_have_case(self, self.user, case_id, restore_id=self.sync_log.get_id)

    # the new owner should receive the parent case
    assert_user_has_case(
        self, self.other_user, parent_id, restore_id=self.other_sync_log.get_id,
        purge_restore_cache=True)

    # update the parent case from another user
    self.other_sync_log = SyncLog.last_for_user(OTHER_USER_ID)
    other_parent_update = CaseBlock(
        create=False,
        case_id=parent_id,
        user_id=OTHER_USER_ID,
        update={"greeting2": "hi"},
        version=V2
    ).as_xml()
    self._postFakeWithSyncToken(other_parent_update, self.other_sync_log.get_id)

    # make sure the indexed case syncs again
    self.sync_log = SyncLog.last_for_user(USER_ID)
    assert_user_has_case(
        self, self.user, parent_id, restore_id=self.sync_log.get_id, purge_restore_cache=True)
def test_archiving_location_should_resync(self):
    """
    When locations are archived, we should resync them
    """
    couch_location = Location(
        domain=self.domain,
        name='winterfell',
        location_type=self.location_type.name,
    )
    couch_location.save()
    after_save = datetime.utcnow()
    # saving the couch doc mirrors into SQL; grab the SQL copy
    location = SQLLocation.objects.last()
    self.assertEqual(couch_location.location_id, location.location_id)
    self.assertEqual('winterfell', location.name)
    location_db = LocationSet([location])
    # last sync is after the save -> nothing to resync
    self.assertFalse(
        should_sync_locations(SyncLog(date=after_save), location_db, self.user.to_ota_restore_user()))

    # archive the location
    location.archive()
    after_archive = datetime.utcnow()
    location = SQLLocation.objects.last()
    location_db = LocationSet([location])
    # last sync predates the archive -> should resync
    self.assertTrue(
        should_sync_locations(SyncLog(date=after_save), location_db, self.user.to_ota_restore_user()))
    # last sync is after the archive -> nothing to resync again
    self.assertFalse(
        should_sync_locations(SyncLog(date=after_archive), location_db, self.user.to_ota_restore_user()))
def test_should_sync_locations_change_location_type(self):
    """
    When location_type gets changed, we should resync locations
    """
    yesterday = datetime.today() - timedelta(1)
    day_before_yesterday = yesterday - timedelta(1)
    # Force update because of auto_now
    LocationType.objects.all().update(last_modified=day_before_yesterday)
    self.location_type = LocationType.objects.last()

    location = SQLLocation(
        domain=self.domain,
        name="Meereen",
        location_type=self.location_type,
        metadata={'queen': "Daenerys Targaryen", 'rebels': "Sons of the Harpy"},
    )
    location.save()
    # backdate the location too, so only the type change below is "new"
    SQLLocation.objects.filter(pk=location.pk).update(last_modified=day_before_yesterday)
    location = SQLLocation.objects.last()
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)
    # nothing modified since the last sync -> no resync
    self.assertFalse(
        should_sync_locations(SyncLog(date=yesterday), locations_queryset, self.user.to_ota_restore_user())
    )

    # modifying the location type (auto_now bumps last_modified) -> resync
    self.location_type.shares_cases = True
    self.location_type.save()
    location = SQLLocation.objects.last()
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)
    self.assertTrue(
        should_sync_locations(SyncLog(date=yesterday), locations_queryset, self.user.to_ota_restore_user())
    )
def test_archiving_location_should_resync(self):
    """
    When locations are archived, we should resync them
    """
    location = make_location(
        domain=self.domain,
        name='winterfell',
        location_type=self.location_type.name,
    )
    location.save()
    after_save = datetime.utcnow()
    self.assertEqual('winterfell', location.name)
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)
    # last sync is after the save -> nothing to resync
    self.assertFalse(
        should_sync_locations(SyncLog(date=after_save), locations_queryset, self.user.to_ota_restore_user()))

    # archive the location
    location.archive()
    after_archive = datetime.utcnow()
    location = SQLLocation.objects.last()
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)
    # last sync predates the archive -> should resync
    self.assertTrue(
        should_sync_locations(SyncLog(date=after_save), locations_queryset, self.user.to_ota_restore_user()))
    # last sync is after the archive -> nothing to resync
    self.assertFalse(
        should_sync_locations(SyncLog(date=after_archive), locations_queryset, self.user.to_ota_restore_user()))
def test_pillow(self):
    """End to end: saving a SyncLog publishes a change to kafka, and the
    user-sync-history pillow consumes it and updates the user's
    reporting metadata."""
    from corehq.apps.change_feed.topics import get_topic_offset
    from corehq.pillows.synclog import get_user_sync_history_pillow
    consumer = get_test_kafka_consumer(topics.SYNCLOG_SQL)
    # get the seq id before the change is published
    kafka_seq = get_topic_offset(topics.SYNCLOG_SQL)

    # make sure user has empty reporting-metadata before a sync
    self.assertEqual(self.ccuser.reporting_metadata.last_syncs, [])

    # do a sync
    synclog = SyncLog(domain=self.domain.name, user_id=self.ccuser._id,
                      date=datetime.datetime(2015, 7, 1, 0, 0))
    synclog.save()

    # make sure kafka change updates the user with latest sync info
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    synclog = self._get_latest_synclog()
    self.assertEqual(change_meta.document_id, synclog._id)
    self.assertEqual(change_meta.domain, self.domain.name)

    # make sure processor updates the user correctly
    pillow = get_user_sync_history_pillow()
    pillow.process_changes(since=kafka_seq, forever=False)
    ccuser = CommCareUser.get(self.ccuser._id)
    self.assertEqual(len(ccuser.reporting_metadata.last_syncs), 1)
    self.assertEqual(ccuser.reporting_metadata.last_syncs[0].sync_date, synclog.date)
    self.assertEqual(ccuser.reporting_metadata.last_sync_for_user.sync_date, synclog.date)
def test_cases_in_footprint(self):
    """Every case currently on the phone is part of the footprint."""
    starting_cases = [
        CaseState(case_id="c1", indices=[]),
        CaseState(case_id="c2", indices=[]),
    ]
    log = SyncLog(cases_on_phone=starting_cases)
    self.assertEqual(2, len(log.get_footprint_of_cases_on_phone()))

    # adding a third case grows the footprint accordingly
    log.cases_on_phone.append(CaseState(case_id="c3", indices=[]))
    self.assertEqual(3, len(log.get_footprint_of_cases_on_phone()))
def testShouldHaveCase(self):
    """If the sync log loses track of a case the phone should have, a
    subsequent update against that token repairs the log instead of erroring."""
    case_id = "should_have"
    self._createCaseStubs([case_id])
    sync_log = SyncLog.get(self.sync_log._id)
    self.assertEqual(1, len(sync_log.cases_on_phone))
    self.assertEqual(case_id, sync_log.cases_on_phone[0].case_id)

    # manually delete it and then try to update
    sync_log.cases_on_phone = []
    sync_log.save()

    update = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=USER_ID,
        owner_id=USER_ID,
        case_type=PARENT_TYPE,
        version=V2,
        update={'something': "changed"},
    ).as_xml()

    # this should work because it should magically fix itself
    self._postFakeWithSyncToken(update, self.sync_log.get_id)
    sync_log = SyncLog.get(self.sync_log._id)
    self.assertFalse(getattr(sync_log, 'has_assert_errors', False))
def testCacheInvalidation(self):
    """Posting a case against a sync token must invalidate that token's cached
    restore payload; the rebuilt payload should include the new case."""
    original_payload = RestoreConfig(
        self.user,
        version=V2,
        caching_enabled=True,
        restore_id=self.sync_log._id,
    ).get_payload()
    # re-fetch the sync log: get_payload() caches onto the stored doc
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertTrue(self.sync_log.has_cached_payload(V2))

    # posting a case associated with this sync token should invalidate the cache
    case_id = "cache_invalidation"
    self._createCaseStubs([case_id])
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertFalse(self.sync_log.has_cached_payload(V2))

    # resyncing should recreate the cache
    next_payload = RestoreConfig(
        self.user,
        version=V2,
        caching_enabled=True,
        restore_id=self.sync_log._id,
    ).get_payload()
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertTrue(self.sync_log.has_cached_payload(V2))
    self.assertNotEqual(original_payload, next_payload)
    # the case was created after the first restore, so only the new payload has it
    self.assertFalse(case_id in original_payload)
    self.assertTrue(case_id in next_payload)
def testShouldHaveCase(self):
    """If the sync log loses track of a case the phone should have, a
    subsequent update against that token repairs the log instead of erroring."""
    case_id = "should_have"
    self._createCaseStubs([case_id])
    sync_log = SyncLog.get(self.sync_log._id)
    self.assertEqual(1, len(sync_log.cases_on_phone))
    self.assertEqual(case_id, sync_log.cases_on_phone[0].case_id)

    # manually delete it and then try to update
    sync_log.cases_on_phone = []
    sync_log.save()

    update = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=USER_ID,
        owner_id=USER_ID,
        case_type=PARENT_TYPE,
        version=V2,
        update={'something': "changed"},
    ).as_xml()

    # this should work because it should magically fix itself
    self._postFakeWithSyncToken(update, self.sync_log.get_id)
    sync_log = SyncLog.get(self.sync_log._id)
    self.assertFalse(getattr(sync_log, 'has_assert_errors', False))
def test_should_sync_when_changed(self, *args):
    """Creating a LocationRelation after the last sync should force a locations
    resync and make the related-locations fixture non-empty."""
    self.user._couch_user.add_to_assigned_locations(self.locations['Boston'])
    last_sync_time = datetime.utcnow()
    sync_log = SyncLog(date=last_sync_time)
    locations_queryset = SQLLocation.objects.filter(pk=self.locations['Boston'].pk)

    restore_state = MockRestoreState(self.user, RestoreParams())

    # nothing has changed since the sync -> no resync, empty fixture
    self.assertFalse(should_sync_locations(sync_log, locations_queryset, restore_state))
    self.assertEquals(
        len(call_fixture_generator(related_locations_fixture_generator, self.user, last_sync=sync_log)),
        0)

    # a new relation created after the sync timestamp -> resync needed
    LocationRelation.objects.create(
        location_a=self.locations["Revere"], location_b=self.locations["Boston"])
    self.assertTrue(
        should_sync_locations(SyncLog(date=last_sync_time), locations_queryset, restore_state))

    # length 2 for index definition + data
    self.assertEquals(
        len(call_fixture_generator(related_locations_fixture_generator, self.user, last_sync=sync_log)),
        2)
def get_all_sync_logs_docs():
    """Return an iterator over the raw doc JSON of every sync log (test-only)."""
    assert settings.UNIT_TESTING
    view_rows = SyncLog.view("phone/sync_logs_by_user", reduce=False)
    doc_ids = [row['id'] for row in view_rows]
    return iter_docs(SyncLog.get_db(), doc_ids)
def test_archiving_location_should_resync(self):
    """
    When locations are archived, we should resync them
    """
    location = make_location(
        domain=self.domain,
        name='winterfell',
        location_type=self.location_type.name,
    )
    location.save()
    after_save = datetime.utcnow()
    self.assertEqual('winterfell', location.name)
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)

    restore_state = MockRestoreState(self.user.to_ota_restore_user(), RestoreParams())

    # Should not resync if last sync was after location save
    self.assertFalse(
        should_sync_locations(SyncLog(date=after_save), locations_queryset, restore_state))

    # archive the location
    location.archive()
    after_archive = datetime.utcnow()
    location = SQLLocation.objects.last()
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)

    # Should resync if last sync was after location was saved but before location was archived
    self.assertTrue(
        should_sync_locations(SyncLog(date=after_save), locations_queryset, restore_state))

    # Should not resync if last sync was after location was deleted
    self.assertFalse(
        should_sync_locations(SyncLog(date=after_archive), locations_queryset, restore_state))
def check_user_has_case(testcase, user, case_blocks, should_have=True,
                        line_by_line=True, restore_id="", version=V2,
                        purge_restore_cache=False, return_single=False):
    """Run an OTA restore for ``user`` and assert each of ``case_blocks`` is
    (or is not, if ``should_have`` is False) present in the payload.

    Returns ``(restore_config, matches)`` where ``matches`` is the matched
    restore block per input block (a single block, not a list, when
    ``return_single`` is True).
    """
    # accept a single block for convenience; a bare block implies a single return
    if not isinstance(case_blocks, list):
        case_blocks = [case_blocks]
        return_single = True

    XMLNS = NS_VERSION_MAP.get(version, 'http://openrosa.org/http/response')

    # optionally drop the cached payload so the restore below is computed fresh
    if restore_id and purge_restore_cache:
        SyncLog.get(restore_id).invalidate_cached_payloads()

    restore_config = RestoreConfig(
        project=user.project,
        restore_user=user,
        params=RestoreParams(restore_id, version=version)
    )
    payload_string = restore_config.get_payload().as_string()
    blocks_from_restore = extract_caseblocks_from_xml(payload_string, version)

    def check_block(case_block):
        # verify one expected block against the restore payload; returns the match
        case_block.set('xmlns', XMLNS)
        # round-trip through serialization to normalize the element
        case_block = RestoreCaseBlock(
            ElementTree.fromstring(ElementTree.tostring(case_block)), version=version)
        case_id = case_block.get_case_id()
        n = 0  # number of payload blocks seen for this case_id

        def extra_info():
            # diagnostic context appended to failure messages
            return "\n%s\n%s" % (case_block.to_string(),
                                 map(lambda b: b.to_string(), blocks_from_restore))

        match = None
        for block in blocks_from_restore:
            if block.get_case_id() == case_id:
                if should_have:
                    if line_by_line:
                        check_xml_line_by_line(
                            testcase,
                            case_block.to_string(),
                            block.to_string(),
                        )
                    match = block
                    n += 1
                    # a case must appear at most once in a restore
                    if n == 2:
                        testcase.fail(
                            "Block for case_id '%s' appears twice"
                            " in ota restore for user '%s':%s"
                            % (case_id, user.username, extra_info())
                        )
                else:
                    testcase.fail(
                        "User '%s' gets case '%s' "
                        "but shouldn't:%s" % (user.username, case_id, extra_info())
                    )
        if not n and should_have:
            testcase.fail("Block for case_id '%s' doesn't appear in ota restore for user '%s':%s"
                          % (case_id, user.username, extra_info()))

        return match

    matches = [check_block(case_block) for case_block in case_blocks]
    return restore_config, matches[0] if return_single else matches
def rows(self):
    """Build the datatable rows for the selected user's last 10 sync logs.

    Returns an empty list when no user is selected. Each row contains the
    sync date, case count, formatted duration, and (optionally) a link to
    the sync log doc.
    """
    base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))
    user_id = self.request.GET.get('individual')
    if not user_id:
        return []

    # security check
    get_document_or_404(CommCareUser, self.domain, user_id)

    # most recent 10 sync logs for this user (descending by date)
    sync_log_ids = [row['id'] for row in SyncLog.view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        reduce=False,
        limit=10
    )]

    def _sync_log_to_row(sync_log):
        def _fmt_duration(duration):
            # render a colored badge for known durations, a grey one otherwise;
            # the second argument is the datatable sort value
            if isinstance(duration, int):
                return format_datatables_data(
                    '<span class="{cls}">{text}</span>'.format(
                        cls=_bootstrap_class(duration or 0, 60, 20),
                        text=_('{} seconds').format(duration),
                    ),
                    duration
                )
            else:
                return format_datatables_data(
                    '<span class="label">{text}</span>'.format(
                        text=_("Unknown"),
                    ),
                    -1,
                )

        def _fmt_id(sync_log_id):
            # truncated id linking to the global quick-find page
            href = base_link_url.format(id=sync_log_id)
            return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                href=href,
                id=sync_log_id
            )

        num_cases = len(sync_log.cases_on_phone)
        columns = [
            _fmt_date(sync_log.date),
            format_datatables_data(num_cases, num_cases),
            _fmt_duration(sync_log.duration),
        ]
        if self.show_extra_columns:
            columns.append(_fmt_id(sync_log.get_id))
        return columns

    return [
        _sync_log_to_row(SyncLog.wrap(sync_log_json))
        for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
    ]
def test_sync_log_invalidation_bug(self):
    """Regression test: multimedia updates submitted with a sync token must succeed."""
    log = SyncLog(user_id='6dac4940-913e-11e0-9d4b-005056aa7fb5')
    log.save()
    self.testAttachInCreate()
    # this used to fail before we fixed
    # http://manage.dimagi.com/default.asp?158373
    self._doSubmitUpdateWithMultimedia(
        new_attachments=['commcare_logo_file'],
        removes=[],
        sync_token=log._id,
    )
    log.delete()
def test_default(self):
    """delete_synclog should remove a previously saved sync log."""
    log = SyncLog(
        domain='test',
        user_id='user1',
        date=datetime.datetime(2015, 7, 1, 0, 0),
    )
    log.save()
    self.assertEqual(1, self._count())

    delete_synclog(log._id)
    self.assertEqual(0, self._count())
def test_prune_on_migrate(self):
    """Converting to SimplifiedSyncLog drops unreferenced dependent cases
    while keeping the state hash stable."""
    owned = [CaseState(case_id="robert"), CaseState(case_id="cersei")]
    dependents = [CaseState(case_id="gendry")]
    original = SyncLog(cases_on_phone=owned, dependent_cases_on_phone=dependents)

    converted = SimplifiedSyncLog.from_other_format(original)

    self.assertTrue("gendry" not in converted.case_ids_on_phone)
    self.assertEqual(original.get_state_hash(), converted.get_state_hash())
def get_all_sync_logs_docs():
    """Iterate over the raw doc JSON of every sync log in the database."""
    rows = SyncLog.view("phone/sync_logs_by_user", reduce=False)
    ids = [r['id'] for r in rows]
    return iter_docs(SyncLog.get_db(), ids)
def test_selective_product_sync(self):
    """The product fixture should only be regenerated when a product changed
    since the last sync."""
    user = bootstrap_user(self, phone_number="1234567890")

    expected_xml = self.generate_product_fixture_xml(user)

    product_list = Product.by_domain(user.domain)
    self._initialize_product_names(len(product_list))

    # first sync: no previous sync log, so the full fixture is generated
    fixture_original = product_fixture_generator(user, V1, None, None)
    generate_restore_payload(user.to_casexml_user())

    self.assertXmlEqual(
        expected_xml,
        ElementTree.tostring(fixture_original[0])
    )

    first_sync = sorted(SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False
    ).all(), key=lambda x: x.date)[-1]

    # make sure the time stamp on this first sync is
    # not on the same second that the products were created
    first_sync.date += datetime.timedelta(seconds=1)

    # second sync is before any changes are made, so there should
    # be no products synced
    fixture_pre_change = product_fixture_generator(user, V1, None, first_sync)
    generate_restore_payload(user.to_casexml_user())

    self.assertEqual(
        [],
        fixture_pre_change,
        "Fixture was not empty on second sync"
    )

    second_sync = sorted(SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False
    ).all(), key=lambda x: x.date)[-1]

    self.assertTrue(first_sync._id != second_sync._id)

    # save should make the product more recently updated than the
    # last sync
    for product in product_list:
        product.save()

    # now that we've updated a product, we should get
    # product data in sync again
    fixture_post_change = product_fixture_generator(user, V1, None, second_sync)

    # regenerate the fixture xml to make sure it is still legit
    self.assertXmlEqual(
        expected_xml,
        ElementTree.tostring(fixture_post_change[0])
    )
def test_selective_product_sync(self):
    """The product fixture should only be regenerated when a product changed
    since the last sync."""
    user = self.user

    expected_xml = self.generate_product_fixture_xml(user)

    product_list = Product.by_domain(user.domain)
    self._initialize_product_names(len(product_list))

    # first sync: no previous sync log, so the full fixture is generated
    fixture_original = call_fixture_generator(product_fixture_generator, user)[1]
    deprecated_generate_restore_payload(self.domain_obj, user)

    self.assertXmlEqual(
        expected_xml,
        ElementTree.tostring(fixture_original)
    )

    first_sync = sorted(SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False
    ).all(), key=lambda x: x.date)[-1]

    # make sure the time stamp on this first sync is
    # not on the same second that the products were created
    first_sync.date += datetime.timedelta(seconds=1)

    # second sync is before any changes are made, so there should
    # be no products synced
    fixture_pre_change = call_fixture_generator(product_fixture_generator, user, last_sync=first_sync)
    deprecated_generate_restore_payload(self.domain_obj, user)

    self.assertEqual(
        [],
        fixture_pre_change,
        "Fixture was not empty on second sync"
    )

    second_sync = sorted(SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False
    ).all(), key=lambda x: x.date)[-1]

    self.assertTrue(first_sync._id != second_sync._id)

    # save should make the product more recently updated than the
    # last sync
    for product in product_list:
        product.save()

    # now that we've updated a product, we should get
    # product data in sync again
    fixture_post_change = call_fixture_generator(product_fixture_generator, user, last_sync=second_sync)[1]

    # regenerate the fixture xml to make sure it is still legit
    self.assertXmlEqual(
        expected_xml,
        ElementTree.tostring(fixture_post_change)
    )
def get_payload(self):
    """Assemble and return the full OTA restore payload for this user.

    Returns the cached payload when one exists; otherwise computes the case
    sync operation, persists a new SyncLog, builds the restore XML (sync
    token, registration, fixtures, case and commtrack blocks), caches it if
    enabled, and returns the serialized XML string.
    """
    user = self.user
    last_sync = self.sync_log

    self.validate()

    # short-circuit: reuse a previously cached payload when available
    cached_payload = self.get_cached_payload()
    if cached_payload:
        return cached_payload

    sync_operation = user.get_case_updates(last_sync)
    case_xml_elements = [
        xml.get_case_element(op.case, op.required_updates, self.version)
        for op in sync_operation.actual_cases_to_sync
    ]
    commtrack_elements = self.get_stock_payload(sync_operation)

    # record the couch sequence number at the time of this sync
    last_seq = str(get_db().info()["update_seq"])

    # create a sync log for this
    previous_log_id = last_sync.get_id if last_sync else None

    synclog = SyncLog(
        user_id=user.user_id,
        last_seq=last_seq,
        owner_ids_on_phone=user.get_owner_ids(),
        date=datetime.utcnow(),
        previous_log_id=previous_log_id,
        cases_on_phone=[CaseState.from_case(c) for c in sync_operation.actual_owned_cases],
        dependent_cases_on_phone=[CaseState.from_case(c) for c in sync_operation.actual_extended_cases],
    )
    synclog.save(**get_safe_write_kwargs())

    # start with standard response
    response = get_response_element(
        "Successfully restored account %s!" % user.username,
        ResponseNature.OTA_RESTORE_SUCCESS
    )

    # add sync token info
    response.append(xml.get_sync_element(synclog.get_id))
    # registration block
    response.append(xml.get_registration_element(user))
    # fixture block
    for fixture in generator.get_fixtures(user, self.version, last_sync):
        response.append(fixture)
    # case blocks
    for case_elem in case_xml_elements:
        response.append(case_elem)
    # commtrack (stock) blocks
    for ct_elem in commtrack_elements:
        response.append(ct_elem)

    # optionally advertise the number of items for progress reporting
    if self.items:
        response.attrib["items"] = "%d" % len(response.getchildren())

    resp = xml.tostring(response)
    self.set_cached_payload_if_enabled(resp)
    return resp
def test_should_sync_timezone(self): domain = Domain(name='test', default_timezone='Africa/Johannesburg') # yesterday at 21:59:59 = yesterday at 23:59:59 locally last_sync = datetime.combine(date.today() - timedelta(days=1), time(21, 59, 59)) # yesterday at 21:59:59 = today at 00:00:00 locally utcnow = datetime.combine(date.today() - timedelta(days=1), time(22, 00, 00)) self.assertTrue(should_sync(domain, SyncLog(date=last_sync), utcnow=utcnow)) domain = Domain(name='test', default_timezone='UTC') self.assertFalse(should_sync(domain, SyncLog(date=last_sync), utcnow=utcnow))
def create_sync_log(self):
    """Create, persist, and return the SyncLog representing this sync."""
    if self.is_initial:
        prior_log_id = None
    else:
        prior_log_id = self.last_sync_log._id
    # capture the couch sequence number at the moment of the sync
    seq = str(get_db().info()["update_seq"])
    log = SyncLog(
        user_id=self.user.user_id,
        last_seq=seq,
        owner_ids_on_phone=list(self.owner_ids),
        date=datetime.utcnow(),
        previous_log_id=prior_log_id,
    )
    log.save(**get_safe_write_kwargs())
    return log
def test_purge_on_migrate(self):
    """Migration to SimplifiedSyncLog prunes unreferenced dependent cases
    without changing the state hash."""
    legacy = SyncLog(
        cases_on_phone=[
            CaseState(case_id='robert'),
            CaseState(case_id='cersei'),
        ],
        dependent_cases_on_phone=[CaseState(case_id='gendry')],
    )
    simplified = SimplifiedSyncLog.from_other_format(legacy)
    # the dangling dependent case is dropped; the hash is preserved
    self.assertTrue('gendry' not in simplified.case_ids_on_phone)
    self.assertEqual(legacy.get_state_hash(), simplified.get_state_hash())
def test_dependent_cases(self):
    """Only dependent cases referenced by an index count toward the footprint."""
    owned_case = CaseState(
        case_id="c1",
        indices=[CommCareCaseIndex(identifier="d1-id", referenced_id="d1")],
    )
    log = SyncLog(
        cases_on_phone=[owned_case],
        dependent_cases_on_phone=[
            CaseState(case_id="d1", indices=[]),
            CaseState(case_id="d2", indices=[]),
        ],
    )
    # d1 counts because it's referenced, d2 doesn't
    self.assertEqual(2, len(log.get_footprint_of_cases_on_phone()))
    self.assertTrue("d1" in log.get_footprint_of_cases_on_phone())
    self.assertFalse("d2" in log.get_footprint_of_cases_on_phone())
def testMultiUserEdits(self):
    """Two users edit the same shared case; both edits should appear merged
    in each user's subsequent restore."""
    # create a case from one user
    case_id = "multi_user_edits"
    self._createCaseStubs([case_id], owner_id=SHARED_ID)

    # both users syncs
    generate_restore_payload(self.user)
    generate_restore_payload(self.other_user)
    self.sync_log = SyncLog.last_for_user(USER_ID)
    self.other_sync_log = SyncLog.last_for_user(OTHER_USER_ID)

    # update case from same user
    my_change = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=USER_ID,
        version=V2,
        update={'greeting': 'hello'}
    ).as_xml()
    self._postFakeWithSyncToken(my_change, self.sync_log.get_id)

    # update from another user
    # NOTE(review): user_id is USER_ID here even though the submission goes
    # through the other user's sync token — presumably intentional, but confirm
    their_change = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=USER_ID,
        version=V2,
        update={'greeting_2': 'hello'}
    ).as_xml()
    self._postFakeWithSyncToken(their_change, self.other_sync_log.get_id)

    # original user syncs again
    # make sure updates both appear (and merge?)
    joint_change = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=USER_ID,
        version=V2,
        update={
            'greeting': 'hello',
            'greeting_2': 'hello'
        },
        owner_id=SHARED_ID,
        case_name='',
        case_type='mother',
    ).as_xml()
    check_user_has_case(self, self.user, joint_change, restore_id=self.sync_log.get_id, version=V2)
    check_user_has_case(self, self.other_user, joint_change, restore_id=self.other_sync_log.get_id, version=V2)
def create_sync_log(self):
    """Build and save the SyncLog that records this sync."""
    prior = None if self.is_initial else self.last_sync_log._id
    # snapshot the couch sequence number for incremental-sync bookkeeping
    current_seq = str(get_db().info()["update_seq"])
    sync_record = SyncLog(
        user_id=self.user.user_id,
        last_seq=current_seq,
        owner_ids_on_phone=list(self.owner_ids),
        date=datetime.utcnow(),
        previous_log_id=prior,
    )
    sync_record.save(**get_safe_write_kwargs())
    return sync_record
def test_selective_program_sync(self):
    """The program fixture should only be regenerated when a program changed
    since the last sync."""
    user = bootstrap_user(self, phone_number="1234567890")
    Program(domain=user.domain, name="test1", code="t1").save()

    program_list = Program.by_domain(user.domain)
    program_xml = self.generate_program_xml(program_list, user)

    # first sync: no previous sync log, so the full fixture is generated
    fixture_original = program_fixture_generator(user, V1)

    generate_restore_payload(self.domain, user.to_casexml_user())
    self.assertXmlEqual(program_xml, ElementTree.tostring(fixture_original[0]))

    first_sync = sorted(SyncLog.view("phone/sync_logs_by_user",
                                     include_docs=True, reduce=False).all(),
                        key=lambda x: x.date)[-1]

    # make sure the time stamp on this first sync is
    # not on the same second that the programs were created
    first_sync.date += datetime.timedelta(seconds=1)

    # second sync is before any changes are made, so there should
    # be no programs synced
    fixture_pre_change = program_fixture_generator(user, V1, last_sync=first_sync)
    generate_restore_payload(self.domain, user.to_casexml_user())
    self.assertEqual([], fixture_pre_change, "Fixture was not empty on second sync")

    second_sync = sorted(SyncLog.view("phone/sync_logs_by_user",
                                      include_docs=True, reduce=False).all(),
                         key=lambda x: x.date)[-1]

    self.assertTrue(first_sync._id != second_sync._id)

    # save should make the program more recently updated than the
    # last sync
    for program in program_list:
        program.save()

    # now that we've updated a program, we should get
    # program data in sync again
    fixture_post_change = program_fixture_generator(user, V1, last_sync=second_sync)

    # regenerate the fixture xml to make sure it is still legit
    self.assertXmlEqual(program_xml, ElementTree.tostring(fixture_post_change[0]))
def test_purge_on_migrate(self):
    """from_other_format should purge dependent cases nothing points at,
    leaving the state hash untouched."""
    cases = [CaseState(case_id='robert'), CaseState(case_id='cersei')]
    dependents = [CaseState(case_id='gendry')]
    old_log = SyncLog(cases_on_phone=cases, dependent_cases_on_phone=dependents)

    new_log = SimplifiedSyncLog.from_other_format(old_log)

    self.assertTrue('gendry' not in new_log.case_ids_on_phone)
    self.assertEqual(old_log.get_state_hash(), new_log.get_state_hash())
def test_couch_synclogs(self):
    """delete_synclog removes couch-backed sync logs and raises once the doc
    is already gone."""
    log = SyncLog(
        domain='test',
        user_id='user1',
        date=datetime.datetime(2015, 7, 1, 0, 0),
    )
    SyncLog.get_db().save_doc(log)
    # doc lives in couch only, never in SQL
    self.assertEqual(0, self._sql_count())
    self.assertEqual(1, self._couch_count())

    delete_synclog(log._id)
    self.assertEqual(0, self._sql_count())
    self.assertEqual(0, self._couch_count())

    # a second delete must signal that the log no longer exists
    with self.assertRaises(MissingSyncLog):
        delete_synclog(log._id)
def test_sync_log(self):
    """Domain dump/load should include sync logs belonging to the domain's
    users and exclude those of users in other domains."""
    from casexml.apps.phone.models import SyncLog, SimplifiedSyncLog
    from corehq.apps.users.models import WebUser, CommCareUser
    from casexml.apps.phone.models import get_sync_log_class_by_format

    web_user = WebUser.create(
        domain=self.domain_name,
        username='******',
        password='******',
        email='*****@*****.**',
    )
    mobile_user = CommCareUser.create(self.domain_name, 'mobile_user1', 'secret')
    # a user (and sync log) in a different domain that must NOT be dumped
    other_user = CommCareUser.create('other_domain', 'mobile_user2', 'secret')
    self.addCleanup(other_user.delete)

    l1 = SyncLog(user_id=web_user._id)
    l1.save()
    l2 = SimplifiedSyncLog(user_id=mobile_user._id)
    l2.save()
    other_log = SyncLog(user_id=other_user._id)
    other_log.save()

    def _synclog_to_class(doc):
        # map a raw sync log doc back to its concrete model class
        if doc['doc_type'] == 'SyncLog':
            return get_sync_log_class_by_format(doc.get('log_format'))

    expected_docs = [web_user, mobile_user, l1, l2]
    not_expected_docs = [other_user, other_log]
    self._dump_and_load(expected_docs, not_expected_docs, doc_to_doc_class=_synclog_to_class)
def setUpClass(cls):
    """Seed sync logs for one user (mixed SyncLog/SimplifiedSyncLog) plus a
    decoy log for a different user, then refresh the analytics indexes."""
    cls.user_id = 'lkasdhfadsloi'
    cls.sync_logs = [
        SyncLog(user_id=cls.user_id, date=datetime.datetime(2015, 7, 1, 0, 0)),
        SimplifiedSyncLog(user_id=cls.user_id, date=datetime.datetime(2015, 3, 1, 0, 0)),
        SyncLog(user_id=cls.user_id, date=datetime.datetime(2015, 1, 1, 0, 0)),
    ]
    decoy_logs = [SyncLog(user_id='other')]
    cls.docs = cls.sync_logs + decoy_logs
    for document in cls.docs:
        document.save()
    update_analytics_indexes()
def setUp(self):
    """Wipe forms, cases, and sync logs, then create the test user and its
    initial (blank) sync token."""
    # clear cases, forms, logs
    for item in XFormInstance.view("couchforms/by_xmlns", include_docs=True, reduce=False).all():
        item.delete()
    for case in CommCareCase.view("case/by_user", reduce=False, include_docs=True).all():
        case.delete()
    for log in SyncLog.view("phone/sync_logs_by_user", include_docs=True, reduce=False).all():
        log.delete()
    self.user = User(user_id=USER_ID, username="******",
                     password="******", date_joined=datetime(2011, 6, 9))
    # this creates the initial blank sync token in the database
    generate_restore_payload(self.user)
    # exactly one sync log should exist after the initial restore
    [sync_log] = SyncLog.view("phone/sync_logs_by_user", include_docs=True, reduce=False).all()
    self.sync_log = sync_log
def testCacheNonInvalidation(self):
    """A case submitted WITHOUT this sync token must not invalidate this
    token's cached restore payload."""
    original_payload = RestoreConfig(
        self.user,
        version=V2,
        caching_enabled=True,
        restore_id=self.sync_log._id,
    ).get_payload()
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertTrue(self.sync_log.has_cached_payload(V2))

    # posting a case associated with this sync token should invalidate the cache
    # submitting a case not with the token will not touch the cache for that token
    case_id = "cache_noninvalidation"
    post_case_blocks([CaseBlock(
        create=True,
        case_id=case_id,
        user_id=self.user.user_id,
        owner_id=self.user.user_id,
        case_type=PARENT_TYPE,
        version=V2,
    ).as_xml()])
    next_payload = RestoreConfig(
        self.user,
        version=V2,
        caching_enabled=True,
        restore_id=self.sync_log._id,
    ).get_payload()
    # cache untouched: identical payload, new case absent
    self.assertEqual(original_payload, next_payload)
    self.assertFalse(case_id in next_payload)
def testUserRestoreWithCase(self):
    """Submitting a case form should produce correct v1 and v2 case XML and a
    restore payload containing the case block."""
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    # implicit length assertion
    _, _, [newcase] = submit_form_locally(xml_data, domain=self.project.name)
    expected_case_block = """
    <case>
        <case_id>asdf</case_id>
        <date_modified>2010-06-29T13:42:50.000000Z</date_modified>
        <create>
            <case_type_id>test_case_type</case_type_id>
            <user_id>foo</user_id>
            <case_name>test case name</case_name>
            <external_id>someexternal</external_id>
        </create>
    </case>"""
    check_xml_line_by_line(self, expected_case_block,
                           xml.get_case_xml(newcase, [case_const.CASE_ACTION_CREATE,
                                                      case_const.CASE_ACTION_UPDATE]))

    # check v2
    expected_v2_case_block = """
    <case case_id="asdf" date_modified="2010-06-29T13:42:50.000000Z" user_id="foo"
          xmlns="http://commcarehq.org/case/transaction/v2" >
        <create>
            <case_type>test_case_type</case_type>
            <case_name>test case name</case_name>
            <owner_id>foo</owner_id>
        </create>
        <update>
            <external_id>someexternal</external_id>
        </update>
    </case>"""
    check_xml_line_by_line(
        self,
        expected_v2_case_block,
        xml.get_case_xml(
            newcase,
            [case_const.CASE_ACTION_CREATE, case_const.CASE_ACTION_UPDATE],
            version="2.0",
        ),
    )

    # the full restore payload should embed the case block
    restore_payload = generate_restore_payload(
        project=self.project,
        user=dummy_user(),
        items=True,
    )
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).one().get_id
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log_id, expected_case_block, items=4),
        restore_payload
    )
def sync_logs_for_user(user_id):
    """Render the sync-log table partial showing one user's sync history."""
    # couch range query: [user_id] .. [user_id, {}] spans every key for this user
    sync_data = SyncLog.view(
        "phone/sync_logs_by_user",
        reduce=False,
        startkey=[user_id],
        endkey=[user_id, {}],
        include_docs=True,
    )
    context = {"sync_data": sync_data}
    return render_to_string("phone/partials/sync_log_for_chw_table.html", context)
def testUserRestoreWithCase(self):
    """Submit a case-creating form and verify it appears in a V3 restore payload."""
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    FormProcessorInterface.submit_form_locally(xml_data, self.domain)
    # expected v2-format case block inside the restore document
    expected_case_block = """
    <case case_id="asdf" date_modified="2010-06-29T13:42:50.000000Z" user_id="foo" xmlns="http://commcarehq.org/case/transaction/v2">
        <create>
            <case_type>test_case_type</case_type>
            <case_name>test case name</case_name>
            <owner_id>foo</owner_id>
        </create>
        <update>
            <external_id>someexternal</external_id>
        </update>
    </case>"""
    restore_payload = generate_restore_payload(
        project=Domain(name=self.domain),
        user=dummy_user(),
        items=True,
        version=V3
    )
    # the restore just created a sync log; fetch it to build the expected doc
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).one().get_id
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log_id, expected_case_block, items=4),
        restore_payload
    )
def testMismatch(self):
    """Restoring with a bad state hash must fail and report the mismatch details.

    Fix: replace the legacy ``except BadStateException, e:`` comma syntax
    (a SyntaxError on Python 3) with the ``as`` form, valid since 2.6, and
    use ``assertIn`` for clearer failure messages.
    """
    # phone starts with an empty state
    self.assertEqual(CaseStateHash(EMPTY_HASH), self.sync_log.get_state_hash())

    c1 = CaseBlock(case_id="abc123", create=True, owner_id=self.user.user_id).as_xml()
    c2 = CaseBlock(case_id="123abc", create=True, owner_id=self.user.user_id).as_xml()
    post_case_blocks([c1, c2], form_extras={"last_sync_token": self.sync_log.get_id})
    self.sync_log = SyncLog.get(self.sync_log.get_id)

    real_hash = CaseStateHash("409c5c597fa2c2a693b769f0d2ad432b")
    bad_hash = CaseStateHash("thisisntright")
    self.assertEqual(real_hash, self.sync_log.get_state_hash())
    # the correct hash generates a payload without complaint
    generate_restore_payload(self.user, self.sync_log.get_id,
                             version=V2, state_hash=str(real_hash))

    try:
        generate_restore_payload(self.user, self.sync_log.get_id,
                                 version=V2, state_hash=str(bad_hash))
        self.fail("Call to generate a payload with a bad hash should fail!")
    except BadStateException as e:
        # the exception carries both hashes and the ids of every case on the phone
        self.assertEqual(real_hash, e.expected)
        self.assertEqual(bad_hash, e.actual)
        self.assertEqual(2, len(e.case_ids))
        self.assertIn("abc123", e.case_ids)
        self.assertIn("123abc", e.case_ids)
def testOtherUserUpdatesUnowned(self):
    """An update by the owning (other) user must not sync to the non-owner."""
    # create a case from one user and assign ownership elsewhere
    case_id = "other_user_updates_unowned"
    self._createCaseStubs([case_id], owner_id=OTHER_USER_ID)

    # the owning user syncs and sees the case
    check_user_has_case(self, self.other_user,
                        CaseBlock(case_id=case_id, version=V2).as_xml(),
                        should_have=True, line_by_line=False,
                        restore_id=self.other_sync_log.get_id, version=V2)
    self.other_sync_log = SyncLog.last_for_user(OTHER_USER_ID)

    # the owner posts an update against their latest sync token
    update_block = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=OTHER_USER_ID,
        version=V2,
        update={'greeting': 'hello'}
    ).as_xml()
    self._postFakeWithSyncToken(update_block, self.other_sync_log.get_id)

    # original user syncs again: nothing new should come down
    check_user_has_case(self, self.user, update_block, should_have=False,
                        restore_id=self.sync_log.get_id, version=V2)
def update_sync_log_with_checks(sync_log, xform, cases, case_db, case_id_blacklist=None):
    """Update a sync log's phone lists, recovering from sync assertion failures.

    If ``update_phone_lists`` raises a ``SyncLogAssertionError`` for a case,
    every *other* form that touched that case is reprocessed with the case
    blacklisted, and the update is retried recursively on a freshly-loaded
    sync log. The blacklist grows on each retry, guaranteeing termination.

    Fix: replace the legacy ``except SyncLogAssertionError, e:`` comma syntax
    (a SyntaxError on Python 3) with the ``as`` form, valid since Python 2.6.

    :param sync_log: the SyncLog to update
    :param xform: the form whose case actions are being applied
    :param cases: case documents touched by the form
    :param case_db: required case cache used when reprocessing forms
    :param case_id_blacklist: case ids to skip when reprocessing (grows on retry)
    """
    assert case_db is not None
    # local import to avoid a circular dependency with casexml.apps.case.xform
    from casexml.apps.case.xform import CaseProcessingConfig

    case_id_blacklist = case_id_blacklist or []
    try:
        sync_log.update_phone_lists(xform, cases)
    except SyncLogAssertionError as e:
        if e.case_id and e.case_id not in case_id_blacklist:
            form_ids = get_case_xform_ids(e.case_id)
            # blacklist before reprocessing so the recursive call cannot loop
            case_id_blacklist.append(e.case_id)
            for form_id in form_ids:
                if form_id != xform._id:
                    form = XFormInstance.get(form_id)
                    if form.doc_type in ['XFormInstance', 'XFormError']:
                        reprocess_form_cases(
                            form,
                            CaseProcessingConfig(
                                strict_asserts=True,
                                case_id_blacklist=case_id_blacklist
                            ),
                            case_db=case_db
                        )
            # retry against the latest version of the sync log
            updated_log = SyncLog.get(sync_log._id)
            update_sync_log_with_checks(updated_log, xform, cases, case_db,
                                        case_id_blacklist=case_id_blacklist)
def test_indices(self):
    """Case indices survive migration from SyncLog to SimplifiedSyncLog."""
    parents = ['catelyn', 'ned', 'cersei', 'jaimie']
    index_structure = {
        'bran': [
            {'identifier': 'mom', 'referenced_id': 'catelyn'},
            {'identifier': 'dad', 'referenced_id': 'ned'},
        ],
        'myrcella': [
            {'identifier': 'mom', 'referenced_id': 'cersei'},
            {'identifier': 'dad', 'referenced_id': 'jaimie'},
        ]
    }
    # build the legacy-format log from the declarative structure above
    cases_on_phone = [
        CaseState(
            case_id=child_id,
            indices=[CommCareCaseIndex(**kwargs) for kwargs in child_indices],
        )
        for child_id, child_indices in sorted(index_structure.items())
    ]
    legacy_log = SyncLog(
        cases_on_phone=cases_on_phone,
        dependent_cases_on_phone=[CaseState(case_id=parent_id) for parent_id in parents]
    )

    migrated = SimplifiedSyncLog.from_other_format(legacy_log)

    # every child keeps its full index map
    for child_id, expected_indices in index_structure.items():
        self.assertTrue(child_id in migrated.index_tree.indices)
        for expected in expected_indices:
            self.assertEqual(
                expected['referenced_id'],
                migrated.index_tree.indices[child_id][expected['identifier']])
    # every parent is carried over as a dependent case on the phone
    for parent_id in parents:
        self.assertTrue(parent_id in migrated.case_ids_on_phone)
        self.assertTrue(parent_id in migrated.dependent_case_ids_on_phone)
def testOtherUserEdits(self):
    """Edits made by a second user on a shared case sync back to the first."""
    # create a case by one user, owned by the shared group
    case_id = "other_user_edits"
    self._createCaseStubs([case_id], owner_id=SHARED_ID)

    # the other user syncs so they are allowed to edit
    check_user_has_case(self, self.other_user,
                        CaseBlock(case_id=case_id, version=V2).as_xml(),
                        should_have=True, line_by_line=False,
                        restore_id=self.other_sync_log.get_id, version=V2)
    other_latest = SyncLog.last_for_user(OTHER_USER_ID)

    # the other user posts an edit with their fresh token
    edit_block = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=OTHER_USER_ID,
        version=V2,
        update={'greeting': "Hello!"}
    ).as_xml()
    self._postFakeWithSyncToken(edit_block, other_latest.get_id)

    # original user syncs again and should receive the edit
    expected_block = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=USER_ID,
        version=V2,
        update={'greeting': "Hello!"}
    ).as_xml()
    match = check_user_has_case(self, self.user, expected_block, should_have=True,
                                line_by_line=False,
                                restore_id=self.sync_log.get_id, version=V2)
    self.assertTrue("Hello!" in ElementTree.tostring(match))
def testCacheNonInvalidation(self):
    """Submitting a case without this sync token leaves its cached payload alone."""
    restore_kwargs = dict(
        version=V2,
        caching_enabled=True,
        restore_id=self.sync_log._id,
    )
    before = RestoreConfig(self.user, **restore_kwargs).get_payload()
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertTrue(self.sync_log.has_cached_payload(V2))

    # posting a case associated with this sync token should invalidate the cache
    # submitting a case not with the token will not touch the cache for that token
    case_id = "cache_noninvalidation"
    unrelated_case = CaseBlock(
        create=True,
        case_id=case_id,
        user_id=self.user.user_id,
        owner_id=self.user.user_id,
        case_type=PARENT_TYPE,
        version=V2,
    ).as_xml()
    post_case_blocks([unrelated_case])

    after = RestoreConfig(self.user, **restore_kwargs).get_payload()
    self.assertEqual(before, after)
    self.assertFalse(case_id in after)
def testCaching(self):
    """First restore populates the cache; repeats hit it; other versions miss it."""
    self.assertFalse(self.sync_log.has_cached_payload(V2))

    def restore_as_string(version):
        # same user and restore_id every time; only the version varies
        return RestoreConfig(
            self.user,
            version=version,
            restore_id=self.sync_log._id,
        ).get_payload().as_string()

    # first request should populate the cache
    first_payload = restore_as_string(V2)
    next_sync_log = synclog_from_restore_payload(first_payload)
    self.sync_log = SyncLog.get(self.sync_log._id)
    self.assertTrue(self.sync_log.has_cached_payload(V2))

    # a second identical request is served from the cache, byte for byte
    self.assertEqual(first_payload, restore_as_string(V2))

    # a different version produces a distinct payload (and a distinct sync log)
    v1_payload = restore_as_string(V1)
    self.assertNotEqual(first_payload, v1_payload)
    versioned_sync_log = synclog_from_restore_payload(v1_payload)
    self.assertNotEqual(next_sync_log._id, versioned_sync_log._id)
def testOtherUserUpdatesUnowned(self):
    """The owner's update to an unowned case must not reach the non-owner."""
    # create a case from one user and assign ownership elsewhere
    case_id = "other_user_updates_unowned"
    self._createCaseStubs([case_id], owner_id=OTHER_USER_ID)

    # the owning user syncs and sees the case
    assert_user_has_case(self, self.other_user, case_id,
                         restore_id=self.other_sync_log.get_id)
    self.other_sync_log = SyncLog.last_for_user(OTHER_USER_ID)

    # owner posts an update with their latest token
    update_block = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=OTHER_USER_ID,
        version=V2,
        update={'greeting': 'hello'}
    ).as_xml()
    self._postFakeWithSyncToken(update_block, self.other_sync_log.get_id)

    # original (non-owning) user syncs again: no new changes expected
    assert_user_doesnt_have_case(self, self.user, case_id,
                                 restore_id=self.sync_log.get_id)
def test_update_dependent_case(self):
    """Updating a dependent (parent) case must not trip legacy sync-log asserts."""
    legacy_log = SyncLog(
        cases_on_phone=[
            CaseState(
                case_id='bran',
                indices=[CommCareCaseIndex(identifier='legs', referenced_id='hodor')],
            ),
        ],
        dependent_cases_on_phone=[CaseState(case_id='hodor')],
        user_id="someuser")
    form = XFormInstance(_id=uuid.uuid4().hex)
    form_actions = [CommCareCaseAction(action_type=CASE_ACTION_UPDATE, )]
    with patch.object(CommCareCase, 'get_actions_for_form', return_value=form_actions):
        parent_case = CommCareCase(_id='hodor')
        # before this test was added, the following call raised a SyncLogAssertionError on legacy logs.
        # this test just ensures it doesn't still do that.
        for log in [legacy_log, SimplifiedSyncLog.from_other_format(legacy_log)]:
            log.update_phone_lists(form, [parent_case])
def rows(self):
    """Build report rows: one per user with last-seen, last-sync, and app info.

    Each user's most recent form submission (optionally filtered by the
    selected app) supplies the last-seen timestamp and the app/build version
    labels; the user's latest SyncLog supplies the last-sync timestamp.
    """
    rows = []
    user_ids = map(lambda user: user.user_id, self.users)
    user_xform_dicts_map = get_last_form_submissions_by_user(
        self.domain, user_ids, self.selected_app_id)
    for user in self.users:
        xform_dict = last_seen = last_sync = app_name = None
        if user_xform_dicts_map.get(user.user_id):
            # first entry is the most recent submission for this user
            xform_dict = user_xform_dicts_map[user.user_id][0]
        if xform_dict:
            last_seen = string_to_utc_datetime(
                xform_dict.get('received_on'))
            if xform_dict.get('app_id'):
                try:
                    app = get_app(self.domain, xform_dict.get('app_id'))
                except ResourceNotFound:
                    # app was deleted; leave app_name as None for now
                    pass
                else:
                    app_name = app.name
            else:
                # no app_id on the form: fall back to the version text in form meta
                app_name = get_meta_appversion_text(
                    xform_dict['form']['meta'])
            app_version_info = get_app_version_info(
                self.domain,
                xform_dict.get('build_id'),
                xform_dict.get('version'),
                xform_dict['form']['meta'],
            )
            build_html = _build_html(app_version_info)
            commcare_version = ('CommCare {}'.format(
                app_version_info.commcare_version)
                if app_version_info.commcare_version
                else _("Unknown CommCare Version"))
            commcare_version_html = mark_safe(
                '<span class="label label-info">{}</span>'.format(
                    commcare_version))
            app_name = app_name or _("Unknown App")
            # combine app name, build badge, and CommCare version into one cell
            app_name = format_html(
                u'{} {} {}', app_name, mark_safe(build_html), commcare_version_html)
        if app_name is None and self.selected_app_id:
            # filtering by app and this user never submitted from it: skip the row
            continue
        last_sync_log = SyncLog.last_for_user(user.user_id)
        if last_sync_log:
            last_sync = last_sync_log.date
        rows.append([
            user.username_in_report, _fmt_date(last_seen),
            _fmt_date(last_sync), app_name or "---"
        ])
    return rows
def process_cases(sender, xform, **kwargs): """Creates or updates case objects which live outside of the form""" # recursive import fail from casexml.apps.case.xform import get_or_update_cases # avoid Document conflicts cases = get_or_update_cases(xform).values() # attach domain if it's there if hasattr(xform, "domain"): domain = xform.domain def attach_domain(case): case.domain = domain if domain and hasattr(case, 'type'): case['#export_tag'] = ["domain", "type"] return case cases = [attach_domain(case) for case in cases] # HACK -- figure out how to do this more properly # todo: create a pillow for this if cases: case = cases[0] if case.location_ is not None: # should probably store this in computed_ xform.location_ = list(case.location_) # handle updating the sync records for apps that use sync mode if hasattr(xform, "last_sync_token") and xform.last_sync_token: relevant_log = SyncLog.get(xform.last_sync_token) relevant_log.update_phone_lists(xform, cases) # set flags for indicator pillows and save xform.initial_processing_complete = True xform.save() for case in cases: case.initial_processing_complete = True case.save()
def testOtherUserAddsIndex(self):
    """An index added by another user pulls the indexed (parent) case down on sync."""
    # create a case from one user, owned by the shared group
    case_id = "other_user_adds_index"
    self._createCaseStubs([case_id], owner_id=SHARED_ID)

    # the other user syncs so they are allowed to edit
    check_user_has_case(self, self.other_user,
                        CaseBlock(case_id=case_id, version=V2).as_xml(),
                        should_have=True, line_by_line=False,
                        restore_id=self.other_sync_log.get_id, version=V2)
    other_latest = SyncLog.last_for_user(OTHER_USER_ID)

    mother_id = "other_user_adds_index_mother"

    # the other user creates the parent case
    parent_case = CaseBlock(
        create=True,
        case_id=mother_id,
        user_id=OTHER_USER_ID,
        case_type=PARENT_TYPE,
        version=V2,
    ).as_xml()
    self._postFakeWithSyncToken(parent_case, other_latest.get_id)

    # the original user should not get the parent case yet
    check_user_has_case(self, self.user, parent_case, should_have=False,
                        restore_id=self.sync_log.get_id, version=V2)

    # the other user indexes the shared case against the new parent
    index_update = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=OTHER_USER_ID,
        owner_id=USER_ID,
        version=V2,
        index={'mother': ('mother', mother_id)}
    ).as_xml()
    self._postFakeWithSyncToken(index_update, other_latest.get_id)

    # original user syncs again: the index takes and the parent case comes down
    expected_parent_case = CaseBlock(
        create=True,
        case_id=mother_id,
        user_id=OTHER_USER_ID,
        case_type=PARENT_TYPE,
        owner_id=OTHER_USER_ID,
        version=V2,
    ).as_xml()
    check_user_has_case(self, self.user, expected_parent_case,
                        restore_id=self.sync_log.get_id, version=V2)
    child_block = check_user_has_case(self, self.user,
                                      CaseBlock(case_id=case_id, version=V2).as_xml(),
                                      line_by_line=False,
                                      restore_id=self.sync_log.get_id, version=V2)
    self.assertTrue("index" in ElementTree.tostring(child_block))
def testOtherUserEdits(self):
    """Edits from a second user on a shared case sync back to the first user."""
    # create a case by one user in the shared group
    case_id = "other_user_edits"
    self._createCaseStubs([case_id], owner_id=SHARED_ID)

    # sync to the other's phone to be able to edit
    assert_user_has_case(self, self.other_user, case_id,
                         restore_id=self.other_sync_log.get_id)
    other_latest = SyncLog.last_for_user(OTHER_USER_ID)

    # update from the other user with their fresh token
    edit_block = CaseBlock(
        create=False,
        case_id=case_id,
        user_id=OTHER_USER_ID,
        version=V2,
        update={'greeting': "Hello!"}
    ).as_xml()
    self._postFakeWithSyncToken(edit_block, other_latest.get_id)

    # original user syncs again; the update should be present
    match = assert_user_has_case(self, self.user, case_id,
                                 restore_id=self.sync_log.get_id)
    self.assertTrue("Hello!" in ElementTree.tostring(match))
def test_update_dependent_case_owner_still_present(self):
    """Reassigning a dependent case's owner keeps it in the dependent list."""
    dependent_state = CaseState(case_id="d1", indices=[])
    legacy_log = SyncLog(
        domain="domain",
        user_id="user",
        cases_on_phone=[
            CaseState(
                case_id="c1",
                indices=[CommCareCaseIndex(identifier="d1-id", referenced_id="d1")],
            )
        ],
        dependent_cases_on_phone=[dependent_state],
        owner_ids_on_phone=['user1'])
    form = XFormInstance(_id=uuid.uuid4().hex)
    form_actions = [
        CommCareCaseAction(action_type=CASE_ACTION_UPDATE,
                           updated_known_properties={'owner_id': 'user2'})
    ]
    with patch.object(CommCareCase, 'get_actions_for_form', return_value=form_actions):
        parent_case = CommCareCase(_id='d1')
        # before this test was added, the following call raised a ValueError on legacy logs.
        for log in [legacy_log, SimplifiedSyncLog.from_other_format(legacy_log)]:
            log.update_phone_lists(form, [parent_case])
            self.assertIn(dependent_state, log.test_only_get_dependent_cases_on_phone())
def _process_cases(xform, config, case_db):
    """Extract, reconcile, and save the cases touched by *xform*.

    Returns the list of saved case documents. Also updates the form's sync
    log when a sync token is present, fires the ``cases_received`` signal,
    and marks the form as fully processed before a forced save.
    """
    cases = get_or_update_cases(xform, case_db).values()

    if config.reconcile:
        # rebuild each case's action history up front when reconciling
        for c in cases:
            c.reconcile_actions(rebuild=True)

    # attach domain and export tag if domain is there
    if hasattr(xform, "domain"):
        domain = xform.domain

        def attach_extras(case):
            case.domain = domain
            if domain:
                assert hasattr(case, "type")
                case["#export_tag"] = ["domain", "type"]
            return case
        cases = [attach_extras(case) for case in cases]

    # handle updating the sync records for apps that use sync mode
    last_sync_token = getattr(xform, "last_sync_token", None)
    if last_sync_token:
        relevant_log = SyncLog.get(last_sync_token)
        # in reconciliation mode, things can be unexpected
        relevant_log.strict = config.strict_asserts
        # local import — presumably avoids a circular dependency; confirm
        from casexml.apps.case.util import update_sync_log_with_checks
        update_sync_log_with_checks(relevant_log, xform, cases,
                                    case_id_blacklist=config.case_id_blacklist)

        if config.reconcile:
            relevant_log.reconcile_cases()
            relevant_log.save()

    try:
        cases_received.send(sender=None, xform=xform, cases=cases)
    except Exception as e:
        # don't let the exceptions in signals prevent standard case processing
        notify_exception(
            None,
            "something went wrong sending the cases_received signal "
            "for form %s: %s" % (xform._id, e)
        )

    for case in cases:
        if not case.check_action_order():
            try:
                case.reconcile_actions(rebuild=True)
            except ReconciliationError:
                # leave the case as-is if its actions cannot be reconciled
                pass
        case.force_save()

    # set flags for indicator pillows and save
    xform.initial_processing_complete = True

    # if there are pillows or other _changes listeners competing to update
    # this form, override them. this will create a new entry in the feed
    # that they can re-pick up on
    xform.save(force_update=True)
    return cases
def testOtherUserAddsIndex(self): time = datetime.utcnow() # create a case from one user case_id = "other_user_adds_index" self._createCaseStubs([case_id], owner_id=SHARED_ID) # sync to the other's phone to be able to edit assert_user_has_case(self, self.other_user, case_id, restore_id=self.other_sync_log.get_id) latest_sync = SyncLog.last_for_user(OTHER_USER_ID) mother_id = "other_user_adds_index_mother" parent_case = CaseBlock( create=True, date_modified=time, case_id=mother_id, user_id=OTHER_USER_ID, case_type=PARENT_TYPE, version=V2, ).as_xml(format_datetime=json_format_datetime) self._postFakeWithSyncToken( parent_case, latest_sync.get_id ) # the original user should not get the parent case assert_user_doesnt_have_case(self, self.user, mother_id, restore_id=self.sync_log.get_id) # update the original case from another, adding an indexed case self._postFakeWithSyncToken( CaseBlock( create=False, case_id=case_id, user_id=OTHER_USER_ID, owner_id=USER_ID, version=V2, index={'mother': ('mother', mother_id)} ).as_xml(format_datetime=json_format_datetime), latest_sync.get_id ) # original user syncs again # make sure index updates take and indexed case also syncs expected_parent_case = CaseBlock( create=True, date_modified=time, case_id=mother_id, user_id=OTHER_USER_ID, case_type=PARENT_TYPE, owner_id=OTHER_USER_ID, version=V2, ).as_xml(format_datetime=json_format_datetime) check_user_has_case(self, self.user, expected_parent_case, restore_id=self.sync_log.get_id, version=V2, purge_restore_cache=True) _, orig = assert_user_has_case(self, self.user, case_id, restore_id=self.sync_log.get_id) self.assertTrue("index" in ElementTree.tostring(orig))