def test_synced_during_and_after_bug_resolution_returns_200(self):
    """A sync chain that starts during the bug window but whose latest log
    postdates the resolution should restore successfully (HTTP 200)."""
    during_log = SimplifiedSyncLog(
        user_id=self.restore_user.user_id,
        date=datetime(2016, 7, 19, 20, 0),  # during bug
    )
    during_log.save()

    after_log = SimplifiedSyncLog(
        user_id=self.restore_user.user_id,
        previous_log_id=during_log._id,
        date=datetime(2016, 7, 21, 19, 0),  # after resolution
    )
    after_log.save()

    config = RestoreConfig(
        project=self.project,
        restore_user=self.restore_user,
        params=RestoreParams(
            sync_log_id=after_log._id,
            version="2.0",
        ),
        cache_settings=RestoreCacheSettings(),
    )
    self.assertEqual(config.get_response().status_code, 200)
def test_should_sync_when_changed(self, *args):
    """Related-location fixtures should only resync after a LocationRelation
    is created or modified."""
    self.user._couch_user.add_to_assigned_locations(self.locations['Boston'])
    last_sync_time = datetime.utcnow()
    sync_log = SimplifiedSyncLog(date=last_sync_time)
    boston_queryset = SQLLocation.objects.filter(pk=self.locations['Boston'].pk)
    restore_state = MockRestoreState(self.user, RestoreParams())

    # Nothing touched since the sync log: no resync, empty fixture.
    self.assertFalse(
        should_sync_locations(sync_log, boston_queryset, restore_state))
    fixture = call_fixture_generator(
        related_locations_fixture_generator, self.user, last_sync=sync_log)
    self.assertEqual(len(fixture), 0)

    # Creating a relation after the sync log's date triggers a resync.
    LocationRelation.objects.create(
        location_a=self.locations["Revere"],
        location_b=self.locations["Boston"])
    self.assertTrue(
        should_sync_locations(
            SimplifiedSyncLog(date=last_sync_time), boston_queryset, restore_state))
    # length 2 for index definition + data
    fixture = call_fixture_generator(
        related_locations_fixture_generator, self.user, last_sync=sync_log)
    self.assertEqual(len(fixture), 2)
def test_synced_before_and_after_bug_resolution_200(self):
    """Sync logs dated before the bug was introduced, and follow-up logs
    dated after the resolution, should both restore with HTTP 200."""
    before_log = SimplifiedSyncLog(
        user_id=self.restore_user.user_id,
        date=datetime(2016, 7, 19, 18, 0),  # synced before bug was introduced
    )
    before_log.save()
    response = RestoreConfig(
        project=self.project,
        restore_user=self.restore_user,
        params=RestoreParams(
            sync_log_id=before_log._id,
            version="2.0",
        ),
        cache_settings=RestoreCacheSettings(),
    ).get_response()
    self.assertEqual(response.status_code, 200)

    after_log = SimplifiedSyncLog(
        user_id=self.restore_user.user_id,
        previous_log_id=before_log._id,
        date=datetime(2016, 7, 21, 19, 0),  # after resolution
    )
    after_log.save()
    response = RestoreConfig(
        project=self.project,
        restore_user=self.restore_user,
        params=RestoreParams(
            sync_log_id=after_log._id,
            version="2.0",
        ),
        cache_settings=RestoreCacheSettings(),
    ).get_response()
    self.assertEqual(response.status_code, 200)
def test_archiving_location_should_resync(self):
    """
    When locations are archived, we should resync them
    """
    location = make_location(
        domain=self.domain,
        name='winterfell',
        location_type=self.location_type.name,
    )
    location.save()
    after_save = datetime.utcnow()
    self.assertEqual('winterfell', location.name)

    locations_queryset = SQLLocation.objects.filter(pk=location.pk)
    restore_state = MockRestoreState(self.user.to_ota_restore_user(), RestoreParams())

    # Should not resync if last sync was after location save
    self.assertFalse(
        should_sync_locations(
            SimplifiedSyncLog(date=after_save), locations_queryset, restore_state))

    # archive the location and refetch it
    location.archive()
    after_archive = datetime.utcnow()
    location = SQLLocation.objects.last()
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)

    # Should resync if last sync was after the save but before the archive
    self.assertTrue(
        should_sync_locations(
            SimplifiedSyncLog(date=after_save), locations_queryset, restore_state))

    # Should not resync if last sync was after the location was archived
    self.assertFalse(
        should_sync_locations(
            SimplifiedSyncLog(date=after_archive), locations_queryset, restore_state))
def test_should_sync_timezone(self):
    """should_sync evaluates day boundaries in the domain's default timezone,
    so the same UTC instants can straddle midnight locally but not in UTC."""
    domain = Domain(name='test', default_timezone='Africa/Johannesburg')
    # yesterday at 21:59:59 UTC = yesterday at 23:59:59 locally (UTC+2)
    last_sync = datetime.combine(date.today() - timedelta(days=1), time(21, 59, 59))
    # yesterday at 22:00:00 UTC = today at 00:00:00 locally, i.e. a new local day
    utcnow = datetime.combine(date.today() - timedelta(days=1), time(22, 00, 00))
    self.assertTrue(
        should_sync(domain, SimplifiedSyncLog(date=last_sync), utcnow=utcnow))

    # In UTC both instants fall on the same (previous) day: no sync needed.
    domain = Domain(name='test', default_timezone='UTC')
    self.assertFalse(
        should_sync(domain, SimplifiedSyncLog(date=last_sync), utcnow=utcnow))
def test_update(self):
    """Re-saving an already-persisted synclog should issue exactly one query."""
    synclog = SimplifiedSyncLog(
        domain='test', user_id='user1', date=datetime(2015, 7, 1, 0, 0))
    synclog.save()
    # previously this was 2 queries, fetch + update
    with self.assertNumQueries(1):
        synclog.save()
def test_sync_log(self):
    """Dump-and-load should round-trip sync logs for users in this domain and
    exclude users/logs belonging to other domains."""
    from casexml.apps.phone.models import SyncLog, SimplifiedSyncLog
    from corehq.apps.users.models import WebUser, CommCareUser
    from casexml.apps.phone.models import get_sync_log_class_by_format

    web_user = WebUser.create(
        domain=self.domain_name,
        username='******',
        password='******',
        email='*****@*****.**',
    )
    mobile_user = CommCareUser.create(self.domain_name, 'mobile_user1', 'secret')
    other_user = CommCareUser.create('other_domain', 'mobile_user2', 'secret')
    self.addCleanup(other_user.delete)

    web_log = SyncLog(user_id=web_user._id)
    web_log.save()
    mobile_log = SimplifiedSyncLog(user_id=mobile_user._id)
    mobile_log.save()
    other_log = SyncLog(user_id=other_user._id)
    other_log.save()

    def _synclog_to_class(doc):
        # Resolve the concrete sync log class from the doc's log_format.
        if doc['doc_type'] == 'SyncLog':
            return get_sync_log_class_by_format(doc.get('log_format'))

    expected_docs = [web_user, mobile_user, web_log, mobile_log]
    not_expected_docs = [other_user, other_log]
    self._dump_and_load(expected_docs, not_expected_docs,
                        doc_to_doc_class=_synclog_to_class)
def test_purge_tiered_bottom_up(self):
    """Purging leaf-first through a three-level index chain removes each case
    as soon as it is purged, since nothing still indexes it."""
    [grandparent_id, parent_id, child_id] = all_ids = ['grandparent', 'parent', 'child']
    tree = IndexTree(indices={
        child_id: convert_list_to_dict([parent_id]),
        parent_id: convert_list_to_dict([grandparent_id]),
    })
    sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))

    # just purging the child should purge just the child
    sync_log.purge(child_id)
    self.assertIn(grandparent_id, sync_log.case_ids_on_phone)
    self.assertIn(parent_id, sync_log.case_ids_on_phone)
    self.assertNotIn(child_id, sync_log.case_ids_on_phone)

    # same for the parent
    sync_log.purge(parent_id)
    self.assertIn(grandparent_id, sync_log.case_ids_on_phone)
    self.assertNotIn(parent_id, sync_log.case_ids_on_phone)

    # same for the grandparent
    sync_log.purge(grandparent_id)
    self.assertNotIn(grandparent_id, sync_log.case_ids_on_phone)
def setUpClass(cls):
    """Wipe leftover synclogs and users from other tests, then create the
    groups, domain, users, and synclog fixtures for this class."""
    db = SimplifiedSyncLog.get_db()
    # datetime.min is not compatible for `json_format_datetime`; use the epoch
    for synclog_id in get_synclog_ids_by_date(datetime(1970, 1, 1), datetime.max):
        db.delete_doc(synclog_id)

    # Needed because other tests do not always clean up their users.
    delete_all_users()
    hard_delete_deleted_users()

    cls.g1 = Group(domain=cls.domain, name='group')
    cls.g1.save()
    cls.g2 = Group(domain=cls.domain, name='group')
    cls.g2.soft_delete()

    cls.domain_obj = Domain(
        name=cls.domain,
        is_active=True,
    )
    cls.domain_obj.save()

    cls.web_user = WebUser.create(cls.domain, 'web-user', '***')
    cls.commcare_user = CommCareUser.create(cls.domain, 'cc-user', '***')
    cls.commcare_user.retire()

    cls.synclog = SimplifiedSyncLog(
        domain=cls.domain,
        build_id='1234',
        user_id='5678',
        date=datetime.utcnow(),
    )
    cls.synclog.save()
def test_purge_multiple_parents(self):
    """A case with two parents keeps both parents (and their shared ancestor)
    on the phone until the child itself is purged."""
    [grandparent_id, mother_id, father_id, child_id] = all_ids = [
        'heart-tree', 'catelyn', 'ned', 'arya']
    tree = IndexTree(indices={
        child_id: convert_list_to_dict([mother_id, father_id]),
        mother_id: convert_list_to_dict([grandparent_id]),
        father_id: convert_list_to_dict([grandparent_id]),
    })
    sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))

    # first purge everything but the child
    sync_log.purge(grandparent_id)
    sync_log.purge(mother_id)
    sync_log.purge(father_id)

    # everything should still be relevant because of the child
    for case_id in all_ids:
        self.assertIn(case_id, sync_log.case_ids_on_phone)

    # purging the child should wipe everything else
    sync_log.purge(child_id)
    for case_id in all_ids:
        self.assertNotIn(case_id, sync_log.case_ids_on_phone)
        self.assertNotIn(case_id, sync_log.dependent_case_ids_on_phone)
def test_purge_multiple_children(self):
    """A purged (dependent) parent stays on the phone until every one of its
    children has been purged."""
    [grandparent_id, parent_id, child_id_1, child_id_2] = all_ids = [
        'rickard', 'ned', 'bran', 'arya']
    tree = IndexTree(indices={
        child_id_1: convert_list_to_dict([parent_id]),
        child_id_2: convert_list_to_dict([parent_id]),
        parent_id: convert_list_to_dict([grandparent_id]),
    })
    sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))

    # first purge the parent and grandparent
    sync_log.purge(grandparent_id)
    sync_log.purge(parent_id)
    self.assertIn(grandparent_id, sync_log.case_ids_on_phone)
    self.assertIn(grandparent_id, sync_log.dependent_case_ids_on_phone)
    self.assertIn(parent_id, sync_log.case_ids_on_phone)
    self.assertIn(parent_id, sync_log.dependent_case_ids_on_phone)

    # just purging one child should preserve the parent index
    sync_log.purge(child_id_1)
    self.assertIn(grandparent_id, sync_log.case_ids_on_phone)
    self.assertIn(grandparent_id, sync_log.dependent_case_ids_on_phone)
    self.assertIn(parent_id, sync_log.case_ids_on_phone)
    self.assertIn(parent_id, sync_log.dependent_case_ids_on_phone)
    self.assertNotIn(child_id_1, sync_log.case_ids_on_phone)

    # purging the other one should wipe it
    sync_log.purge(child_id_2)
    for case_id in all_ids:
        self.assertNotIn(case_id, sync_log.case_ids_on_phone)
        self.assertNotIn(case_id, sync_log.dependent_case_ids_on_phone)
def test_pillow(self):
    """Saving a synclog publishes a kafka change; the sync-history pillow
    should turn that change into last-sync reporting metadata on the user."""
    from corehq.apps.change_feed.topics import get_topic_offset
    from corehq.pillows.synclog import get_user_sync_history_pillow

    consumer = get_test_kafka_consumer(topics.SYNCLOG_SQL)
    # get the seq id before the change is published
    kafka_seq = get_topic_offset(topics.SYNCLOG_SQL)

    # make sure user has empty reporting-metadata before a sync
    ccuser = CommCareUser.get(self.ccuser._id)
    self.assertEqual(ccuser.reporting_metadata.last_syncs, [])

    # do a sync
    synclog = SimplifiedSyncLog(
        domain=self.domain.name,
        user_id=self.ccuser._id,
        date=datetime.datetime(2015, 7, 1, 0, 0),
        app_id='123',
    )
    synclog.save()

    # make sure kafka change updates the user with latest sync info
    message = next(consumer)
    change_meta = change_meta_from_kafka_message(message.value)
    synclog = self._get_latest_synclog()
    self.assertEqual(change_meta.document_id, synclog._id)
    self.assertEqual(change_meta.domain, self.domain.name)

    # make sure processor updates the user correctly
    pillow = get_user_sync_history_pillow()
    pillow.process_changes(since=kafka_seq)
    process_reporting_metadata_staging()
    ccuser = CommCareUser.get(self.ccuser._id)
    self.assertEqual(len(ccuser.reporting_metadata.last_syncs), 1)
    self.assertEqual(ccuser.reporting_metadata.last_syncs[0].sync_date, synclog.date)
    self.assertEqual(ccuser.reporting_metadata.last_sync_for_user.sync_date, synclog.date)
def test_purge_tiered_top_down(self):
    """Purging ancestor-first through a three-level index chain only moves
    ancestors to the dependent set; everything is removed once the leaf goes."""
    [grandparent_id, parent_id, child_id] = all_ids = ['grandparent', 'parent', 'child']
    tree = IndexTree(indices={
        child_id: convert_list_to_dict([parent_id]),
        parent_id: convert_list_to_dict([grandparent_id]),
    })
    sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone=set(all_ids))

    # this has no effect other than to move the grandparent to dependent
    sync_log.purge(grandparent_id)
    for case_id in all_ids:
        self.assertIn(case_id, sync_log.case_ids_on_phone)
    self.assertIn(grandparent_id, sync_log.dependent_case_ids_on_phone)
    self.assertNotIn(parent_id, sync_log.dependent_case_ids_on_phone)
    self.assertNotIn(child_id, sync_log.dependent_case_ids_on_phone)

    # likewise, this should have no effect other than to move the parent to dependent
    sync_log.purge(parent_id)
    for case_id in all_ids:
        self.assertIn(case_id, sync_log.case_ids_on_phone)
    self.assertIn(grandparent_id, sync_log.dependent_case_ids_on_phone)
    self.assertIn(parent_id, sync_log.dependent_case_ids_on_phone)
    self.assertNotIn(child_id, sync_log.dependent_case_ids_on_phone)

    # this should now purge everything
    sync_log.purge(child_id)
    for case_id in all_ids:
        self.assertNotIn(case_id, sync_log.case_ids_on_phone)
        self.assertNotIn(case_id, sync_log.dependent_case_ids_on_phone)
def test_update_dependent_case_owner_still_present(self):
    """Changing the owner of a dependent case must keep it in the phone's
    dependent case list."""
    sync_log = SimplifiedSyncLog(
        domain="domain",
        case_ids_on_phone={'c1', 'd1'},
        dependent_case_ids_on_phone={'d1'},
        index_tree=IndexTree(indices={'c1': {'d1-id': 'd1'}}),
        user_id="user",
        owner_ids_on_phone={'user1'},
    )
    form_id = uuid.uuid4().hex
    xform = create_form_for_test("domain", form_id=form_id, save=False)
    form_actions = [
        CaseAction(
            action_type=CASE_ACTION_UPDATE,
            updated_known_properties={'owner_id': 'user2'},
            indices=[],
        )
    ]
    with patch.object(CommCareCase, 'get_actions_for_form', return_value=form_actions):
        parent_case = CommCareCase(case_id='d1')
        # before this test was added, the following call raised a ValueError on legacy logs.
        sync_log.update_phone_lists(xform, [parent_case])
    self.assertIn("d1", sync_log.dependent_case_ids_on_phone)
def test_default_mobile_ucr_sync_interval(self):
    """
    When sync interval is set, ReportFixturesProvider should provide reports
    only if the interval has passed since the last sync or a new build is
    being requested.
    """
    from corehq.apps.userreports.reports.data_source import ConfigurableReportDataSource
    with patch.object(ConfigurableReportDataSource, 'get_data') as get_data_mock:
        get_data_mock.return_value = self.rows
        with mock_datasource_config():
            self.domain_obj.default_mobile_ucr_sync_interval = 4  # hours
            two_hours_ago = datetime.utcnow() - timedelta(hours=2)

            # Same build, synced within the interval: no reports.
            recent_sync = SimplifiedSyncLog(
                domain=self.domain_obj.name,
                date=two_hours_ago,
                user_id='456',
                build_id=self.app1.get_id,
            )
            recent_sync.save()
            fixtures = call_fixture_generator(
                report_fixture_generator, self.user, app=self.app1,
                last_sync=recent_sync, project=self.domain_obj)
            reports = self._get_fixture(fixtures, ReportFixturesProviderV1.id)
            self.assertIsNone(reports)

            # Different build id: reports are provided despite the recent sync.
            recent_sync_new_build = SimplifiedSyncLog(
                domain=self.domain_obj.name,
                date=two_hours_ago,
                user_id='456',
                build_id='123',
            )
            recent_sync_new_build.save()
            fixtures = call_fixture_generator(
                report_fixture_generator, self.user, app=self.app1,
                last_sync=recent_sync_new_build, project=self.domain_obj)
            reports = self._get_fixture(
                fixtures, ReportFixturesProviderV1.id).findall('.//report')
            self.assertEqual(len(reports), 1)
            self.assertEqual(reports[0].attrib.get('id'), '123456')
            self.domain_obj.default_mobile_ucr_sync_interval = None
def test_force_empty_when_user_has_no_locations(self, *args):
    """No relations have been touched since this sync log, but the fixture
    still pushes down the empty list (length 2: index definition + data)."""
    sync_log = SimplifiedSyncLog(date=datetime.utcnow())
    fixture = call_fixture_generator(
        related_locations_fixture_generator, self.user, last_sync=sync_log)
    self.assertEqual(len(fixture), 2)
def test_purge_self_indexing(self):
    """A case that indexes itself can still be purged cleanly."""
    [case_id] = ['recursive']
    tree = IndexTree(indices={
        case_id: convert_list_to_dict([case_id]),
    })
    sync_log = SimplifiedSyncLog(index_tree=tree, case_ids_on_phone={case_id})
    sync_log.purge(case_id)
    self.assertNotIn(case_id, sync_log.case_ids_on_phone)
    self.assertNotIn(case_id, sync_log.dependent_case_ids_on_phone)
def test_should_sync_locations_change_location_type(self):
    """
    When location_type gets changed, we should resync locations
    """
    yesterday = datetime.today() - timedelta(1)
    day_before_yesterday = yesterday - timedelta(1)

    # Force update because of auto_now
    LocationType.objects.all().update(last_modified=day_before_yesterday)
    self.location_type = LocationType.objects.last()

    location = SQLLocation(
        domain=self.domain,
        name="Meereen",
        location_type=self.location_type,
        metadata={
            'queen': "Daenerys Targaryen",
            'rebels': "Sons of the Harpy",
        },
    )
    location.save()
    # Backdate the location too, again working around auto_now.
    SQLLocation.objects.filter(pk=location.pk).update(
        last_modified=day_before_yesterday)
    location = SQLLocation.objects.last()
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)

    restore_state = MockRestoreState(self.user.to_ota_restore_user(), RestoreParams())

    # Nothing modified since the last sync: no resync.
    self.assertFalse(
        should_sync_locations(
            SimplifiedSyncLog(date=yesterday), locations_queryset, restore_state))

    # Changing the location type after the last sync forces a resync.
    self.location_type.shares_cases = True
    self.location_type.save()
    location = SQLLocation.objects.last()
    locations_queryset = SQLLocation.objects.filter(pk=location.pk)
    self.assertTrue(
        should_sync_locations(
            SimplifiedSyncLog(date=yesterday), locations_queryset, restore_state))
def test_purge_extension_non_dependent_host(self):
    """Purging an extension should not remove the host or itself if the host
    is directly owned
    """
    [host_id, extension_id] = all_ids = ['host', 'extension']
    extension_tree = IndexTree(indices={
        extension_id: convert_list_to_dict([host_id]),
    })
    sync_log = SimplifiedSyncLog(
        extension_index_tree=extension_tree,
        case_ids_on_phone=set(all_ids),
    )
    sync_log.purge(extension_id)
    self.assertIn(extension_id, sync_log.case_ids_on_phone)
    self.assertIn(host_id, sync_log.case_ids_on_phone)
def test_return_412_between_bug_dates(self):
    """A sync log dated inside the bug window should get HTTP 412."""
    log = SimplifiedSyncLog(
        user_id=self.restore_user.user_id,
        date=datetime(2016, 7, 19, 19, 20),
    )
    log.save()
    config = RestoreConfig(
        project=self.project,
        restore_user=self.restore_user,
        params=RestoreParams(
            sync_log_id=log._id,
            version="2.0",
        ),
        cache_settings=RestoreCacheSettings(),
    )
    self.assertEqual(config.get_response().status_code, 412)
def test_purge_partial_children(self):
    """Purging a dependent parent when only some of its children are
    dependent should succeed."""
    [parent_id, child_id_1, child_id_2] = all_ids = ['parent', 'child1', 'child2']
    tree = IndexTree(indices={
        child_id_1: convert_list_to_dict([parent_id]),
        child_id_2: convert_list_to_dict([parent_id]),
    })
    sync_log = SimplifiedSyncLog(
        index_tree=tree,
        case_ids_on_phone=set(all_ids),
        dependent_case_ids_on_phone=set([parent_id, child_id_2]),
    )
    # this used to fail with an AssertionError
    sync_log.purge(parent_id)
def test_sync_log_invalidation_bug(self):
    """Submitting a multimedia update with a sync token should not blow up."""
    sync_log = SimplifiedSyncLog(
        user_id='6dac4940-913e-11e0-9d4b-005056aa7fb5')
    sync_log.save()
    self.addCleanup(FormProcessorTestUtils.delete_all_sync_logs)

    _, case = self._doCreateCaseWithMultimedia()
    # this used to fail before we fixed http://manage.dimagi.com/default.asp?158373
    self._doSubmitUpdateWithMultimedia(
        new_attachments=['commcare_logo_file'],
        removes=[],
        sync_token=sync_log._id,
    )
def test_changed_build_id(self):
    """Restoring against a different app build than the last sync's build
    should force a location resync."""
    app = MockApp('project_default', 'build_1')
    restore_state = MockRestoreState(
        self.user.to_ota_restore_user(), RestoreParams(app=app))
    sync_log_from_old_app = SimplifiedSyncLog(
        date=datetime.utcnow(), build_id=app.get_id)

    # Same build as the last sync: no resync needed.
    self.assertFalse(
        should_sync_locations(
            sync_log_from_old_app, SQLLocation.objects.all(), restore_state))

    # Restoring with a new build: resync.
    new_build = MockApp('project_default', 'build_2')
    restore_state = MockRestoreState(
        self.user.to_ota_restore_user(), RestoreParams(app=new_build))
    self.assertTrue(
        should_sync_locations(
            sync_log_from_old_app, SQLLocation.objects.all(), restore_state))
def test_count_delete_queries(self):
    """prune_synclogs should remove logs older than SYNCLOG_RETENTION_DAYS
    and leave newer ones in place."""
    today = datetime.datetime.today()
    self.docs = [
        SimplifiedSyncLog(
            date=today - datetime.timedelta(days=SYNCLOG_RETENTION_DAYS + 7)),
        SimplifiedSyncLog(
            date=today - datetime.timedelta(days=SYNCLOG_RETENTION_DAYS + 1)),
        SimplifiedSyncLog(
            date=today - datetime.timedelta(days=SYNCLOG_RETENTION_DAYS - 7)),
    ]
    for doc in self.docs:
        doc.domain = self.domain
        doc.user_id = self.user_id
        doc.save()

    self.assert_docs_equal(self._oldest_synclog(self.user_id), self.docs[0])
    prune_synclogs()
    # The two expired logs are gone; the in-retention one survives.
    self.assert_docs_equal(self._oldest_synclog(self.user_id), self.docs[2])
def test_purge_extension(self):
    """Purging extension removes host"""
    [host_id, extension_id] = all_ids = ['host', 'extension']
    extension_tree = IndexTree(indices={
        extension_id: convert_list_to_dict([host_id]),
    })
    sync_log = SimplifiedSyncLog(
        extension_index_tree=extension_tree,
        dependent_case_ids_on_phone=set([host_id]),
        case_ids_on_phone=set(all_ids),
    )
    sync_log.purge(extension_id)
    self.assertNotIn(extension_id, sync_log.case_ids_on_phone)
    self.assertNotIn(host_id, sync_log.case_ids_on_phone)
def synclog_data():
    """Yield the ids of three freshly-saved synclogs, then delete the logs
    when the consumer is done (generator fixture style)."""
    from casexml.apps.phone.models import SimplifiedSyncLog
    synclogs = [
        SimplifiedSyncLog(
            domain='domain', user_id=uuid.uuid4().hex, date=datetime.utcnow())
        for _ in range(3)
    ]
    for synclog in synclogs:
        synclog.save()
    try:
        yield [synclog.get_id for synclog in synclogs]
    finally:
        # Clean up regardless of what the consumer did with the ids.
        for synclog in synclogs:
            synclog.delete()
def test_purge_extension_host_has_multiple_extensions(self):
    """Purging an extension should remove host and its other extensions
    """
    [host_id, extension_id, extension_id_2] = all_ids = [
        'host', 'extension', 'extension_2']
    extension_tree = IndexTree(indices={
        extension_id: convert_list_to_dict([host_id]),
        extension_id_2: convert_list_to_dict([host_id]),
    })
    sync_log = SimplifiedSyncLog(
        extension_index_tree=extension_tree,
        dependent_case_ids_on_phone=set([host_id, extension_id_2]),
        case_ids_on_phone=set(all_ids),
    )
    sync_log.purge(extension_id)
    self.assertNotIn(extension_id, sync_log.case_ids_on_phone)
    self.assertNotIn(extension_id_2, sync_log.case_ids_on_phone)
    self.assertNotIn(host_id, sync_log.case_ids_on_phone)
def setUpClass(cls):
    """Create a mix of SyncLog and SimplifiedSyncLog docs for one user, plus
    a log for an unrelated user, then refresh the analytics indexes."""
    cls.user_id = 'lkasdhfadsloi'
    cls.sync_logs = [
        SyncLog(user_id=cls.user_id, date=datetime.datetime(2015, 7, 1, 0, 0)),
        SimplifiedSyncLog(user_id=cls.user_id, date=datetime.datetime(2015, 3, 1, 0, 0)),
        SyncLog(user_id=cls.user_id, date=datetime.datetime(2015, 1, 1, 0, 0)),
    ]
    sync_logs_other = [SyncLog(user_id='other')]
    cls.docs = cls.sync_logs + sync_logs_other
    for doc in cls.docs:
        doc.save()
    update_analytics_indexes()
def test_open_extension_of_extension(self):
    """Purging the closed host of a chained extension structure removes the
    whole chain from the phone."""
    all_ids = ['host', 'extension', 'extension_of_extension']
    host_id, extension_id, extension_of_extension_id = all_ids
    extension_tree = IndexTree(indices={
        extension_id: convert_list_to_dict([host_id]),
        extension_of_extension_id: convert_list_to_dict([extension_id]),
    })
    sync_log = SimplifiedSyncLog(
        extension_index_tree=extension_tree,
        dependent_case_ids_on_phone=set([host_id, extension_id]),
        closed_cases=set([host_id, extension_id]),
        case_ids_on_phone=set(all_ids),
    )
    sync_log.purge(host_id)
    self.assertNotIn(host_id, sync_log.case_ids_on_phone)
    self.assertNotIn(extension_id, sync_log.case_ids_on_phone)
    self.assertNotIn(extension_of_extension_id, sync_log.case_ids_on_phone)
def _new_sync_log(self):
    """Build (without saving) the SimplifiedSyncLog for this restore, chained
    to the previous log unless this is an initial sync."""
    previous_log_id = None if self.is_initial else self.last_sync_log._id
    synclog = SimplifiedSyncLog(
        _id=uuid.uuid1().hex.lower(),
        domain=self.restore_user.domain,
        build_id=self.params.app_id,
        user_id=self.restore_user.user_id,
        owner_ids_on_phone=set(self.owner_ids),
        date=datetime.utcnow(),
        previous_log_id=previous_log_id,
        extensions_checked=True,
        device_id=self.params.device_id,
    )
    if self.is_livequery:
        synclog.log_format = LOG_FORMAT_LIVEQUERY
    return synclog