def test_selective_product_sync(self):
    """Products should appear in a sync only when changed since the last sync."""
    user = self.user
    expected_xml = self.generate_product_fixture_xml(user)
    product_list = Product.by_domain(user.domain)
    self._initialize_product_names(len(product_list))

    def _latest_sync():
        logs = SyncLog.view(
            "phone/sync_logs_by_user", include_docs=True, reduce=False
        ).all()
        return sorted(logs, key=lambda log: log.date)[-1]

    # Initial sync: the full product fixture is expected.
    fixture_original = call_fixture_generator(product_fixture_generator, user)[1]
    deprecated_generate_restore_payload(self.domain_obj, user)
    self.assertXmlEqual(expected_xml, ElementTree.tostring(fixture_original))

    first_sync = _latest_sync()
    # make sure the time stamp on this first sync is
    # not on the same second that the products were created
    first_sync.date += datetime.timedelta(seconds=1)

    # second sync is before any changes are made, so there should
    # be no products synced
    fixture_pre_change = call_fixture_generator(
        product_fixture_generator, user, last_sync=first_sync)
    deprecated_generate_restore_payload(self.domain_obj, user)
    self.assertEqual([], fixture_pre_change, "Fixture was not empty on second sync")

    second_sync = _latest_sync()
    self.assertTrue(first_sync._id != second_sync._id)

    # save should make the product more recently updated than the last sync
    for product in product_list:
        product.save()

    # now that we've updated a product, we should get product data in sync again
    fixture_post_change = call_fixture_generator(
        product_fixture_generator, user, last_sync=second_sync)[1]
    # regenerate the fixture xml to make sure it is still legit
    self.assertXmlEqual(expected_xml, ElementTree.tostring(fixture_post_change))
def test_selective_product_sync(self):
    """Verify products sync only when modified after the previous sync."""
    user = bootstrap_user(self, phone_number="1234567890")
    expected_xml = self.generate_product_fixture_xml(user)
    product_list = Product.by_domain(user.domain)
    self._initialize_product_names(len(product_list))

    def _most_recent_sync():
        return sorted(
            SyncLog.view("phone/sync_logs_by_user",
                         include_docs=True, reduce=False).all(),
            key=lambda log: log.date,
        )[-1]

    # First sync should contain the full product fixture.
    fixture_original = product_fixture_generator(user, V1, None, None)
    generate_restore_payload(user.to_casexml_user())
    self.assertXmlEqual(expected_xml, ElementTree.tostring(fixture_original[0]))

    first_sync = _most_recent_sync()
    # make sure the time stamp on this first sync is
    # not on the same second that the products were created
    first_sync.date += datetime.timedelta(seconds=1)

    # second sync is before any changes are made, so there should
    # be no products synced
    fixture_pre_change = product_fixture_generator(user, V1, None, first_sync)
    generate_restore_payload(user.to_casexml_user())
    self.assertEqual([], fixture_pre_change, "Fixture was not empty on second sync")

    second_sync = _most_recent_sync()
    self.assertTrue(first_sync._id != second_sync._id)

    # A save bumps each product past the last sync date.
    for product in product_list:
        product.save()

    # After the update the fixture should be produced again, unchanged.
    fixture_post_change = product_fixture_generator(user, V1, None, second_sync)
    self.assertXmlEqual(expected_xml, ElementTree.tostring(fixture_post_change[0]))
def test_selective_program_sync(self):
    """Programs should appear in a sync only when changed since the last sync."""
    user = bootstrap_user(self, phone_number="1234567890")
    Program(domain=user.domain, name="test1", code="t1").save()
    program_list = Program.by_domain(user.domain)
    program_xml = self.generate_program_xml(program_list, user)

    def _newest_sync_log():
        return sorted(
            SyncLog.view("phone/sync_logs_by_user",
                         include_docs=True, reduce=False).all(),
            key=lambda log: log.date,
        )[-1]

    # Initial sync carries the full program fixture.
    fixture_original = program_fixture_generator(user, V1)
    generate_restore_payload(self.domain, user.to_casexml_user())
    self.assertXmlEqual(program_xml, ElementTree.tostring(fixture_original[0]))

    first_sync = _newest_sync_log()
    # make sure the time stamp on this first sync is
    # not on the same second that the programs were created
    first_sync.date += datetime.timedelta(seconds=1)

    # second sync is before any changes are made, so there should
    # be no programs synced
    fixture_pre_change = program_fixture_generator(user, V1, last_sync=first_sync)
    generate_restore_payload(self.domain, user.to_casexml_user())
    self.assertEqual([], fixture_pre_change, "Fixture was not empty on second sync")

    second_sync = _newest_sync_log()
    self.assertTrue(first_sync._id != second_sync._id)

    # Saving bumps each program past the last sync date.
    for program in program_list:
        program.save()

    # The updated programs must appear in the next sync.
    fixture_post_change = program_fixture_generator(user, V1, last_sync=second_sync)
    # regenerate the fixture xml to make sure it is still legit
    self.assertXmlEqual(program_xml, ElementTree.tostring(fixture_post_change[0]))
def setUp(self):
    """Reset forms, cases, and sync logs, then create the initial sync token."""
    # Wipe any leftover docs so each test starts from a clean database.
    for item in XFormInstance.view("couchforms/by_xmlns",
                                   include_docs=True, reduce=False).all():
        item.delete()
    for case in CommCareCase.view("case/by_user",
                                  reduce=False, include_docs=True).all():
        case.delete()
    for log in SyncLog.view("phone/sync_logs_by_user",
                            include_docs=True, reduce=False).all():
        log.delete()
    self.user = User(user_id=USER_ID, username="******",
                     password="******", date_joined=datetime(2011, 6, 9))
    # this creates the initial blank sync token in the database
    generate_restore_payload(self.user)
    [sync_log] = SyncLog.view("phone/sync_logs_by_user",
                              include_docs=True, reduce=False).all()
    self.sync_log = sync_log
def get_all_sync_logs_docs():
    """Iterate over every SyncLog document in the database (unit tests only)."""
    assert settings.UNIT_TESTING
    ids = [row['id'] for row in SyncLog.view(
        "phone/sync_logs_by_user",
        reduce=False,
    )]
    return iter_docs(SyncLog.get_db(), ids)
def testUserRestoreWithCase(self):
    """A restore after a case submission includes the v2 case block."""
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    FormProcessorInterface.submit_form_locally(xml_data, self.domain)
    expected_case_block = """
    <case case_id="asdf" date_modified="2010-06-29T13:42:50.000000Z" user_id="foo"
          xmlns="http://commcarehq.org/case/transaction/v2">
        <create>
            <case_type>test_case_type</case_type>
            <case_name>test case name</case_name>
            <owner_id>foo</owner_id>
        </create>
        <update>
            <external_id>someexternal</external_id>
        </update>
    </case>"""
    restore_payload = generate_restore_payload(
        project=Domain(name=self.domain),
        user=dummy_user(),
        items=True,
        version=V3,
    )
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).one().get_id
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log_id, expected_case_block, items=4),
        restore_payload,
    )
def sync_logs_for_user(user_id):
    """Render the CHW sync-log table for one user's sync history."""
    # Key range [user_id] .. [user_id, {}] selects every log for this user.
    logs = SyncLog.view(
        "phone/sync_logs_by_user",
        reduce=False,
        startkey=[user_id],
        endkey=[user_id, {}],
        include_docs=True,
    )
    return render_to_string(
        "phone/partials/sync_log_for_chw_table.html",
        {"sync_data": logs},
    )
def testUserRestoreWithCase(self):
    """Check v1 and v2 case XML for a submitted case, then the restore payload."""
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    # implicit length assertion
    _, _, [newcase] = submit_form_locally(xml_data, domain=self.project.name)
    expected_case_block = """
    <case>
        <case_id>asdf</case_id>
        <date_modified>2010-06-29T13:42:50.000000Z</date_modified>
        <create>
            <case_type_id>test_case_type</case_type_id>
            <user_id>foo</user_id>
            <case_name>test case name</case_name>
            <external_id>someexternal</external_id>
        </create>
    </case>"""
    check_xml_line_by_line(
        self,
        expected_case_block,
        xml.get_case_xml(newcase, [case_const.CASE_ACTION_CREATE,
                                   case_const.CASE_ACTION_UPDATE]),
    )

    # check v2
    expected_v2_case_block = """
    <case case_id="asdf" date_modified="2010-06-29T13:42:50.000000Z" user_id="foo"
          xmlns="http://commcarehq.org/case/transaction/v2" >
        <create>
            <case_type>test_case_type</case_type>
            <case_name>test case name</case_name>
            <owner_id>foo</owner_id>
        </create>
        <update>
            <external_id>someexternal</external_id>
        </update>
    </case>"""
    check_xml_line_by_line(
        self,
        expected_v2_case_block,
        xml.get_case_xml(
            newcase,
            [case_const.CASE_ACTION_CREATE, case_const.CASE_ACTION_UPDATE],
            version="2.0",
        ),
    )

    restore_payload = generate_restore_payload(
        project=self.project,
        user=dummy_user(),
        items=True,
    )
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).one().get_id
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log_id, expected_case_block, items=4),
        restore_payload,
    )
def rows(self):
    """Build datatable rows for the selected user's ten newest sync logs."""
    base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))
    user_id = self.request.GET.get('individual')
    if not user_id:
        return []
    # security check
    get_document_or_404(CommCareUser, self.domain, user_id)
    log_ids = [row['id'] for row in SyncLog.view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        reduce=False,
        limit=10,
    )]

    def _as_row(sync_log):
        def _duration_cell(duration):
            if isinstance(duration, int):
                return format_datatables_data(
                    '<span class="{cls}">{text}</span>'.format(
                        cls=_bootstrap_class(duration or 0, 60, 20),
                        text=_('{} seconds').format(duration),
                    ),
                    duration
                )
            else:
                return format_datatables_data(
                    '<span class="label">{text}</span>'.format(
                        text=_("Unknown"),
                    ),
                    -1,
                )

        def _id_cell(sync_log_id):
            href = base_link_url.format(id=sync_log_id)
            return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                href=href,
                id=sync_log_id
            )

        num_cases = len(sync_log.cases_on_phone)
        columns = [
            _fmt_date(sync_log.date),
            format_datatables_data(num_cases, num_cases),
            _duration_cell(sync_log.duration),
        ]
        if self.show_extra_columns:
            columns.append(_id_cell(sync_log.get_id))
        return columns

    return [
        _as_row(SyncLog.wrap(log_json))
        for log_json in iter_docs(SyncLog.get_db(), log_ids)
    ]
def testInitialEmpty(self):
    """
    Tests that a newly created sync token has no cases attached to it.
    """
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    self._testUpdate(sync_log.get_id, {}, {})
def testSyncToken(self):
    """
    Tests sync token / sync mode support
    """
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    form = post_xform_to_couch(xml_data)
    process_cases(sender="testharness", xform=form)
    time.sleep(1)

    restore_payload = generate_restore_payload(dummy_user())
    # implicit length assertion
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False).all()
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log.get_id, const.CREATE_SHORT),
        restore_payload,
    )

    time.sleep(1)
    sync_restore_payload = generate_restore_payload(dummy_user(), sync_log.get_id)
    all_logs = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False).all()
    [latest_log] = [log for log in all_logs if log.get_id != sync_log.get_id]
    # should no longer have a case block in the restore XML
    check_xml_line_by_line(
        self, dummy_restore_xml(latest_log.get_id), sync_restore_payload)

    # apply an update
    time.sleep(1)
    file_path = os.path.join(os.path.dirname(__file__), "data", "update_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    form = post_xform_to_couch(xml_data)
    process_cases(sender="testharness", xform=form)
    time.sleep(1)

    sync_restore_payload = generate_restore_payload(dummy_user(), latest_log.get_id)
    all_logs = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False).all()
    seen = {sync_log.get_id, latest_log.get_id}
    [even_latest_log] = [log for log in all_logs if log.get_id not in seen]
    # case block should come back
    check_xml_line_by_line(
        self,
        dummy_restore_xml(even_latest_log.get_id, const.UPDATE_SHORT),
        sync_restore_payload,
    )
def get_all_sync_logs_docs():
    """Iterate over every SyncLog document in the database."""
    ids = [
        row['id']
        for row in SyncLog.view("phone/sync_logs_by_user", reduce=False)
    ]
    return iter_docs(SyncLog.get_db(), ids)
def testUserRestore(self):
    """A basic restore creates exactly one sync log with the expected payload."""
    # No sync logs should exist before the restore.
    self.assertEqual(0, SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).count())
    restore_payload = generate_restore_payload(self.project, dummy_user(), items=True)
    sync_log = SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).one()
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log.get_id, items=3),
        restore_payload,
    )
def testMultiplePartsSingleSubmit(self):
    """
    Tests a create and update in the same form
    """
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    token = sync_log.get_id
    self._postWithSyncToken("case_create.xml", token)
    self._testUpdate(token, {"IKA9G79J4HDSPJLG3ER2OHQUY": []})
def test_selective_program_sync(self):
    """Programs should sync only when changed since the previous sync."""
    user = create_restore_user(self.domain.name)
    Program(domain=user.domain, name="test1", code="t1").save()
    program_list = Program.by_domain(user.domain)
    program_xml = self.generate_program_xml(program_list, user)

    def _last_sync():
        return sorted(
            SyncLog.view("phone/sync_logs_by_user",
                         include_docs=True, reduce=False).all(),
            key=lambda log: log.date,
        )[-1]

    # Initial sync: full program fixture expected.
    fixture_original = program_fixture_generator(user, V1)
    generate_restore_payload(self.domain, user)
    self.assertXmlEqual(program_xml, ElementTree.tostring(fixture_original[0]))

    first_sync = _last_sync()
    # make sure the time stamp on this first sync is
    # not on the same second that the programs were created
    first_sync.date += datetime.timedelta(seconds=1)

    # second sync is before any changes are made, so there should
    # be no programs synced
    fixture_pre_change = program_fixture_generator(user, V1, last_sync=first_sync)
    generate_restore_payload(self.domain, user)
    self.assertEqual([], fixture_pre_change, "Fixture was not empty on second sync")

    second_sync = _last_sync()
    self.assertTrue(first_sync._id != second_sync._id)

    # save should make the program more recently updated than the last sync
    for program in program_list:
        program.save()

    # now that we've updated a program, we should get program data in sync again
    fixture_post_change = program_fixture_generator(user, V1, last_sync=second_sync)
    # regenerate the fixture xml to make sure it is still legit
    self.assertXmlEqual(program_xml, ElementTree.tostring(fixture_post_change[0]))
def get_last_synclog_for_user(user_id):
    """Return the most recent SyncLog for the given user, or None."""
    # descending with startkey=[user_id, {}] walks the user's logs newest-first.
    return SyncLog.view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        limit=1,
        reduce=False,
        include_docs=True,
    ).one()
def get_synclogs_for_user(user_id, limit=10):
    """Return up to ``limit`` most recent sync-log rows for a user.

    wrap_doc=False keeps raw JSON rows instead of wrapping into SyncLog.
    """
    return SyncLog.view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        limit=limit,
        reduce=False,
        include_docs=True,
        wrap_doc=False,
    )
def get_synclog_ids_before_date(before_date, limit=1000):
    """Return ids of up to ``limit`` sync logs dated before ``before_date``."""
    if isinstance(before_date, date):
        # The view keys on date strings, so normalize date objects.
        before_date = before_date.strftime("%Y-%m-%d")
    rows = SyncLog.view(
        "sync_logs_by_date/view",
        endkey=[before_date],
        limit=limit,
        reduce=False,
        include_docs=False,
    )
    return [row['id'] for row in rows]
def get_doc_ids(self, domain):
    """Yield (SyncLog, doc_ids) for every user in the given domain."""
    from corehq.apps.users.dbaccessors.all_commcare_users import get_all_user_ids_by_domain
    from casexml.apps.phone.models import SyncLog
    for user_id in get_all_user_ids_by_domain(domain):
        rows = SyncLog.view(
            "phone/sync_logs_by_user",
            startkey=[user_id],
            endkey=[user_id, {}],
            reduce=False,
            include_docs=False,
        )
        yield SyncLog, [row['id'] for row in rows]
def testMultipleCases(self):
    """
    Test creating multiple cases from multilple forms
    """
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    token = sync_log.get_id
    self._postWithSyncToken("create_short.xml", token)
    self._testUpdate(token, {"asdf": []})
    self._postWithSyncToken("case_create.xml", token)
    self._testUpdate(token, {"asdf": [], "IKA9G79J4HDSPJLG3ER2OHQUY": []})
def get_last_synclog_for_user(user_id):
    """Return the newest properly-wrapped SyncLog for a user, or None."""
    result = SyncLog.view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        limit=1,
        reduce=False,
        include_docs=True,
        wrap_doc=False,
    )
    if result:
        # Exactly one row expected with limit=1.
        row, = result
        return properly_wrap_sync_log(row['doc'])
def setUp(self):
    """Clear persisted state and create the initial blank sync token."""
    delete_all_cases()
    delete_all_xforms()
    delete_all_sync_logs()
    self.user = User(user_id="state_hash", username="******",
                     password="******", date_joined=datetime(2011, 6, 9))
    # this creates the initial blank sync token in the database
    generate_restore_payload(self.user)
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    self.sync_log = sync_log
def testMultipleUpdates(self):
    """
    Test that multiple update submissions don't update the case lists
    and don't create duplicates in them
    """
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    token = sync_log.get_id
    self._postWithSyncToken("create_short.xml", token)
    self._postWithSyncToken("update_short.xml", token)
    self._testUpdate(token, {"asdf": []})
    # A second update should also leave the case list untouched.
    self._postWithSyncToken("update_short_2.xml", token)
    self._testUpdate(token, {"asdf": []})
def get_doc_ids(self, domain):
    """Yield (SyncLog, doc_ids) for every user in the given domain."""
    from corehq.apps.users.dbaccessors.all_commcare_users import get_all_user_ids_by_domain
    from casexml.apps.phone.models import SyncLog
    for user_id in get_all_user_ids_by_domain(domain):
        # this excludes sync logs in old DB (prior to migration to synclog DB).
        # See ``synclog_view``.
        rows = SyncLog.view(
            "phone/sync_logs_by_user",
            startkey=[user_id],
            endkey=[user_id, {}],
            reduce=False,
            include_docs=False,
        )
        yield SyncLog, [row['id'] for row in rows]
def get_sync_logs_for_user(user_id, limit):
    """Return the ``limit`` newest SyncLogs for a user, properly wrapped."""
    rows = SyncLog.view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        reduce=False,
        limit=limit,
        include_docs=True,
        stale=stale_ok(),  # stale reads are acceptable for this query
        wrap_doc=False,
    )
    return [properly_wrap_sync_log(row['doc']) for row in rows]
def testUserRestoreWithCase(self):
    """Check v1 and v2 case XML for a submitted case, then the restore payload."""
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    form = post_xform_to_couch(xml_data)
    process_cases(sender="testharness", xform=form)
    user = dummy_user()

    # implicit length assertion
    [newcase] = CommCareCase.view("case/by_user",
                                  reduce=False, include_docs=True).all()
    self.assertEqual(1, len(user.get_case_updates(None).actual_cases_to_sync))
    expected_case_block = """
    <case>
        <case_id>asdf</case_id>
        <date_modified>2010-06-29</date_modified>
        <create>
            <case_type_id>test_case_type</case_type_id>
            <user_id>foo</user_id>
            <case_name>test case name</case_name>
            <external_id>someexternal</external_id>
        </create>
    </case>"""
    check_xml_line_by_line(
        self,
        expected_case_block,
        xml.get_case_xml(newcase, [case_const.CASE_ACTION_CREATE,
                                   case_const.CASE_ACTION_UPDATE]),
    )

    # check v2
    expected_v2_case_block = """
    <case case_id="asdf" date_modified="2010-06-29" user_id="foo"
          xmlns="http://commcarehq.org/case/transaction/v2" >
        <create>
            <case_type>test_case_type</case_type>
            <case_name>test case name</case_name>
            <owner_id>foo</owner_id>
        </create>
        <update>
            <external_id>someexternal</external_id>
        </update>
    </case>"""
    check_xml_line_by_line(
        self,
        expected_v2_case_block,
        xml.get_case_xml(
            newcase,
            [case_const.CASE_ACTION_CREATE, case_const.CASE_ACTION_UPDATE],
            version="2.0",
        ),
    )

    restore_payload = generate_restore_payload(dummy_user())
    # implicit length assertion
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False).all()
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log.get_id, expected_case_block),
        restore_payload,
    )
def testMultipleCases(self):
    """
    Test creating multiple cases from multilple forms
    """
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    sync_id = sync_log.get_id
    self._postWithSyncToken("create_short.xml", sync_id)
    self._testUpdate(sync_id, {"asdf": []})
    self._postWithSyncToken("case_create.xml", sync_id)
    self._testUpdate(sync_id, {
        "asdf": [],
        "IKA9G79J4HDSPJLG3ER2OHQUY": [],
    })
def get_synclog_ids_before_date(before_date, limit=1000, num_tries=10):
    """Return sync-log ids before ``before_date``, retrying on RequestFailed.

    Raises CouldNotRetrieveSyncLogIds after ``num_tries`` failed attempts.
    """
    if isinstance(before_date, date):
        before_date = before_date.strftime("%Y-%m-%d")
    for _attempt in range(num_tries):
        try:
            rows = SyncLog.view(
                "sync_logs_by_date/view",
                endkey=[before_date],
                limit=limit,
                reduce=False,
                include_docs=False,
            )
            return [row['id'] for row in rows]
        except RequestFailed:
            # Couch occasionally fails on this view; just try again.
            pass
    raise CouldNotRetrieveSyncLogIds()
def rows(self):
    """Build datatable rows (date, case count, duration) for a user's sync logs."""
    user_id = self.request.GET.get('individual')
    if not user_id:
        return []
    # security check
    get_document_or_404(CommCareUser, self.domain, user_id)
    sync_log_ids = [row['id'] for row in SyncLog.view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        reduce=False,
    )]

    def _sync_log_to_row(sync_log):
        def _fmt_duration(duration):
            if isinstance(duration, int):
                return format_datatables_data(
                    '<span class="{cls}">{text}</span>'.format(
                        # BUGFIX: thresholds were passed as (20, 60) here while
                        # every sibling implementation of this report passes
                        # (60, 20); aligned for consistent severity styling.
                        cls=_bootstrap_class(duration or 0, 60, 20),
                        text=_('{} seconds').format(duration),
                    ),
                    duration
                )
            else:
                return format_datatables_data(
                    '<span class="label">{text}</span>'.format(
                        text=_("Unknown"),
                    ),
                    -1,
                )

        num_cases = len(sync_log.cases_on_phone)
        return [
            _fmt_date(sync_log.date),
            format_datatables_data(num_cases, num_cases),
            _fmt_duration(sync_log.duration),
        ]

    return [
        _sync_log_to_row(SyncLog.wrap(sync_log_json))
        for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
    ]
def get_synclog_ids_by_date(start_datetime, end_datetime):
    '''
    Returns all synclog ids that have been modified within a time range. The
    start date is exclusive while the end date is inclusive
    (start_datetime, end_datetime].
    '''
    from casexml.apps.phone.models import SyncLog
    json_start = json_format_datetime(start_datetime)
    results = SyncLog.view(
        "sync_logs_by_date/view",
        startkey=[json_start],
        endkey=[json_format_datetime(end_datetime)],
        reduce=False,
        include_docs=False,
    )
    for result in results:
        # The view is inclusive of startkey, so skip exact matches to keep
        # the start of the range exclusive.
        if result['key'][0] == json_start:
            continue
        yield result['id']
def testTokenAssociation(self):
    """
    Test that individual create, update, and close submissions update
    the appropriate case lists in the sync token
    """
    [sync_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    token = sync_log.get_id
    self._postWithSyncToken("create_short.xml", token)
    self._testUpdate(token, {"asdf": []})
    # a normal update should have no affect
    self._postWithSyncToken("update_short.xml", token)
    self._testUpdate(token, {"asdf": []})
    # close should remove it from the cases_on_phone list
    # (and currently puts it into the dependent list though this
    # might change.
    self._postWithSyncToken("close_short.xml", token)
    self._testUpdate(token, {}, {"asdf": []})
def testUserRestoreWithCase(self):
    """A V3 restore after a case submission includes the v2 case block."""
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    form = post_xform_to_couch(xml_data, domain=self.domain)
    process_cases(form)
    expected_case_block = """
    <case case_id="asdf" date_modified="2010-06-29T13:42:50.000000Z" user_id="foo"
          xmlns="http://commcarehq.org/case/transaction/v2">
        <create>
            <case_type>test_case_type</case_type>
            <case_name>test case name</case_name>
            <owner_id>foo</owner_id>
        </create>
        <update>
            <external_id>someexternal</external_id>
        </update>
    </case>"""
    restore_payload = generate_restore_payload(
        project=Domain(name=self.domain),
        user=dummy_user(),
        items=True,
        version=V3,
    )
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).one().get_id
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log_id, expected_case_block, items=4),
        restore_payload,
    )
def _test_sync_token(self, items):
    """
    Tests sync token / sync mode support
    """
    xml_data = self.get_xml('create_short')
    xml_data = xml_data.format(user_id=self.restore_user.user_id)
    submit_form_locally(xml_data, domain=self.project.name)

    def _all_logs():
        return SyncLog.view(
            "phone/sync_logs_by_user", include_docs=True, reduce=False).all()

    restore_payload = generate_restore_payload(self.project, self.restore_user, items=items)
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False).one().get_id
    expected_restore_payload = dummy_restore_xml(
        sync_log_id,
        const.CREATE_SHORT.format(user_id=self.restore_user.user_id),
        items=4 if items else None,
        user=self.restore_user,
    )
    check_xml_line_by_line(self, expected_restore_payload, restore_payload)

    sync_restore_payload = generate_restore_payload(
        project=self.project,
        user=self.restore_user,
        restore_id=sync_log_id,
        items=items,
    )
    [latest_log] = [log for log in _all_logs() if log.get_id != sync_log_id]
    # should no longer have a case block in the restore XML
    check_xml_line_by_line(
        self,
        dummy_restore_xml(
            latest_log.get_id,
            items=3 if items else None,
            user=self.restore_user,
        ),
        sync_restore_payload,
    )

    # apply an update
    xml_data = self.get_xml('update_short')
    xml_data = xml_data.format(user_id=self.restore_user.user_id)
    submit_form_locally(xml_data, domain=self.project.name)
    sync_restore_payload = generate_restore_payload(
        self.project,
        user=self.restore_user,
        restore_id=latest_log.get_id,
        items=items,
    )
    known_ids = {sync_log_id, latest_log.get_id}
    [even_latest_log] = [log for log in _all_logs() if log.get_id not in known_ids]
    # case block should come back
    expected_sync_restore_payload = dummy_restore_xml(
        even_latest_log.get_id,
        const.UPDATE_SHORT.format(user_id=self.restore_user.user_id),
        items=4 if items else None,
        user=self.restore_user,
    )
    check_xml_line_by_line(self, expected_sync_restore_payload, sync_restore_payload)
def rows(self):
    """Build datatable rows for a user's sync logs, with optional debug columns."""
    base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))
    user_id = self.request.GET.get('individual')
    if not user_id:
        return []
    # security check
    get_document_or_404(CommCareUser, self.domain, user_id)
    sync_log_ids = [row['id'] for row in SyncLog.view(
        "phone/sync_logs_by_user",
        startkey=[user_id, {}],
        endkey=[user_id],
        descending=True,
        reduce=False,
        limit=self.limit,
    )]

    def _sync_log_to_row(sync_log):
        def _fmt_duration(duration):
            if isinstance(duration, int):
                return format_datatables_data(
                    '<span class="{cls}">{text}</span>'.format(
                        cls=_bootstrap_class(duration or 0, 60, 20),
                        text=_('{} seconds').format(duration),
                    ),
                    duration
                )
            else:
                return format_datatables_data(
                    '<span class="label">{text}</span>'.format(
                        text=_("Unknown"),
                    ),
                    -1,
                )

        def _fmt_id(sync_log_id):
            href = base_link_url.format(id=sync_log_id)
            return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                href=href,
                id=sync_log_id
            )

        def _fmt_error_info(sync_log):
            if not sync_log.had_state_error:
                return u'<span class="label label-success">✓</span>'
            else:
                return (u'<span class="label label-important">X</span>'
                        u'State error {}<br>Expected hash: {:.10}...').format(
                    _naturaltime_with_hover(sync_log.error_date),
                    sync_log.error_hash,
                )

        num_cases = sync_log.case_count()
        columns = [
            _fmt_date(sync_log.date),
            format_datatables_data(num_cases, num_cases),
            _fmt_duration(sync_log.duration),
        ]
        if self.show_extra_columns:
            columns.append(_fmt_id(sync_log.get_id))
            columns.append(sync_log.log_format)
            columns.append(_fmt_id(sync_log.previous_log_id)
                           if sync_log.previous_log_id else '---')
            columns.append(_fmt_error_info(sync_log))
            columns.append('{:.10}...'.format(sync_log.get_state_hash()))
            columns.append(_naturaltime_with_hover(sync_log.last_submitted))
            columns.append(u'{}<br>{:.10}'.format(
                _naturaltime_with_hover(sync_log.last_cached),
                sync_log.hash_at_last_cached))
        return columns

    return [
        _sync_log_to_row(properly_wrap_sync_log(sync_log_json))
        for sync_log_json in iter_docs(SyncLog.get_db(), sync_log_ids)
    ]
def rows(self):
    """Render one datatable row per recent sync log for the selected user."""
    base_link_url = '{}?q={{id}}'.format(reverse('global_quick_find'))
    user_id = self.request.GET.get('individual')
    if not user_id:
        return []
    # security check
    get_document_or_404(CommCareUser, self.domain, user_id)
    log_ids = [
        row['id']
        for row in SyncLog.view(
            "phone/sync_logs_by_user",
            startkey=[user_id, {}],
            endkey=[user_id],
            descending=True,
            reduce=False,
            limit=self.limit,
        )
    ]

    def _as_row(sync_log):
        def _duration_cell(duration):
            if isinstance(duration, int):
                return format_datatables_data(
                    '<span class="{cls}">{text}</span>'.format(
                        cls=_bootstrap_class(duration or 0, 60, 20),
                        text=_('{} seconds').format(duration),
                    ),
                    duration)
            return format_datatables_data(
                '<span class="label">{text}</span>'.format(
                    text=_("Unknown"),
                ),
                -1,
            )

        def _id_cell(sync_log_id):
            href = base_link_url.format(id=sync_log_id)
            return '<a href="{href}" target="_blank">{id:.5}...</a>'.format(
                href=href, id=sync_log_id)

        def _error_cell(sync_log):
            if not sync_log.had_state_error:
                return u'<span class="label label-success">✓</span>'
            return (
                u'<span class="label label-important">X</span>'
                u'State error {}<br>Expected hash: {:.10}...').format(
                    _naturaltime_with_hover(sync_log.error_date),
                    sync_log.error_hash,
                )

        num_cases = sync_log.case_count()
        columns = [
            _fmt_date(sync_log.date),
            format_datatables_data(num_cases, num_cases),
            _duration_cell(sync_log.duration),
        ]
        if self.show_extra_columns:
            columns.append(_id_cell(sync_log.get_id))
            columns.append(sync_log.log_format)
            columns.append(_id_cell(sync_log.previous_log_id)
                           if sync_log.previous_log_id else '---')
            columns.append(_error_cell(sync_log))
            columns.append('{:.10}...'.format(sync_log.get_state_hash()))
            columns.append(_naturaltime_with_hover(sync_log.last_submitted))
            columns.append(u'{}<br>{:.10}'.format(
                _naturaltime_with_hover(sync_log.last_cached),
                sync_log.hash_at_last_cached))
        return columns

    return [
        _as_row(properly_wrap_sync_log(log_json))
        for log_json in iter_docs(SyncLog.get_db(), log_ids)
    ]
def setUp(self):
    """Delete any existing sync logs so each test starts fresh."""
    for log in SyncLog.view("phone/sync_logs_by_user",
                            include_docs=True, reduce=False).all():
        log.delete()
def _couch_count(self):
    """Return the number of sync logs currently stored in couch."""
    rows = SyncLog.view("phone/sync_logs_by_user", include_docs=False).all()
    return len(rows)
def _test_sync_token(self, items):
    """
    Tests sync token / sync mode support
    """
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    form = post_xform_to_couch(xml_data, domain=self.project.name)
    process_cases(form)
    time.sleep(1)

    def _all_logs():
        return SyncLog.view(
            "phone/sync_logs_by_user", include_docs=True, reduce=False).all()

    restore_payload = generate_restore_payload(self.project, dummy_user(), items=items)
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).one().get_id
    expected_restore_payload = dummy_restore_xml(
        sync_log_id,
        const.CREATE_SHORT,
        items=4 if items else None,
    )
    check_xml_line_by_line(self, expected_restore_payload, restore_payload)

    time.sleep(1)
    sync_restore_payload = generate_restore_payload(
        project=self.project,
        user=dummy_user(),
        restore_id=sync_log_id,
        items=items,
    )
    [latest_log] = [log for log in _all_logs() if log.get_id != sync_log_id]
    # should no longer have a case block in the restore XML
    check_xml_line_by_line(
        self,
        dummy_restore_xml(latest_log.get_id, items=3 if items else None),
        sync_restore_payload,
    )

    # apply an update
    time.sleep(1)
    file_path = os.path.join(os.path.dirname(__file__), "data", "update_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    form = post_xform_to_couch(xml_data, domain=self.project.name)
    process_cases(form)
    time.sleep(1)

    sync_restore_payload = generate_restore_payload(
        self.project,
        user=dummy_user(),
        restore_id=latest_log.get_id,
        items=items,
    )
    known_ids = {sync_log_id, latest_log.get_id}
    [even_latest_log] = [log for log in _all_logs() if log.get_id not in known_ids]
    # case block should come back
    expected_sync_restore_payload = dummy_restore_xml(
        even_latest_log.get_id,
        const.UPDATE_SHORT,
        items=4 if items else None,
    )
    check_xml_line_by_line(self, expected_sync_restore_payload, sync_restore_payload)
def testUserRestore(self):
    """A first restore for a user creates exactly one sync log and returns
    the expected empty-user restore XML."""
    # Precondition: the test starts with no sync logs at all.
    existing_logs = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    self.assertEqual(0, len(existing_logs))
    payload = generate_restore_payload(dummy_user())
    # The restore must have produced exactly one sync log
    # (destructuring is an implicit length assertion).
    [new_log] = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    check_xml_line_by_line(self, dummy_restore_xml(new_log.get_id), payload)
expected_v2_case_block, xml.get_case_xml( result.case, [case_const.CASE_ACTION_CREATE, case_const.CASE_ACTION_UPDATE], version="2.0", ), ) restore_payload = deprecated_generate_restore_payload( project=self.project, user=self.restore_user, items=True, ) sync_log_id = SyncLog.view( "phone/sync_logs_by_user", include_docs=True, reduce=False, ).one().get_id check_xml_line_by_line( self, dummy_restore_xml(sync_log_id, expected_case_block, items=4, user=self.restore_user), restore_payload ) def testSyncTokenWithItems(self): self._test_sync_token(items=True) def testSyncTokenWithoutItems(self): self._test_sync_token(items=False) def _test_sync_token(self, items):
def testUserRestoreWithCase(self):
    """Submit a case-creating form, then verify the case serializes correctly
    in both v1 and v2 case XML and is included in the user's restore payload.
    """
    # NOTE(review): the triple-quoted XML literals below appear
    # whitespace-collapsed in this copy of the file (internal newlines lost);
    # presumably the originals were multi-line — confirm against upstream
    # before relying on exact line-by-line comparison behavior.
    xml_data = self.get_xml('create_short')
    xml_data = xml_data.format(user_id=self.restore_user.user_id)
    # implicit length assertion
    _, _, [newcase] = submit_form_locally(xml_data, domain=self.project.name)
    # Expected v1 serialization of the case created above.
    expected_case_block = """ <case> <case_id>asdf</case_id> <date_modified>2010-06-29T13:42:50.000000Z</date_modified> <create> <case_type_id>test_case_type</case_type_id> <user_id>{user_id}</user_id> <case_name>test case name</case_name> <external_id>someexternal</external_id> </create> <update> <date_opened>2010-06-29</date_opened> </update> </case>""".format(user_id=self.restore_user.user_id)
    check_xml_line_by_line(
        self,
        expected_case_block,
        xml.get_case_xml(
            newcase,
            [case_const.CASE_ACTION_CREATE,
             case_const.CASE_ACTION_UPDATE]))
    # check v2
    # Note the v2 format moves owner_id into <create> and external_id into
    # <update>, per the expected literal below.
    expected_v2_case_block = """ <case case_id="asdf" date_modified="2010-06-29T13:42:50.000000Z" user_id="{user_id}" xmlns="http://commcarehq.org/case/transaction/v2" > <create> <case_type>test_case_type</case_type> <case_name>test case name</case_name> <owner_id>{user_id}</owner_id> </create> <update> <external_id>someexternal</external_id> <date_opened>2010-06-29</date_opened> </update> </case>""".format(user_id=self.restore_user.user_id)
    check_xml_line_by_line(
        self,
        expected_v2_case_block,
        xml.get_case_xml(
            newcase,
            [case_const.CASE_ACTION_CREATE, case_const.CASE_ACTION_UPDATE],
            version="2.0",
        ),
    )
    # Finally, a full restore should include the case block.
    restore_payload = generate_restore_payload(
        project=self.project,
        user=self.restore_user,
        items=True,
    )
    # the restore produced exactly one sync log; .one() asserts that
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).one().get_id
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log_id, expected_case_block, items=4,
                          user=self.restore_user),
        restore_payload)
def testUserRestoreWithCase(self):
    """Submit a case-creating form for a dummy user, then verify v1/v2 case
    XML serialization and the case's inclusion in the restore payload.
    """
    # NOTE(review): the triple-quoted XML literals below appear
    # whitespace-collapsed in this copy of the file (internal newlines lost);
    # presumably the originals were multi-line — confirm against upstream.
    file_path = os.path.join(os.path.dirname(__file__), "data", "create_short.xml")
    with open(file_path, "rb") as f:
        xml_data = f.read()
    form = post_xform_to_couch(xml_data, domain=self.project.name)
    # implicit length assertion
    [newcase] = process_cases(form)
    user = dummy_user()
    # exactly one case update should be pending for this user's sync
    self.assertEqual(1, len(list(
        BatchedCaseSyncOperation(RestoreState(self.project, user, RestoreParams())).get_all_case_updates()
    )))
    # Expected v1 serialization of the case created above.
    expected_case_block = """ <case> <case_id>asdf</case_id> <date_modified>2010-06-29T13:42:50.000000Z</date_modified> <create> <case_type_id>test_case_type</case_type_id> <user_id>foo</user_id> <case_name>test case name</case_name> <external_id>someexternal</external_id> </create> </case>"""
    check_xml_line_by_line(
        self,
        expected_case_block,
        xml.get_case_xml(
            newcase,
            [case_const.CASE_ACTION_CREATE,
             case_const.CASE_ACTION_UPDATE]))
    # check v2
    expected_v2_case_block = """ <case case_id="asdf" date_modified="2010-06-29T13:42:50.000000Z" user_id="foo" xmlns="http://commcarehq.org/case/transaction/v2" > <create> <case_type>test_case_type</case_type> <case_name>test case name</case_name> <owner_id>foo</owner_id> </create> <update> <external_id>someexternal</external_id> </update> </case>"""
    check_xml_line_by_line(
        self,
        expected_v2_case_block,
        xml.get_case_xml(
            newcase,
            [case_const.CASE_ACTION_CREATE, case_const.CASE_ACTION_UPDATE],
            version="2.0",
        ),
    )
    # A full restore should include the case block.
    restore_payload = generate_restore_payload(
        project=self.project,
        user=dummy_user(),
        items=True,
    )
    # the restore produced exactly one sync log; .one() asserts that
    sync_log_id = SyncLog.view(
        "phone/sync_logs_by_user",
        include_docs=True,
        reduce=False,
    ).one().get_id
    check_xml_line_by_line(
        self,
        dummy_restore_xml(sync_log_id, expected_case_block, items=4),
        restore_payload
    )
def update_analytics_indexes():
    """Query the sync-log view once, presumably to trigger a couch view
    index refresh for analytics; the result itself is discarded."""
    SyncLog.view("phone/sync_logs_by_user", reduce=False, limit=1)
def setUp(self):
    """Start each test from a clean slate: delete every existing case
    and every existing sync log."""
    # clear cases
    stale_cases = CommCareCase.view(
        "case/by_user", reduce=False, include_docs=True
    ).all()
    for stale_case in stale_cases:
        stale_case.delete()
    # clear sync logs
    stale_logs = SyncLog.view(
        "phone/sync_logs_by_user", include_docs=True, reduce=False
    ).all()
    for stale_log in stale_logs:
        stale_log.delete()