def setUpClass(cls):
    """Create the ES case/user indices and seed two cases in a fresh domain.

    Stores on the class: ``es`` (ES client), ``domain`` (random hex name),
    ``now`` (UTC reference timestamp the seeded cases are offset from).
    """
    super().setUpClass()
    # Fixed typo in the failure message ("elasicsearch" -> "elasticsearch").
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, CASE_INDEX_INFO)
        initialize_index_and_mapping(cls.es, USER_INDEX_INFO)
    cls.domain = uuid.uuid4().hex
    create_domain(cls.domain)
    cls.now = datetime.utcnow()
    # Two cases with distinct server_modified_on offsets so tests can
    # select them independently with date-window filters.
    cases = [
        new_case(domain=cls.domain, foo="apple", bar="banana",
                 server_modified_on=cls.now - timedelta(hours=3)),
        new_case(domain=cls.domain, foo="orange", bar="pear",
                 server_modified_on=cls.now - timedelta(hours=2)),
    ]
    for case in cases:
        send_to_elasticsearch('cases', case.to_json())
    # Force a refresh so the seeded docs are searchable immediately.
    cls.es.indices.refresh(CASE_INDEX_INFO.index)
def test_incremental_success(self):
    """A successful checkpoint causes the next run to export only newer docs."""
    # Reuse the checkpoint produced by the initial-export test, then mark it
    # successful so the next incremental run starts from its last_doc_date.
    checkpoint = self.test_initial()
    checkpoint.status = IncrementalExportStatus.SUCCESS
    checkpoint.save()
    # Add one case newer than the checkpoint's last_doc_date.
    case = new_case(domain=self.domain, foo="peach", bar="plumb",
                    server_modified_on=datetime.utcnow())
    send_to_elasticsearch('cases', case.to_json())
    self.es.indices.refresh(CASE_INDEX_INFO.index)
    # NOTE(review): _cleanup_case(...) is invoked immediately — presumably it
    # returns the cleanup callable that addCleanup registers; confirm against
    # its definition.
    self.addCleanup(self._cleanup_case(case.case_id))
    # Incremental run from the previous checkpoint: only the new case appears.
    checkpoint = _generate_incremental_export(
        self.incremental_export, last_doc_date=checkpoint.last_doc_date)
    data = checkpoint.get_blob().read().decode('utf-8-sig')
    expected = "Foo column,Bar column\r\npeach,plumb\r\n"
    self.assertEqual(data, expected)
    self.assertEqual(checkpoint.doc_count, 1)
    # Rewinding last_doc_date past the "orange" case (seeded 2h before
    # self.now) picks up both that case and the new one.
    checkpoint = _generate_incremental_export(
        self.incremental_export,
        last_doc_date=self.now - timedelta(hours=2, minutes=1))
    data = checkpoint.get_blob().read().decode("utf-8-sig")
    expected = "Foo column,Bar column\r\norange,pear\r\npeach,plumb\r\n"
    self.assertEqual(data, expected)
    self.assertEqual(checkpoint.doc_count, 2)
    # Initial run + the two incremental runs above => three checkpoints.
    self.assertEqual(self.incremental_export.checkpoints.count(), 3)
def setUpClass(cls):
    """Initialise case and group pillow indices and seed test documents."""
    case_pillow = CasePillow(online=False)
    group_pillow = GroupPillow(online=False)
    cls.pillows = [case_pillow, group_pillow]
    for pillow in cls.pillows:
        completely_initialize_pillow_index(pillow)
    # Seed cases covering the closed/open x owner combinations.
    for case_kwargs in (
        dict(closed=True),
        dict(closed=False),
        dict(closed=True, owner_id="foo"),
        dict(closed=False, owner_id="bar"),
    ):
        case_pillow.send_robust(new_case(**case_kwargs).to_json())
    # One group containing both seeded owners.
    group = Group(_id=uuid.uuid4().hex, users=["foo", "bar"])
    cls.group_id = group.get_id
    group_pillow.send_robust(group.to_json())
    # Refresh each index so the documents are searchable immediately.
    for pillow in cls.pillows:
        pillow.get_es_new().indices.refresh(pillow.es_index)
def setUpClass(cls):
    """Initialise case and group pillow indices and seed test documents.

    Identical to the sibling setup but guards index initialisation with
    ``trap_extra_setup`` so a missing ES server skips rather than errors.
    """
    case_pillow = CasePillow(online=False)
    group_pillow = GroupPillow(online=False)
    cls.pillows = [case_pillow, group_pillow]
    # Fixed typo in the failure message ("elasicsearch" -> "elasticsearch").
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        for pillow in cls.pillows:
            completely_initialize_pillow_index(pillow)
    # Seed cases covering the closed/open x owner combinations.
    for case_kwargs in (
        dict(closed=True),
        dict(closed=False),
        dict(closed=True, owner_id="foo"),
        dict(closed=False, owner_id="bar"),
    ):
        case_pillow.send_robust(new_case(**case_kwargs).to_json())
    group = Group(_id=uuid.uuid4().hex, users=["foo", "bar"])
    cls.group_id = group.get_id
    group_pillow.send_robust(group.to_json())
    # Refresh each index so the documents are searchable immediately.
    for pillow in cls.pillows:
        pillow.get_es_new().indices.refresh(pillow.es_index)
def setUpClass(cls):
    """Seed ES with two cases and build an export instance + incremental export.

    Stores on the class: ``es``, ``domain``, ``export_instance`` (two-column
    CSV case export), and ``incremental_export`` (linked to a basic-auth
    connection-settings record).
    """
    super().setUpClass()
    # Fixed typo in the failure message ("elasicsearch" -> "elasticsearch").
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, CASE_INDEX_INFO)
    cls.domain = uuid.uuid4().hex
    now = datetime.utcnow()
    # Two cases at distinct modification times, for date-window filtering.
    cases = [
        new_case(domain=cls.domain, foo="apple", bar="banana",
                 server_modified_on=now - timedelta(hours=3)),
        new_case(domain=cls.domain, foo="orange", bar="pear",
                 server_modified_on=now - timedelta(hours=2)),
    ]
    for case in cases:
        send_to_elasticsearch('cases', case.to_json())
    cls.es.indices.refresh(CASE_INDEX_INFO.index)
    # Export configuration: one table selecting the "foo" and "bar"
    # case properties as two labelled columns.
    cls.export_instance = CaseExportInstance(
        export_format=Format.UNZIPPED_CSV,
        domain=cls.domain,
        case_type=DEFAULT_CASE_TYPE,
        tables=[
            TableConfiguration(
                label="My table",
                selected=True,
                path=[],
                columns=[
                    ExportColumn(
                        label="Foo column",
                        item=ExportItem(path=[PathNode(name="foo")]),
                        selected=True,
                    ),
                    ExportColumn(
                        label="Bar column",
                        item=ExportItem(path=[PathNode(name="bar")]),
                        selected=True,
                    ),
                ],
            ),
        ],
    )
    cls.export_instance.save()
    cls.incremental_export = IncrementalExport.objects.create(
        domain=cls.domain,
        name='test_export',
        export_instance_id=cls.export_instance.get_id,
        connection_settings=ConnectionSettings.objects.create(
            domain=cls.domain,
            name='test conn',
            url='http://somewhere',
            auth_type=BASIC_AUTH,
        ),
    )
def setUpClass(cls):
    """Initialise the case index and seed cases that exercise export filters.

    Seeds one matching case plus three that differ by owner, type, or
    closed status, then clears the cache so stale lookups don't leak in.
    """
    super(ExportTest, cls).setUpClass()
    # Fixed typo in the failure message ("elasicsearch" -> "elasticsearch").
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, CASE_INDEX_INFO)
    # One case per filter dimension: baseline, other owner, other type, closed.
    for case_kwargs in (
        dict(_id='robin', name='batman', foo="apple", bar="banana", date='2016-4-24'),
        dict(owner_id="some_other_owner", foo="apple", bar="banana", date='2016-4-04'),
        dict(type="some_other_type", foo="apple", bar="banana"),
        dict(closed=True, foo="apple", bar="banana"),
    ):
        send_to_elasticsearch('cases', new_case(**case_kwargs).to_json())
    cls.es.indices.refresh(CASE_INDEX_INFO.index)
    cache.clear()
def setUpClass(cls):
    """Initialise the case pillow index and seed cases for filter tests."""
    cls.case_pillow = CasePillow(online=False)
    completely_initialize_pillow_index(cls.case_pillow)
    # One case per filter dimension: baseline, other owner, other type, closed.
    for case_kwargs in (
        dict(foo="apple", bar="banana"),
        dict(owner_id="some_other_owner", foo="apple", bar="banana"),
        dict(type="some_other_type", foo="apple", bar="banana"),
        dict(closed=True, foo="apple", bar="banana"),
    ):
        cls.case_pillow.send_robust(new_case(**case_kwargs).to_json())
    # Refresh so the seeded documents are searchable immediately.
    cls.case_pillow.get_es_new().indices.refresh(cls.case_pillow.es_index)
def setUpClass(cls):
    """Initialise the case pillow index and seed cases for filter tests.

    Guards index initialisation with ``trap_extra_setup`` so a missing ES
    server skips the tests rather than erroring.
    """
    cls.case_pillow = CasePillow(online=False)
    # Fixed typo in the failure message ("elasicsearch" -> "elasticsearch").
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        completely_initialize_pillow_index(cls.case_pillow)
    # One case per filter dimension: baseline, other owner, other type, closed.
    for case_kwargs in (
        dict(foo="apple", bar="banana", date='2016-4-24'),
        dict(owner_id="some_other_owner", foo="apple", bar="banana", date='2016-4-04'),
        dict(type="some_other_type", foo="apple", bar="banana"),
        dict(closed=True, foo="apple", bar="banana"),
    ):
        cls.case_pillow.send_robust(new_case(**case_kwargs).to_json())
    # Refresh so the seeded documents are searchable immediately.
    cls.case_pillow.get_es_new().indices.refresh(cls.case_pillow.es_index)
def setUpClass(cls):
    """Initialise form/case pillows plus the group index, and seed documents.

    Seeds four cases (closed/open x owner), one group containing both
    owners, and four forms with varying ``meta.userID`` values (missing,
    empty, absent-with-deviceID, and a real id).
    """
    form_pillow = XFormPillow(online=False)
    case_pillow = CasePillow(online=False)
    cls.pillows = [form_pillow, case_pillow]
    es = get_es_new()
    # Fixed typo in the failure message ("elasicsearch" -> "elasticsearch").
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        for pillow in cls.pillows:
            completely_initialize_pillow_index(pillow)
        initialize_index_and_mapping(es, GROUP_INDEX_INFO)
    # Cases covering the closed/open x owner combinations.
    for case_kwargs in (
        dict(closed=True),
        dict(closed=False),
        dict(closed=True, owner_id="foo"),
        dict(closed=False, owner_id="bar"),
    ):
        case_pillow.send_robust(new_case(**case_kwargs).to_json())
    group = Group(_id=uuid.uuid4().hex, users=["foo", "bar"])
    cls.group_id = group._id
    send_to_elasticsearch('groups', group.to_json())
    # Forms with the various userID shapes the report code must handle.
    for form_meta in (
        {"userID": None},
        {"userID": ""},
        {"deviceID": "abc"},
        {"userID": uuid.uuid4().hex},
    ):
        form_pillow.send_robust(new_form(form={"meta": form_meta}).to_json())
    # Refresh every index so the documents are searchable immediately.
    for pillow in cls.pillows:
        pillow.get_es_new().indices.refresh(pillow.es_index)
    es.indices.refresh(GROUP_INDEX_INFO.index)
def setUpClass(cls):
    """(Re)create the case/group/form indices and seed documents directly via ES.

    Seeds four cases (closed/open x owner), one group containing both
    owners, and four forms with varying ``meta.userID`` values.
    """
    # Fixed typo in the failure message ("elasicsearch" -> "elasticsearch").
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        es = get_es_new()
        # NOTE(review): tearDownClass is invoked up front, presumably to
        # drop indices left over from a previous run — confirm it is safe
        # to call before setUp has completed.
        cls.tearDownClass()
        initialize_index_and_mapping(es, CASE_INDEX_INFO)
        initialize_index_and_mapping(es, GROUP_INDEX_INFO)
        initialize_index_and_mapping(es, XFORM_INDEX_INFO)
    # Cases covering the closed/open x owner combinations.
    for case_kwargs in (
        dict(closed=True),
        dict(closed=False),
        dict(closed=True, owner_id="foo"),
        dict(closed=False, owner_id="bar"),
    ):
        send_to_elasticsearch('cases', new_case(**case_kwargs).to_json())
    group = Group(_id=uuid.uuid4().hex, users=["foo", "bar"])
    cls.group_id = group._id
    send_to_elasticsearch('groups', group.to_json())
    # Forms with the various userID shapes the report code must handle.
    for form_meta in (
        {"userID": None},
        {"userID": ""},
        {"deviceID": "abc"},
        {"userID": uuid.uuid4().hex},
    ):
        send_to_elasticsearch('forms', new_form(form={"meta": form_meta}).to_json())
    # Refresh every index so the documents are searchable immediately.
    es.indices.refresh(CASE_INDEX_INFO.index)
    es.indices.refresh(XFORM_INDEX_INFO.index)
    es.indices.refresh(GROUP_INDEX_INFO.index)
def test_owner_filter(self):
    """Location + user owner filters select the matching cases only.

    Builds a four-level location tree, puts a mobile worker in Team1
    (under HealthDepartment1), then checks that an export filtered to
    that health department plus explicit user id '123' includes exactly
    the cases owned by those two, and excludes the unowned case.
    """
    # Location tree: State1 > HealthDepartment1 > {Team1 > {SubTeam1,
    # SubTeam2}, Team2}.
    setup_locations_and_types(
        self.domain,
        ['state', 'health-department', 'team', 'sub-team'],
        [],
        [('State1', [
            ('HealthDepartment1', [
                ('Team1', [
                    ('SubTeam1', []),
                    ('SubTeam2', []),
                ]),
                ('Team2', []),
            ]),
        ])])
    team1 = SQLLocation.objects.filter(domain=self.domain, name='Team1').first()
    health_department = SQLLocation.objects.filter(
        domain=self.domain, name='HealthDepartment1').first()
    self.addCleanup(delete_all_locations)
    # Mobile worker assigned to Team1, indexed into ES so the owner
    # filter can resolve the location membership.
    user = CommCareUser.create(self.domain, 'm2', 'abc', None, None, location=team1)
    send_to_elasticsearch('users', user.to_json())
    self.es.indices.refresh(USER_INDEX_INFO.index)
    self.addCleanup(delete_all_users)
    cases = [
        # Owned by the explicitly-listed user id '123'.
        new_case(
            domain=self.domain,
            foo="peach",
            bar="plumb",
            server_modified_on=datetime.utcnow() + timedelta(hours=-1),
            owner_id='123',
        ),
        # Owned by the Team1 user, so matched via the location filter.
        new_case(
            domain=self.domain,
            foo="orange",
            bar="melon",
            server_modified_on=datetime.utcnow(),
            owner_id=user.user_id,  # this user is part of the team1 location.
        ),
        # No owner_id: should be excluded once show_project_data is off.
        new_case(
            domain=self.domain,
            foo="grape",
            bar="pineapple",
            server_modified_on=datetime.utcnow(),
        ),
    ]
    for case in cases:
        send_to_elasticsearch("cases", case.to_json())
        # NOTE(review): _cleanup_case(...) is invoked immediately —
        # presumably it returns the cleanup callable; confirm.
        self.addCleanup(self._cleanup_case(case.case_id))
    self.es.indices.refresh(CASE_INDEX_INFO.index)
    # Restrict the export to the health department location + user '123'.
    self.export_instance.filters.show_project_data = False
    self.export_instance.filters.locations = [
        health_department.location_id
    ]
    self.export_instance.filters.users = ['123']
    self.export_instance.save()
    checkpoint = _generate_incremental_export(self.incremental_export)
    data = checkpoint.get_blob().read().decode("utf-8-sig")
    expected = "Foo column,Bar column\r\npeach,plumb\r\norange,melon\r\n"
    self.assertEqual(data, expected)