def test_get_list(self):
    """
    A case saved in the matching domain must appear in the API list response.
    """
    # Production flow is CouchDB save -> PillowTop -> ElasticSearch. Here the
    # pillow runs offline and we index into a fake ES, so no real
    # elasticsearch instance is needed.
    case_pillow = CasePillow(online=False)
    fake_case_es = FakeXFormES()
    v0_4.MOCK_CASE_ES = fake_case_es

    now = datetime.utcnow()
    backend_case = CommCareCase(server_modified_on=now, domain=self.domain.name)
    backend_case.save()

    # Run the couch doc through the pillow transform and add it to the fake index.
    es_doc = case_pillow.change_transform(backend_case.to_json())
    fake_case_es.add_doc(es_doc['_id'], es_doc)

    self.client.login(username=self.username, password=self.password)
    response = self.client.get(self.list_endpoint)
    self.assertEqual(response.status_code, 200)

    api_cases = simplejson.loads(response.content)['objects']
    self.assertEqual(len(api_cases), 1)
    self.assertEqual(
        dateutil.parser.parse(api_cases[0]['server_date_modified']),
        backend_case.server_modified_on,
    )
    backend_case.delete()
def finish_handle(self):
    """Recompute the case mapping hash and warn on stderr if CASE_INDEX is stale."""
    filepath = os.path.join(
        settings.FILEPATH, 'corehq', 'pillows', 'mappings', 'case_mapping.py')
    pillow = CasePillow(create_index=False)

    # Index name the pillow currently points at.
    live_index = pillow.es_index
    sys.stderr.write("current index:\n")
    sys.stderr.write('CASE_INDEX="%s"\n' % live_index)

    # Point the pillow at the checked-in mapping and clear the cached meta
    # hash so calc_meta() recomputes it from scratch.
    pillow.default_mapping = case_mapping.CASE_MAPPING
    delattr(pillow, '_calc_meta_cache')
    recomputed_index = "%s_%s" % (pillow.es_index_prefix, pillow.calc_meta())

    if recomputed_index == live_index:
        sys.stderr.write("CASE_INDEX unchanged\n")
        return

    sys.stderr.write("############# HEADS UP!!! #################\n")
    sys.stderr.write(
        "CASE_INDEX hash has changed, please update \n\t%s\n\tCASE_INDEX property with the line below:\n"
        % filepath)
    # The replacement line goes to stdout so it can be piped/copied cleanly,
    # separate from the stderr chatter.
    sys.stdout.write('CASE_INDEX="%s"\n' % recomputed_index)
def finish_handle(self):
    """
    Regenerate the case ES mapping from the doc class and overwrite
    case_mapping.py with the new CASE_INDEX hash and CASE_MAPPING dict.

    Warns on stderr when the freshly computed index name is neither aliased
    nor equal to the pillow's current index.
    """
    filepath = os.path.join(settings.FILEPATH, 'submodules', 'core-hq-src', 'corehq', 'pillows', 'mappings', 'case_mapping.py')
    casepillow = CasePillow(create_index=False)
    #current index
    #check current index
    aliased_indices = casepillow.check_alias()
    # current_index = '%s_%s' % (casepillow.es_index_prefix, casepillow.calc_meta())
    current_index = casepillow.es_index
    #regenerate the mapping dict
    # NOTE(review): m aliases the shared DEFAULT_MAPPING_WRAPPER dict — the
    # assignments below mutate that module-level object, not a copy.
    m = DEFAULT_MAPPING_WRAPPER
    m['properties'] = dynamic.set_properties(self.doc_class, custom_types=case_special_types)
    m['_meta']['comment'] = "Autogenerated [%s] mapping from ptop_generate_mapping %s" % (self.doc_class_str, datetime.utcnow().strftime('%m/%d/%Y'))
    casepillow.default_mapping = m
    # Drop the cached meta hash so calc_meta() recomputes from the new mapping.
    delattr(casepillow, '_calc_meta_cache')
    output = []
    output.append('CASE_INDEX="%s_%s"' % (casepillow.es_index_prefix, casepillow.calc_meta()))
    output.append('CASE_MAPPING=%s' % pprint.pformat(m))
    newcalc_index = "%s_%s" % (casepillow.es_index_prefix, casepillow.calc_meta())
    print "Writing new case_index and mapping: %s" % output[0]
    # Overwrite the mapping module with the regenerated constants.
    with open(filepath, 'w') as outfile:
        outfile.write('\n'.join(output))
    if newcalc_index not in aliased_indices and newcalc_index != current_index:
        sys.stderr.write("\n\tWarning, current index %s is not aliased at the moment\n" % current_index)
        sys.stderr.write("\tCurrent live aliased index: %s\n\n" % (','.join(aliased_indices)))
    sys.stderr.write("File written to %s\n" % filepath)
def finish_handle(self):
    """Compare the stored CASE_INDEX against a freshly computed mapping hash."""
    mapping_file = os.path.join(
        settings.FILEPATH, 'corehq', 'pillows', 'mappings', 'case_mapping.py')
    pillow = CasePillow(create_index=False)

    # Report the index currently baked into the pillow.
    existing_index = pillow.es_index
    sys.stderr.write("current index:\n")
    sys.stderr.write('CASE_INDEX="%s"\n' % existing_index)

    # Install the checked-in mapping, then clear the cached meta hash so
    # calc_meta() recomputes it rather than reusing the stale value.
    pillow.default_mapping = case_mapping.CASE_MAPPING
    delattr(pillow, '_calc_meta_cache')
    fresh_index = "%s_%s" % (pillow.es_index_prefix, pillow.calc_meta())

    if fresh_index != existing_index:
        sys.stderr.write("############# HEADS UP!!! #################\n")
        sys.stderr.write(
            "CASE_INDEX hash has changed, please update \n\t%s\n\tCASE_INDEX property with the line below:\n"
            % mapping_file)
        # Copy-pasteable replacement line goes to stdout, apart from the warnings.
        sys.stdout.write('CASE_INDEX="%s"\n' % fresh_index)
    else:
        sys.stderr.write("CASE_INDEX unchanged\n")
def test_get_list(self): """ Any case in the appropriate domain should be in the list from the API. """ # The actual infrastructure involves saving to CouchDB, having PillowTop # read the changes and write it to ElasticSearch. #the pillow is set to offline mode - elasticsearch not needed to validate pillow = CasePillow(online=False) fake_case_es = FakeXFormES() v0_4.MOCK_CASE_ES = fake_case_es modify_date = datetime.utcnow() backend_case = CommCareCase(server_modified_on=modify_date, domain=self.domain.name) backend_case.type = CC_BIHAR_PREGNANCY backend_case.save() translated_doc = pillow.change_transform(backend_case.to_json()) fake_case_es.add_doc(translated_doc['_id'], translated_doc) self.client.login(username=self.username, password=self.password) response = self.client.get(self.list_endpoint) self.assertEqual(response.status_code, 200) api_cases = simplejson.loads(response.content)['objects'] self.assertEqual(len(api_cases), 2) api_case = api_cases['mother_lists'][0] self.assertEqual(dateutil.parser.parse(api_case['server_date_modified']), backend_case.server_modified_on) backend_case.delete()
def setUpClass(cls):
    """Initialize fresh case/group indices and seed four cases plus one group."""
    case_pillow = CasePillow(online=False)
    group_pillow = GroupPillow(online=False)
    cls.pillows = [case_pillow, group_pillow]
    for pillow in cls.pillows:
        completely_initialize_pillow_index(pillow)

    # Two closed / two open cases, the latter pair with explicit owners.
    case_specs = [
        dict(closed=True),
        dict(closed=False),
        dict(closed=True, owner_id="foo"),
        dict(closed=False, owner_id="bar"),
    ]
    for spec in case_specs:
        case_pillow.send_robust(new_case(**spec).to_json())

    group = Group(_id=uuid.uuid4().hex, users=["foo", "bar"])
    cls.group_id = group.get_id
    group_pillow.send_robust(group.to_json())

    # Refresh so the seeded docs are immediately searchable in the tests.
    for pillow in cls.pillows:
        pillow.get_es_new().indices.refresh(pillow.es_index)
def setUpClass(cls):
    """Initialize indices (tolerating a missing ES cluster) and seed test docs."""
    case_pillow = CasePillow(online=False)
    group_pillow = GroupPillow(online=False)
    cls.pillows = [case_pillow, group_pillow]

    # Only the index initialization contacts the cluster up front; wrap it so
    # an unreachable elasticsearch is reported as a setup problem.
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasicsearch"):
        for pillow in cls.pillows:
            completely_initialize_pillow_index(pillow)

    for kwargs in (
        {'closed': True},
        {'closed': False},
        {'closed': True, 'owner_id': "foo"},
        {'closed': False, 'owner_id': "bar"},
    ):
        case_pillow.send_robust(new_case(**kwargs).to_json())

    group = Group(_id=uuid.uuid4().hex, users=["foo", "bar"])
    cls.group_id = group.get_id
    group_pillow.send_robust(group.to_json())

    # Make the seeded docs visible to searches right away.
    for pillow in cls.pillows:
        pillow.get_es_new().indices.refresh(pillow.es_index)
def testOwnerIDSetOnTransform(self):
    """
    change_transform should leave owner_id populated on the resulting doc
    whether or not the raw case fixture carried one.
    """
    pillow = CasePillow(create_index=False, online=False)
    transformed_with_owner = pillow.change_transform(CASE_WITH_OWNER_ID)
    transformed_without_owner = pillow.change_transform(CASE_NO_OWNER_ID)
    # Both fixtures come out of the transform with the same owner value.
    self.assertEqual(transformed_with_owner.get("owner_id"), "testuser")
    self.assertEqual(transformed_without_owner.get("owner_id"), "testuser")
def setUpClass(cls):
    """Seed case, group, and form documents into freshly built ES indices."""
    form_pillow = XFormPillow(online=False)
    case_pillow = CasePillow(online=False)
    cls.pillows = [form_pillow, case_pillow]
    es = get_es_new()

    # Index creation is the only part that must reach the cluster up front.
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasicsearch"):
        for pillow in cls.pillows:
            completely_initialize_pillow_index(pillow)
        initialize_index_and_mapping(es, GROUP_INDEX_INFO)

    # Two closed / two open cases, two with explicit owners.
    for case_kwargs in (
        {'closed': True},
        {'closed': False},
        {'closed': True, 'owner_id': "foo"},
        {'closed': False, 'owner_id': "bar"},
    ):
        case_pillow.send_robust(new_case(**case_kwargs).to_json())

    group = Group(_id=uuid.uuid4().hex, users=["foo", "bar"])
    cls.group_id = group._id
    send_to_elasticsearch('groups', group.to_json())

    # Forms covering null, empty, absent, and real userID metadata.
    for meta in (
        {"userID": None},
        {"userID": ""},
        {"deviceID": "abc"},
        {"userID": uuid.uuid4().hex},
    ):
        form_pillow.send_robust(new_form(form={"meta": meta}).to_json())

    # Refresh every index so the docs are searchable immediately.
    for pillow in cls.pillows:
        pillow.get_es_new().indices.refresh(pillow.es_index)
    es.indices.refresh(GROUP_INDEX_INFO.index)
def finish_handle(self):
    """
    Regenerate the case ES mapping from the doc class and overwrite
    case_mapping.py with the new CASE_INDEX hash and CASE_MAPPING dict.

    Warns on stderr when the freshly computed index name is neither aliased
    nor equal to the pillow's current index.
    """
    filepath = os.path.join(settings.FILEPATH, 'corehq', 'pillows', 'mappings', 'case_mapping.py')
    casepillow = CasePillow(create_index=False)
    #current index
    #check current index
    aliased_indices = casepillow.check_alias()
    # current_index = '%s_%s' % (casepillow.es_index_prefix, casepillow.calc_meta())
    current_index = casepillow.es_index
    #regenerate the mapping dict
    # NOTE(review): m aliases the shared DEFAULT_MAPPING_WRAPPER dict — the
    # assignments below mutate that module-level object, not a copy.
    m = DEFAULT_MAPPING_WRAPPER
    m['properties'] = dynamic.set_properties(
        self.doc_class, custom_types=case_special_types)
    m['_meta'][
        'comment'] = "Autogenerated [%s] mapping from ptop_generate_mapping %s" % (
        self.doc_class_str, datetime.utcnow().strftime('%m/%d/%Y'))
    casepillow.default_mapping = m
    # Drop the cached meta hash so calc_meta() recomputes from the new mapping.
    delattr(casepillow, '_calc_meta_cache')
    output = []
    output.append('CASE_INDEX="%s_%s"' % (casepillow.es_index_prefix, casepillow.calc_meta()))
    output.append('CASE_MAPPING=%s' % pprint.pformat(m))
    newcalc_index = "%s_%s" % (casepillow.es_index_prefix, casepillow.calc_meta())
    print "Writing new case_index and mapping: %s" % output[0]
    # Overwrite the mapping module with the regenerated constants.
    with open(filepath, 'w') as outfile:
        outfile.write('\n'.join(output))
    if newcalc_index not in aliased_indices and newcalc_index != current_index:
        sys.stderr.write(
            "\n\tWarning, current index %s is not aliased at the moment\n" % current_index)
        sys.stderr.write("\tCurrent live aliased index: %s\n\n" % (','.join(aliased_indices)))
    sys.stderr.write("File written to %s\n" % filepath)
def setUp(self):
    """Start each test with no cases and no pre-existing case index."""
    FormProcessorTestUtils.delete_all_cases()
    pillow = CasePillow()
    self.pillow = pillow
    self.elasticsearch = pillow.get_es_new()
    # Remove any leftover index from a previous run before the test builds its own.
    delete_es_index(pillow.es_index)
class CasePillowTest(TestCase):
    """End-to-end tests for indexing cases into elasticsearch via the case
    pillow, covering both the couch change feed and the SQL/kafka path."""

    domain = 'case-pillowtest-domain'

    def setUp(self):
        # Fresh slate: no cases in the backend and no pre-existing ES index.
        FormProcessorTestUtils.delete_all_cases()
        self.pillow = CasePillow()
        self.elasticsearch = self.pillow.get_es_new()
        delete_es_index(self.pillow.es_index)

    def tearDown(self):
        ensure_index_deleted(self.pillow.es_index)

    def test_case_pillow_couch(self):
        """A couch-backed case synced through the pillow shows up in CaseES."""
        # make a case
        case_id = uuid.uuid4().hex
        case_name = 'case-name-{}'.format(uuid.uuid4().hex)
        case = self._make_a_case(case_id, case_name)

        # send to elasticsearch
        self._sync_couch_cases_to_es()

        # verify there
        results = CaseES().run()
        self.assertEqual(1, results.total)
        case_doc = results.hits[0]
        self.assertEqual(self.domain, case_doc['domain'])
        self.assertEqual(case_id, case_doc['_id'])
        self.assertEqual(case_name, case_doc['name'])

        # cleanup
        case.delete()

    def test_case_soft_deletion(self):
        """Soft-deleting a case removes its document from the ES index."""
        # make a case
        case_id = uuid.uuid4().hex
        case_name = 'case-name-{}'.format(uuid.uuid4().hex)
        case = self._make_a_case(case_id, case_name)

        # send to elasticsearch
        self._sync_couch_cases_to_es()

        # verify there
        results = CaseES().run()
        self.assertEqual(1, results.total)

        # Capture the feed position before deleting so the follow-up sync
        # only replays the deletion change.
        seq_before_deletion = self.pillow.get_change_feed().get_latest_change_id()

        # soft delete the case
        case.soft_delete()

        # sync to elasticsearch
        self._sync_couch_cases_to_es(since=seq_before_deletion)

        # ensure not there anymore
        results = CaseES().run()
        self.assertEqual(0, results.total)

        # cleanup
        case.delete()

    @override_settings(TESTS_SHOULD_USE_SQL_BACKEND=True)
    def test_case_pillow_sql(self):
        """A SQL-backed case flows through kafka into the ES index."""
        consumer = get_test_kafka_consumer(topics.CASE_SQL)
        # have to get the seq id before the change is processed
        kafka_seq = consumer.offsets()['fetch'][(topics.CASE_SQL, 0)]

        # make a case
        case_id = uuid.uuid4().hex
        case_name = 'case-name-{}'.format(uuid.uuid4().hex)
        case = self._make_a_case(case_id, case_name)

        # confirm change made it to kafka
        message = consumer.next()
        change_meta = change_meta_from_kafka_message(message.value)
        self.assertEqual(case.case_id, change_meta.document_id)
        self.assertEqual(self.domain, change_meta.domain)

        # send to elasticsearch
        sql_pillow = get_sql_case_to_elasticsearch_pillow()
        sql_pillow.process_changes(since=kafka_seq, forever=False)
        self.elasticsearch.indices.refresh(self.pillow.es_index)

        # confirm change made it to elasticserach
        results = CaseES().run()
        self.assertEqual(1, results.total)
        case_doc = results.hits[0]
        self.assertEqual(self.domain, case_doc['domain'])
        self.assertEqual(case_id, case_doc['_id'])
        self.assertEqual(case_name, case_doc['name'])

    def _make_a_case(self, case_id, case_name):
        """Create one case via a case block and return it.

        Signals are suppressed to avoid bootstrapping unrelated machinery.
        """
        # this avoids having to deal with all the reminders code bootstrap
        with drop_connected_signals(case_post_save):
            form, cases = post_case_blocks(
                [
                    CaseBlock(
                        create=True,
                        case_id=case_id,
                        case_name=case_name,
                    ).as_xml()
                ], domain=self.domain
            )
        self.assertEqual(1, len(cases))
        return cases[0]

    def _sync_couch_cases_to_es(self, since=0):
        """Replay the couch change feed from `since` and refresh the index."""
        self.pillow.process_changes(since=since, forever=False)
        self.elasticsearch.indices.refresh(self.pillow.es_index)
def setUp(self):
    """Reset case state and guarantee no stale case index survives into the test."""
    FormProcessorTestUtils.delete_all_cases()
    # An unreachable elasticsearch is reported as a setup problem, not a failure.
    with trap_extra_setup(ConnectionError):
        pillow = CasePillow()
        self.pillow = pillow
        self.elasticsearch = pillow.get_es_new()
        delete_es_index(pillow.es_index)