def setUp(self):
    FormProcessorTestUtils.delete_all_cases()
    self.elasticsearch = get_es_new()
    self.pillow = get_case_search_to_elasticsearch_pillow()
    ensure_index_deleted(CASE_SEARCH_INDEX)
    # Bootstrap ES
    initialize_index_and_mapping(get_es_new(), CASE_SEARCH_INDEX_INFO)
def setUp(self):
    FormProcessorTestUtils.delete_all_cases(self.domain)
    FormProcessorTestUtils.delete_all_ledgers(self.domain)
    with trap_extra_setup(ConnectionError):
        self.pillow = get_ledger_to_elasticsearch_pillow()
        self.elasticsearch = get_es_new()
        ensure_index_deleted(LEDGER_INDEX_INFO.index)
        initialize_index_and_mapping(get_es_new(), LEDGER_INDEX_INFO)
def setUp(self):
    super(CaseSearchPillowTest, self).setUp()
    FormProcessorTestUtils.delete_all_cases()
    self.elasticsearch = get_es_new()
    self.pillow = get_case_pillow(skip_ucr=True)
    ensure_index_deleted(CASE_SEARCH_INDEX)
    # Bootstrap ES
    initialize_index_and_mapping(get_es_new(), CASE_SEARCH_INDEX_INFO)
def get_xform_pillow(pillow_id='xform-pillow', ucr_division=None,
                     include_ucrs=None, exclude_ucrs=None,
                     num_processes=1, process_num=0, ucr_configs=None, skip_ucr=False,
                     processor_chunk_size=DEFAULT_PROCESSOR_CHUNK_SIZE, topics=None, **kwargs):
    # avoid circular dependency
    from corehq.pillows.reportxform import transform_xform_for_report_forms_index, report_xform_filter
    from corehq.pillows.mappings.user_mapping import USER_INDEX

    if topics:
        assert set(topics).issubset(FORM_TOPICS), "This is a pillow to process forms only"
    topics = topics or FORM_TOPICS
    change_feed = KafkaChangeFeed(
        topics, client_id=pillow_id, num_processes=num_processes, process_num=process_num
    )
    ucr_processor = ConfigurableReportPillowProcessor(
        data_source_providers=[DynamicDataSourceProvider(), StaticDataSourceProvider()],
        ucr_division=ucr_division,
        include_ucrs=include_ucrs,
        exclude_ucrs=exclude_ucrs,
    )
    xform_to_es_processor = ElasticProcessor(
        elasticsearch=get_es_new(),
        index_info=XFORM_INDEX_INFO,
        doc_prep_fn=transform_xform_for_elasticsearch,
        doc_filter_fn=xform_pillow_filter,
    )
    unknown_user_form_processor = UnknownUsersProcessor()
    form_meta_processor = FormSubmissionMetadataTrackerProcessor()
    checkpoint_id = "{}-{}-{}-{}".format(
        pillow_id, XFORM_INDEX_INFO.index, REPORT_XFORM_INDEX_INFO.index, USER_INDEX)
    checkpoint = KafkaPillowCheckpoint(checkpoint_id, topics)
    event_handler = KafkaCheckpointEventHandler(
        checkpoint=checkpoint, checkpoint_frequency=1000, change_feed=change_feed,
        checkpoint_callback=ucr_processor
    )
    if ucr_configs:
        ucr_processor.bootstrap(ucr_configs)
    processors = [xform_to_es_processor, form_meta_processor, unknown_user_form_processor]
    if not settings.ENTERPRISE_MODE:
        xform_to_report_es_processor = ElasticProcessor(
            elasticsearch=get_es_new(),
            index_info=REPORT_XFORM_INDEX_INFO,
            doc_prep_fn=transform_xform_for_report_forms_index,
            doc_filter_fn=report_xform_filter,
        )
        processors = [xform_to_report_es_processor] + processors
    if not skip_ucr:
        processors = [ucr_processor] + processors
    return ConstructedPillow(
        name=pillow_id,
        change_feed=change_feed,
        checkpoint=checkpoint,
        change_processed_event_handler=event_handler,
        processor=processors,
        processor_chunk_size=processor_chunk_size,
    )
def delete_case_search_cases(domain):
    if domain is None or isinstance(domain, dict):
        raise TypeError("Domain attribute is required")
    query = {'query': CaseSearchES().domain(domain).raw_query['query']}
    get_es_new().delete_by_query(
        index=CASE_SEARCH_INDEX,
        doc_type=CASE_ES_TYPE,
        body=query,
    )
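# A minimal usage sketch for delete_case_search_cases, assuming the case search
# index exists; 'my-domain' is a hypothetical domain name, not one from this codebase.
delete_case_search_cases('my-domain')  # removes that domain's case search docs
# Passing None or a dict instead of a domain string raises TypeError (see the guard above).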
def setUpClass(cls):
    super(TestUserSyncToEs, cls).setUpClass()
    # create the index
    cls.es = get_es_new()
    with trap_extra_setup(ConnectionError):
        initialize_index_and_mapping(cls.es, USER_INDEX_INFO)
def setUp(self):
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(USER_INDEX)
        self.es_client = get_es_new()
        initialize_index_and_mapping(self.es_client, USER_INDEX_INFO)
    self.user_id = 'user1'
    _create_es_user(self.es_client, self.user_id, self.domain)
def setUp(self):
    self.domain_obj = create_domain(self.domain)
    es = get_es_new()
    initialize_index_and_mapping(es, USER_INDEX_INFO)
    self.region = LocationType.objects.create(domain=self.domain, name="region")
    self.town = LocationType.objects.create(domain=self.domain, name="town", parent_type=self.region)
    self.data_source_config = DataSourceConfiguration(
        domain=self.domain,
        display_name='Locations in Westworld',
        referenced_doc_type='Location',
        table_id=clean_table_name(self.domain, str(uuid.uuid4().hex)),
        configured_filter={},
        configured_indicators=[{
            "type": "expression",
            "expression": {
                "type": "property_name",
                "property_name": "name"
            },
            "column_id": "location_name",
            "display_name": "location_name",
            "datatype": "string"
        }],
    )
    self.data_source_config.validate()
    self.data_source_config.save()
    self.pillow = get_location_pillow(ucr_configs=[self.data_source_config])
    self.pillow.get_change_feed().get_latest_offsets()
def setUp(self):
    self.index_info = DOMAIN_INDEX_INFO
    self.elasticsearch = get_es_new()
    ensure_index_deleted(self.index_info.index)
    initialize_index(self.elasticsearch, self.index_info)
    import time
    time.sleep(1)  # without this we get a 503 response about 30% of the time
def handle(self, **options):
    flip_all = options['flip_all']
    code_red = options['code_red']
    es = get_es_new()
    es_indices = list(get_all_expected_es_indices())
    if code_red:
        if input('\n'.join([
            'CODE RED!!!',
            'Really delete ALL the elastic indices and pillow checkpoints?',
            'The following indices will be affected:',
            '\n'.join([six.text_type(index_info) for index_info in es_indices]),
            'This is a PERMANENT action. (Type "code red" to continue):',
            '',
        ])).lower() == 'code red':
            for index_info in es_indices:
                try:
                    es.indices.delete(index_info.index)
                except NotFoundError:
                    print('elastic index not present: {}'.format(index_info.index))
                else:
                    print('deleted elastic index: {}'.format(index_info.index))
        else:
            print('Safety first!')
        return
    if flip_all:
        for index_info in es_indices:
            assume_alias(es, index_info.index, index_info.alias)
        print(simplejson.dumps(es.indices.get_aliases(), indent=4))
def delete_es_index(es_index):
    if es_index.startswith(TEST_ES_PREFIX):
        from corehq.elastic import get_es_new
        es = get_es_new()
        es.indices.delete(index=es_index)
    else:
        raise DeleteProductionESIndex('You cannot delete a production index in tests!!')
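# A hedged usage sketch for delete_es_index: only index names carrying the test
# prefix may be deleted; anything else is treated as production and refused.
delete_es_index(TEST_ES_PREFIX + 'xforms')  # allowed: a test index
# delete_es_index('xforms')                 # would raise DeleteProductionESIndex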
def setUpClass(cls):
    @patch('couchforms.analytics.FormES.index', XFORM_INDEX_INFO.index)
    @patch('corehq.apps.es.es_query.ES_META', TEST_ES_META)
    @patch('corehq.elastic.ES_META', TEST_ES_META)
    def create_form_and_sync_to_es(received_on):
        with process_kafka_changes('XFormToElasticsearchPillow'):
            with process_couch_changes('DefaultChangeFeedPillow'):
                metadata = TestFormMetadata(domain=cls.domain, app_id=cls.app_id,
                                            xmlns=cls.xmlns, received_on=received_on)
                form = get_form_ready_to_save(metadata, is_db_test=True)
                form_processor = FormProcessorInterface(domain=cls.domain)
                form_processor.save_processed_models([form])
        return form

    from casexml.apps.case.tests.util import delete_all_xforms
    delete_all_xforms()
    cls.now = datetime.datetime.utcnow()
    cls._60_days = datetime.timedelta(days=60)
    cls.domain = 'my_crazy_analytics_domain'
    cls.app_id = uuid.uuid4().hex
    cls.xmlns = 'my://crazy.xmlns/'
    with trap_extra_setup(ConnectionError):
        cls.elasticsearch = get_es_new()
        initialize_index_and_mapping(cls.elasticsearch, XFORM_INDEX_INFO)
    cls.forms = [create_form_and_sync_to_es(cls.now),
                 create_form_and_sync_to_es(cls.now - cls._60_days)]
    cls.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)
def setUpClass(cls):
    super(XFormESTestCase, cls).setUpClass()
    cls.now = datetime.datetime.utcnow()
    cls.forms = []
    with trap_extra_setup(ConnectionError):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, XFORM_INDEX_INFO)
def handle(self, **options):
    es = get_es_new()
    # call this before getting existing indices because apparently getting the
    # pillow will create the index if it doesn't exist
    # fixme: this can delete real indices if a reindex is in progress
    found_indices = set(es.indices.get_aliases().keys())
    expected_indices = {info.index for info in get_all_expected_es_indices()}
    print(expected_indices)
    if options['verbose']:
        if expected_indices - found_indices:
            print('the following indices were not found:\n{}\n'.format(
                '\n'.join(expected_indices - found_indices)
            ))
        print('expecting {} indices:\n{}\n'.format(
            len(expected_indices), '\n'.join(sorted(expected_indices))))
    unref_indices = set([index for index in found_indices if index not in expected_indices])
    if unref_indices:
        if options['delete']:
            _delete_indices(es, unref_indices)
        else:
            _close_indices(es, unref_indices, options['noinput'])
    else:
        print('no indices need pruning')
def setUp(self):
    super(ReportCaseReindexerTest, self).setUp()
    FormProcessorTestUtils.delete_all_xforms()
    FormProcessorTestUtils.delete_all_cases()
    with trap_extra_setup(ConnectionError):
        self.elasticsearch = get_es_new()
        ensure_index_deleted(REPORT_CASE_INDEX_INFO.index)
def get_sql_case_reindexer():
    return ElasticPillowReindexer(
        pillow=get_sql_case_to_elasticsearch_pillow(),
        change_provider=SqlCaseChangeProvider(),
        elasticsearch=get_es_new(),
        index_info=_get_case_index_info(),
    )
def handle(self, *args, **options):
    es = get_es_new()
    # call this before getting existing indices because apparently getting the
    # pillow will create the index if it doesn't exist
    found_indices = set(es.indices.get_aliases().keys())
    existing_indices = set(
        pillow.es_index for pillow in
        filter(lambda x: isinstance(x, AliasedElasticPillow), get_all_pillow_instances())
    )
    if options['verbose']:
        if existing_indices - found_indices:
            print 'the following indices were not found:\n{}\n'.format(
                '\n'.join(existing_indices - found_indices)
            )
        print 'expecting {} indices:\n{}\n'.format(
            len(existing_indices), '\n'.join(sorted(existing_indices)))
    to_delete = set([index for index in found_indices if index not in existing_indices])
    if to_delete:
        if options['noinput'] or raw_input(
                '\n'.join([
                    'Really delete ALL the unrecognized elastic indices?',
                    'Here are the indices that will be deleted:',
                    '\n'.join(sorted(to_delete)),
                    'This operation is not reversible and all data will be lost.',
                    'Type "delete indices" to continue:\n',
                ])).lower() == 'delete indices':
            for index in to_delete:
                es.indices.delete(index)
        else:
            print 'aborted'
    else:
        print 'no indices need pruning'
def setUp(self):
    super(DomainPillowTest, self).setUp()
    self.index_info = DOMAIN_INDEX_INFO
    self.elasticsearch = get_es_new()
    delete_all_domains()
    ensure_index_deleted(self.index_info.index)
    initialize_index(self.elasticsearch, self.index_info)
def get_ledger_v2_reindexer():
    return ElasticPillowReindexer(
        pillow=get_ledger_to_elasticsearch_pillow(),
        change_provider=LedgerV2ChangeProvider(),
        elasticsearch=get_es_new(),
        index_info=LEDGER_INDEX_INFO,
    )
def apps_update_calculated_properties():
    es = get_es_new()
    q = {"filter": {"and": [{"missing": {"field": "copy_of"}}]}}
    results = stream_es_query(q=q, es_index='apps', size=999999, chunksize=500)
    for r in results:
        calced_props = {"cp_is_active": is_app_active(r["_id"], r["_source"]["domain"])}
        es.update(APP_INDEX, ES_META['apps'].type, r["_id"], body={"doc": calced_props})
def get_default_reindexer_for_elastic_pillow(pillow, change_provider):
    return ElasticPillowReindexer(
        pillow=pillow,
        change_provider=change_provider,
        elasticsearch=get_es_new(),
        index_info=get_index_info_from_pillow(pillow),
    )
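# A minimal wiring sketch for the helper above; `my_pillow` and
# `my_change_provider` are hypothetical stand-ins for a real pillow/provider
# pair, and reindex() is assumed from the Reindexer interface used elsewhere.
reindexer = get_default_reindexer_for_elastic_pillow(
    pillow=my_pillow,
    change_provider=my_change_provider,
)
reindexer.reindex()  # replays every change from the provider into the pillow's index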
def get_case_to_report_es_processor():
    return ElasticProcessor(
        elasticsearch=get_es_new(),
        index_info=REPORT_CASE_INDEX_INFO,
        doc_prep_fn=transform_case_to_report_es,
        doc_filter_fn=report_case_filter,
    )
def setUp(self):
    super(XFormPillowTest, self).setUp()
    FormProcessorTestUtils.delete_all_xforms()
    with trap_extra_setup(ConnectionError):
        self.elasticsearch = get_es_new()
        initialize_index_and_mapping(self.elasticsearch, XFORM_INDEX_INFO)
        delete_es_index(XFORM_INDEX_INFO.index)
def build(self):
    """Returns a reindexer that will return either all domains with case search
    enabled, or a single domain if passed in
    """
    limit_to_db = self.options.pop('limit_to_db', None)
    domain = self.options.pop('domain', None)
    limit_db_aliases = [limit_to_db] if limit_to_db else None
    initialize_index_and_mapping(get_es_new(), CASE_SEARCH_INDEX_INFO)
    try:
        if domain is not None:
            if not domain_needs_search_index(domain):
                raise CaseSearchNotEnabledException(
                    "{} does not have case search enabled".format(domain))
            domains = [domain]
        else:
            # return changes for all enabled domains
            domains = domains_needing_search_index()
        change_provider = get_domain_case_change_provider(
            domains=domains, limit_db_aliases=limit_db_aliases)
    except ProgrammingError:
        # The db hasn't been initialized yet, so skip this reindex and complain.
        return _fail_gracefully_and_tell_admins()
    else:
        return PillowChangeProviderReindexer(
            get_case_search_processor(),
            change_provider=change_provider,
        )
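# A hedged sketch of driving this builder; the factory name below is an
# assumption for illustration, not a name confirmed by this listing. build()
# itself raises CaseSearchNotEnabledException for a domain without case search.
reindexer = SomeCaseSearchReindexerFactory(domain='enabled-domain').build()
reindexer.reindex()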
def setUp(self):
    self.es = get_es_new()
    self.index = TEST_INDEX_INFO.index
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(self.index)
        initialize_index_and_mapping(self.es, TEST_INDEX_INFO)
def _get_latest_doc_from_index(es_index, sort_field):
    """
    Query the elasticsearch index, sort descending by the sort field, and get
    the doc_id back so we can then do a rev-update check.

    This is because there's no way to know ahead of time what's inside the
    report* index, so just get it directly from the index and do the
    modify-check workflow.
    """
    recent_query = {
        "filter": {
            "match_all": {}
        },
        "sort": {sort_field: "desc"},
        "size": 1
    }
    es = get_es_new()
    try:
        res = es.search(es_index, body=recent_query)
        if 'hits' in res:
            if 'hits' in res['hits']:
                result = res['hits']['hits'][0]
                return result['_source']['_id']
    except Exception, ex:
        logging.error("Error querying get_latest_doc_from_index[%s]: %s" % (es_index, ex))
        return None
def setUp(self):
    super(UserPillowTestBase, self).setUp()
    self.index_info = USER_INDEX_INFO
    self.elasticsearch = get_es_new()
    delete_all_users()
    ensure_index_deleted(self.index_info.index)
    initialize_index(self.elasticsearch, self.index_info)
def handle(self, *args, **options):
    if len(args) != 0:
        raise CommandError("This command doesn't expect arguments!")
    flip_all = options['flip_all']
    code_red = options['code_red']
    es = get_es_new()
    es_indices = list(get_all_expected_es_indices())
    if code_red:
        if raw_input('\n'.join([
            'CODE RED!!!',
            'Really delete ALL the elastic indices and pillow checkpoints?',
            'The following indices will be affected:',
            '\n'.join([unicode(index_info) for index_info in es_indices]),
            'This is a PERMANENT action. (Type "code red" to continue):',
            '',
        ])).lower() == 'code red':
            for index_info in es_indices:
                es.indices.delete(index_info.index)
                print 'deleted elastic index: {}'.format(index_info.index)
        else:
            print 'Safety first!'
        return
    if flip_all:
        for index_info in es_indices:
            assume_alias(es, index_info.index, index_info.alias)
        print simplejson.dumps(es.indices.get_aliases(), indent=4)
def get_case_name(case_id):
    from corehq.pillows.mappings.case_mapping import CASE_INDEX_INFO
    try:
        result = get_es_new().get(CASE_INDEX_INFO.index, case_id, _source_include=['name'])
    except ElasticsearchException:
        return None
    return result['_source']['name']
def get_ledger_v1_reindexer():
    from corehq.apps.commtrack.models import StockState
    return ElasticPillowReindexer(
        pillow=get_ledger_to_elasticsearch_pillow(),
        change_provider=DjangoModelChangeProvider(StockState, _ledger_v1_to_change),
        elasticsearch=get_es_new(),
        index_info=LEDGER_INDEX_INFO,
    )
def setUpClass(cls):
    super().setUpClass()
    cls.es = get_es_new()
    initialize_index_and_mapping(cls.es, USER_INDEX_INFO)
    initialize_index_and_mapping(cls.es, XFORM_INDEX_INFO)

    today = datetime.datetime.utcnow()
    one_year_ago = add_months_to_date(today.date(), -12)
    enterprise_plan = get_enterprise_software_plan()
    cls.billing_account = get_enterprise_account()
    cls.domains = [
        create_domain('test-emw-settings-001'),
        create_domain('test-emw-settings-002'),
    ]
    add_domains_to_enterprise_account(
        cls.billing_account, cls.domains, enterprise_plan, one_year_ago)
    cls.emw_settings = EnterpriseMobileWorkerSettings.objects.create(
        account=cls.billing_account,
        enable_auto_deactivation=True,
    )

    cls.active_user1 = CommCareUser.create(
        domain=cls.domains[0].name,
        username='******',
        password='******',
        created_by=None,
        created_via=None,
        is_active=True,
    )
    cls.active_user2 = CommCareUser.create(
        domain=cls.domains[0].name,
        username='******',
        password='******',
        created_by=None,
        created_via=None,
        is_active=True,
    )
    cls.active_user3 = CommCareUser.create(
        domain=cls.domains[1].name,
        username='******',
        password='******',
        created_by=None,
        created_via=None,
        is_active=True,
    )
    cls.active_user4 = CommCareUser.create(
        domain=cls.domains[1].name,
        username='******',
        password='******',
        created_by=None,
        created_via=None,
        is_active=True,
    )
    cls.active_user5 = CommCareUser.create(
        domain=cls.domains[1].name,
        username='******',
        password='******',
        created_by=None,
        created_via=None,
        is_active=True,
    )
    cls.active_user5.created_on = today - datetime.timedelta(
        days=cls.emw_settings.inactivity_period)
    cls.active_user5.save()
    cls.active_user6 = CommCareUser.create(
        domain=cls.domains[1].name,
        username='******',
        password='******',
        created_by=None,
        created_via=None,
        is_active=True,
    )
    cls.users = [
        cls.active_user1,
        cls.active_user2,
        cls.active_user3,
        cls.active_user4,
        cls.active_user5,
        cls.active_user6,
        CommCareUser.create(domain=cls.domains[0].name, username='******', password='******',
                            created_by=None, created_via=None, is_active=False),
        CommCareUser.create(domain=cls.domains[1].name, username='******', password='******',
                            created_by=None, created_via=None, is_active=False),
    ]

    form_submissions = [
        (TestFormMetadata(
            domain=cls.domains[0].name,
            received_on=today - datetime.timedelta(
                days=cls.emw_settings.inactivity_period - 1),
            user_id=cls.active_user1.user_id,
            username=cls.active_user1.username,
        ), cls.active_user1),
        (TestFormMetadata(
            domain=cls.domains[0].name,
            received_on=today - datetime.timedelta(days=cls.emw_settings.inactivity_period),
            user_id=cls.active_user2.user_id,
            username=cls.active_user2.username,
        ), cls.active_user2),
        (TestFormMetadata(
            domain=cls.domains[1].name,
            received_on=today - datetime.timedelta(
                days=cls.emw_settings.inactivity_period - 10),
            user_id=cls.active_user3.user_id,
            username=cls.active_user3.username,
        ), cls.active_user3),
        (TestFormMetadata(
            domain=cls.domains[1].name,
            received_on=today - datetime.timedelta(
                days=cls.emw_settings.inactivity_period + 1),
            user_id=cls.active_user6.user_id,
            username=cls.active_user6.username,
        ), cls.active_user6),
    ]
    for form_metadata, user in form_submissions:
        # ensure users are as old as the received_on dates of their submissions
        user.created_on = form_metadata.received_on
        user.save()
        form_pair = make_es_ready_form(form_metadata)
        send_to_elasticsearch('forms', form_pair.json_form)
        mark_latest_submission(
            form_metadata.domain, user, form_metadata.app_id, "build-id", "2",
            {'deviceID': 'device-id'}, form_metadata.received_on)

    for user in cls.users:
        fresh_user = CommCareUser.get_by_user_id(user.user_id)
        elastic_user = transform_user_for_elasticsearch(fresh_user.to_json())
        send_to_elasticsearch('users', elastic_user)
    cls.es.indices.refresh(USER_INDEX_INFO.alias)
    cls.es.indices.refresh(XFORM_INDEX_INFO.alias)
def setUpClass(cls):
    super(PillowtopReindexerTest, cls).setUpClass()
    with trap_extra_setup(ConnectionError):
        initialize_index_and_mapping(get_es_new(), CASE_INDEX_INFO)
def get_xform_pillow(pillow_id='xform-pillow', ucr_division=None, include_ucrs=None, exclude_ucrs=None, num_processes=1, process_num=0, ucr_configs=None, skip_ucr=False, processor_chunk_size=DEFAULT_PROCESSOR_CHUNK_SIZE, topics=None, **kwargs): """Generic XForm change processor Processors: - :py:class:`corehq.apps.userreports.pillow.ConfigurableReportPillowProcessor` (disabled when skip_ucr=True) - :py:class:`pillowtop.processors.elastic.BulkElasticProcessor` - :py:class:`corehq.pillows.user.UnknownUsersProcessor` (disabled when RUN_UNKNOWN_USER_PILLOW=False) - :py:class:`pillowtop.form.FormSubmissionMetadataTrackerProcessor` (disabled when RUN_FORM_META_PILLOW=False) """ # avoid circular dependency from corehq.pillows.reportxform import transform_xform_for_report_forms_index, report_xform_filter from corehq.pillows.mappings.user_mapping import USER_INDEX if topics: assert set(topics).issubset( FORM_TOPICS), "This is a pillow to process cases only" topics = topics or FORM_TOPICS change_feed = KafkaChangeFeed(topics, client_id=pillow_id, num_processes=num_processes, process_num=process_num) ucr_processor = ConfigurableReportPillowProcessor( data_source_providers=[ DynamicDataSourceProvider('XFormInstance'), StaticDataSourceProvider('XFormInstance') ], ucr_division=ucr_division, include_ucrs=include_ucrs, exclude_ucrs=exclude_ucrs, run_migrations=( process_num == 0), # only first process runs migrations ) xform_to_es_processor = BulkElasticProcessor( elasticsearch=get_es_new(), index_info=XFORM_INDEX_INFO, doc_prep_fn=transform_xform_for_elasticsearch, doc_filter_fn=xform_pillow_filter, ) unknown_user_form_processor = UnknownUsersProcessor() form_meta_processor = FormSubmissionMetadataTrackerProcessor() checkpoint_id = "{}-{}-{}-{}".format(pillow_id, XFORM_INDEX_INFO.index, REPORT_XFORM_INDEX_INFO.index, USER_INDEX) checkpoint = KafkaPillowCheckpoint(checkpoint_id, topics) event_handler = KafkaCheckpointEventHandler( checkpoint=checkpoint, checkpoint_frequency=1000, change_feed=change_feed, checkpoint_callback=ucr_processor) if ucr_configs: ucr_processor.bootstrap(ucr_configs) processors = [xform_to_es_processor] if settings.RUN_UNKNOWN_USER_PILLOW: processors.append(unknown_user_form_processor) if settings.RUN_FORM_META_PILLOW: processors.append(form_meta_processor) if not settings.ENTERPRISE_MODE: xform_to_report_es_processor = BulkElasticProcessor( elasticsearch=get_es_new(), index_info=REPORT_XFORM_INDEX_INFO, doc_prep_fn=transform_xform_for_report_forms_index, doc_filter_fn=report_xform_filter) processors.append(xform_to_report_es_processor) if not skip_ucr: processors.append(ucr_processor) return ConstructedPillow(name=pillow_id, change_feed=change_feed, checkpoint=checkpoint, change_processed_event_handler=event_handler, processor=processors, processor_chunk_size=processor_chunk_size)
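# A minimal sketch of constructing and running the pillow built above; the
# pillow_id is hypothetical, and run() is assumed from the pillow base class
# (it consumes changes from the Kafka feed until interrupted).
pillow = get_xform_pillow(
    pillow_id='xform-pillow-1',  # hypothetical id
    skip_ucr=True,               # ES and metadata processors only, no UCR processor
)
pillow.run()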
def setUp(self):
    self.es = get_es_new()
    reset_es_index(XFORM_INDEX_INFO)
    initialize_index_and_mapping(self.es, XFORM_INDEX_INFO)
def handle(self, *args, **options):
    runs = []
    all_es_indices = get_all_expected_es_indices()
    es = get_es_new()
    indices_needing_reindex = [
        info for info in all_es_indices if not es.indices.exists(info.index)
    ]
    if not indices_needing_reindex:
        print 'Nothing needs to be reindexed'
        return

    print "Reindexing:\n\t",
    print '\n\t'.join(map(unicode, indices_needing_reindex))

    preindex_message = """
    Heads up!

    %s is going to start preindexing the following indices:\n
    %s

    This may take a while, so don't deploy until all these have reported finishing.
    """ % (settings.EMAIL_SUBJECT_PREFIX,
           '\n\t'.join(map(unicode, indices_needing_reindex)))

    mail_admins("Pillow preindexing starting", preindex_message)
    start = datetime.utcnow()
    for index_info in indices_needing_reindex:
        # loop through pillows once before running greenlets
        # to fail hard on misconfigured pillows
        reindex_command = get_reindex_commands(index_info.alias)
        if not reindex_command:
            raise Exception(
                "Error, pillow [%s] is not configured "
                "with its own management command reindex command "
                "- it needs one" % index_info.alias)

    for index_info in indices_needing_reindex:
        print index_info.alias
        g = gevent.spawn(do_reindex, index_info.alias)
        runs.append(g)

    if len(indices_needing_reindex) > 0:
        gevent.joinall(runs)
        try:
            for job in runs:
                job.get()
        except Exception:
            f = StringIO()
            traceback.print_exc(file=f)
            mail_admins("Pillow preindexing failed", f.getvalue())
            raise
        else:
            mail_admins(
                "Pillow preindexing completed",
                "Reindexing %s took %s seconds" % (
                    ', '.join(map(unicode, indices_needing_reindex)),
                    (datetime.utcnow() - start).seconds))

    print "All pillowtop reindexing jobs completed"
def setUpClass(cls):
    super(GroupsToUserReindexerTest, cls).setUpClass()
    cls.es = get_es_new()
    ensure_index_deleted(USER_INDEX)
    initialize_index_and_mapping(cls.es, USER_INDEX_INFO)
def setUpClass(cls):
    super(TestFixFormsWithMissingXmlns, cls).setUpClass()
    cls.es = get_es_new()
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        initialize_index_and_mapping(cls.es, XFORM_INDEX_INFO)
def get_user_es_processor():
    return ElasticProcessor(
        elasticsearch=get_es_new(),
        index_info=USER_INDEX_INFO,
        doc_prep_fn=transform_user_for_elasticsearch,
    )
def setUpClass(cls):
    super(TestGroupResource, cls).setUpClass()
    cls.es = get_es_new()
    ensure_index_deleted(GROUP_INDEX_INFO.index)
    initialize_index_and_mapping(cls.es, GROUP_INDEX_INFO)
def setUp(self):
    ensure_index_deleted(USER_INDEX)
    self.es_client = get_es_new()
    initialize_index_and_mapping(self.es_client, USER_INDEX_INFO)
def __init__(self, domain):
    super(ESView, self).__init__()
    self.domain = domain.lower()
    self.es = get_es_new()
def setUpClass(cls):
    super(TestFilterDslLookups, cls).setUpClass()
    with trap_extra_setup(ConnectionError):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, CASE_SEARCH_INDEX_INFO)

    cls.child_case_id = 'margaery'
    cls.parent_case_id = 'mace'
    cls.grandparent_case_id = 'olenna'
    cls.domain = "Tyrell"
    factory = CaseFactory(domain=cls.domain)
    grandparent_case = CaseStructure(
        case_id=cls.grandparent_case_id,
        attrs={
            'create': True,
            'case_type': 'grandparent',
            'update': {
                "name": "Olenna",
                "alias": "Queen of thorns",
                "house": "Tyrell",
            },
        })
    parent_case = CaseStructure(
        case_id=cls.parent_case_id,
        attrs={
            'create': True,
            'case_type': 'parent',
            'update': {
                "name": "Mace",
                "house": "Tyrell",
            },
        },
        indices=[CaseIndex(
            grandparent_case,
            identifier='mother',
            relationship='child',
        )])
    child_case = CaseStructure(
        case_id=cls.child_case_id,
        attrs={
            'create': True,
            'case_type': 'child',
            'update': {
                "name": "Margaery",
                "house": "Tyrell",
            },
        },
        indices=[CaseIndex(
            parent_case,
            identifier='father',
            relationship='extension',
        )],
    )
    for case in factory.create_or_update_cases([child_case]):
        send_to_elasticsearch('case_search', transform_case_for_elasticsearch(case.to_json()))
    cls.es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
def __init__(self):
    self._es = get_es_new()
def setUp(self):
    super(ReportXformReindexerTest, self).setUp()
    FormProcessorTestUtils.delete_all_xforms()
    with trap_extra_setup(ConnectionError):
        self.elasticsearch = get_es_new()
        ensure_index_deleted(REPORT_XFORM_INDEX_INFO.index)
def setUpClass(cls):
    super(DomainCalculatedPropertiesTest, cls).setUpClass()
    cls.es = [{
        'info': i,
        'instance': get_es_new(),
    } for i in [CASE_INDEX_INFO, SMS_INDEX_INFO, XFORM_INDEX_INFO]]
def setUp(self):
    self.elasticsearch = get_es_new()
    for index in [GROUP_INDEX_INFO, USER_INDEX_INFO]:
        ensure_index_deleted(index.index)
        initialize_index_and_mapping(self.elasticsearch, index)
    delete_all_groups()