def setUp(self):
    """Recreate the user ES index and index a single test user."""
    # Skip (rather than error out) when elasticsearch is unreachable.
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(USER_INDEX)
        client = get_es_new()
        initialize_index_and_mapping(client, USER_INDEX_INFO)
        self.es_client = client
        self.user_id = 'user1'
        _create_es_user(self.es_client, self.user_id, self.domain)
def setUp(self):
    """Create a mobile worker with a synced usercase and one indexed
    "checkin" case owned by that worker, backed by a fresh case-search
    ES index."""
    super().setUp()
    delete_all_users()
    self.domain_obj = create_domain(self.domain)
    enable_usercase(self.domain)
    # Skip (rather than fail) the test when elasticsearch is unreachable.
    with trap_extra_setup(ConnectionError):
        self.es = get_es_new()
        initialize_index_and_mapping(self.es, CASE_SEARCH_INDEX_INFO)
    username = normalize_username("mobile_worker_1", self.domain)
    self.mobile_worker = CommCareUser.create(self.domain, username, "123", None, None)
    # Ensure the worker's usercase exists before creating the checkin case.
    sync_user_cases(self.mobile_worker)
    self.checkin_case = CaseFactory(self.domain).create_case(
        case_type="checkin",
        owner_id=self.mobile_worker.get_id,
        update={"username": self.mobile_worker.raw_username},
    )
    send_to_elasticsearch(
        "case_search",
        transform_case_for_elasticsearch(self.checkin_case.to_json()))
    # Refresh so the indexed case is immediately visible to queries.
    self.es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
    self.case_accessor = CaseAccessors(self.domain)
def setUp(self):
    """Give each test a clean copy of the index named by ``self.es_index_info``."""
    super(BaseESAccessorsTest, self).setUp()
    # Skip the test instead of failing when ES is down.
    with trap_extra_setup(ConnectionError):
        es = get_es_new()
        self.es = es
        ensure_index_deleted(self.es_index_info.index)
        self.domain = 'esdomain'
        initialize_index_and_mapping(es, self.es_index_info)
def setUp(self):
    """Build a Location-backed UCR data source ('Locations in Westworld')
    with a region -> town location type hierarchy, and bootstrap a kafka
    UCR pillow over it."""
    delete_all_locations()
    self.domain_obj = create_domain(self.domain)
    self.region = LocationType.objects.create(domain=self.domain, name="region")
    self.town = LocationType.objects.create(
        domain=self.domain, name="town", parent_type=self.region)
    self.data_source_config = DataSourceConfiguration(
        domain=self.domain,
        display_name='Locations in Westworld',
        referenced_doc_type='Location',
        # Random table id so repeated test runs don't collide.
        table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
        configured_filter={},
        # Single indicator: the location's name, emitted as a string column.
        configured_indicators=[{
            "type": "expression",
            "expression": {
                "type": "property_name",
                "property_name": "name"
            },
            "column_id": "location_name",
            "display_name": "location_name",
            "datatype": "string"
        }],
    )
    self.data_source_config.validate()
    self.data_source_config.save()
    self.pillow = get_kafka_ucr_pillow()
    self.pillow.bootstrap(configs=[self.data_source_config])
    # Skip the test when kafka is unavailable.
    with trap_extra_setup(KafkaUnavailableError):
        self.pillow.get_change_feed().get_current_offsets()
def setUpClass(cls):
    """Index two forms (received now and 60 days ago) into a fresh xform
    index by pushing them through the real pillow pipeline."""
    super(CouchformsESAnalyticsTest, cls).setUpClass()

    # Patch analytics/ES metadata so queries hit the test index.
    @patch('couchforms.analytics.FormES.index', XFORM_INDEX_INFO.index)
    @patch('corehq.apps.es.es_query.ES_META', TEST_ES_META)
    @patch('corehq.elastic.ES_META', TEST_ES_META)
    def create_form_and_sync_to_es(received_on):
        # Save a form and let the change-feed + xform pillows index it
        # (processing happens when the context managers exit).
        with process_pillow_changes('xform-pillow', {'skip_ucr': True}):
            with process_pillow_changes('DefaultChangeFeedPillow'):
                metadata = TestFormMetadata(domain=cls.domain, app_id=cls.app_id,
                                            xmlns=cls.xmlns, received_on=received_on)
                form = get_form_ready_to_save(metadata, is_db_test=True)
                form_processor = FormProcessorInterface(domain=cls.domain)
                form_processor.save_processed_models([form])
        return form

    # Local import to avoid a module-level dependency on casexml test utils.
    from casexml.apps.case.tests.util import delete_all_xforms
    delete_all_xforms()
    cls.now = datetime.datetime.utcnow()
    cls._60_days = datetime.timedelta(days=60)
    cls.domain = 'my_crazy_analytics_domain'
    cls.app_id = uuid.uuid4().hex
    cls.xmlns = 'my://crazy.xmlns/'
    # Skip the test when elasticsearch is unreachable.
    with trap_extra_setup(ConnectionError):
        cls.elasticsearch = get_es_new()
        initialize_index_and_mapping(cls.elasticsearch, XFORM_INDEX_INFO)
    cls.forms = [
        create_form_and_sync_to_es(cls.now),
        create_form_and_sync_to_es(cls.now - cls._60_days)
    ]
    # Refresh so both forms are visible to queries immediately.
    cls.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)
def setUp(self):
    """Rebuild the shared test index from scratch before every test."""
    index_info = TEST_INDEX_INFO
    self.index = index_info.index
    self.es = get_es_new()
    # Skip the test rather than fail if ES is unreachable.
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(self.index)
        initialize_index_and_mapping(self.es, index_info)
def setUpClass(cls):
    """Create an empty xform index shared by the whole test class."""
    super(XFormESTestCase, cls).setUpClass()
    cls.forms = []
    cls.now = datetime.datetime.utcnow()
    # Skip if elasticsearch is unavailable.
    with trap_extra_setup(ConnectionError):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, XFORM_INDEX_INFO)
def setUp(self):
    """Build a Location-backed UCR data source ('Locations in Westworld')
    with a region -> town location type hierarchy and bootstrap a kafka
    UCR pillow over it."""
    self.domain_obj = create_domain(self.domain)
    self.region = LocationType.objects.create(domain=self.domain, name="region")
    self.town = LocationType.objects.create(
        domain=self.domain, name="town", parent_type=self.region)
    self.data_source_config = DataSourceConfiguration(
        domain=self.domain,
        display_name='Locations in Westworld',
        referenced_doc_type='Location',
        # Random table id so repeated test runs don't collide.
        table_id=clean_table_name(self.domain, str(uuid.uuid4().hex)),
        configured_filter={},
        # Single indicator: the location's name, emitted as a string column.
        configured_indicators=[{
            "type": "expression",
            "expression": {
                "type": "property_name",
                "property_name": "name"
            },
            "column_id": "location_name",
            "display_name": "location_name",
            "datatype": "string"
        }],
    )
    self.data_source_config.validate()
    self.data_source_config.save()
    self.pillow = get_kafka_ucr_pillow()
    self.pillow.bootstrap(configs=[self.data_source_config])
    # Skip the test when kafka is unavailable.
    with trap_extra_setup(KafkaUnavailableError):
        self.pillow.get_change_feed().get_latest_offsets()
def setUp(self):
    """Reset stored forms and rebuild the report-xform ES index."""
    super(ReportXformPillowTest, self).setUp()
    FormProcessorTestUtils.delete_all_xforms()
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        es = get_es_new()
        self.elasticsearch = es
        ensure_index_deleted(REPORT_XFORM_INDEX_INFO.index)
        initialize_index_and_mapping(es, REPORT_XFORM_INDEX_INFO)
def setUpClass(cls):
    """Seed a fresh case index with four cases covering the owner / type /
    closed variations the export tests filter on, then clear the cache.

    Fix: corrected the typo "elasicsearch" in the skip message.
    """
    super(ExportTest, cls).setUpClass()
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, CASE_INDEX_INFO)
    # One baseline case plus one per varied attribute (owner, type, closed).
    case = new_case(_id='robin', name='batman', foo="apple", bar="banana", date='2016-4-24')
    send_to_elasticsearch('cases', case.to_json())
    case = new_case(owner_id="some_other_owner", foo="apple", bar="banana", date='2016-4-04')
    send_to_elasticsearch('cases', case.to_json())
    case = new_case(type="some_other_type", foo="apple", bar="banana")
    send_to_elasticsearch('cases', case.to_json())
    case = new_case(closed=True, foo="apple", bar="banana")
    send_to_elasticsearch('cases', case.to_json())
    # Refresh so the seeded cases are visible to queries, and drop any
    # cached results from previous runs.
    cls.es.indices.refresh(CASE_INDEX_INFO.index)
    cache.clear()
def setUpClass(cls):
    """Seed case and user indices with two cases in a fresh random domain,
    server-modified 3 and 2 hours ago respectively.

    Fix: corrected the typo "elasicsearch" in the skip message.
    """
    super().setUpClass()
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, CASE_INDEX_INFO)
        initialize_index_and_mapping(cls.es, USER_INDEX_INFO)
    cls.domain = uuid.uuid4().hex
    create_domain(cls.domain)
    cls.now = datetime.utcnow()
    cases = [
        new_case(domain=cls.domain, foo="apple", bar="banana",
                 server_modified_on=cls.now - timedelta(hours=3)),
        new_case(domain=cls.domain, foo="orange", bar="pear",
                 server_modified_on=cls.now - timedelta(hours=2)),
    ]
    for case in cases:
        send_to_elasticsearch('cases', case.to_json())
    # Make the seeded cases visible to queries immediately.
    cls.es.indices.refresh(CASE_INDEX_INFO.index)
def setUp(self):
    """Wipe forms/cases and delete the report-case index so the reindexer
    starts from nothing."""
    super(ReportCaseReindexerTest, self).setUp()
    FormProcessorTestUtils.delete_all_xforms()
    FormProcessorTestUtils.delete_all_cases()
    # Skip when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        es = get_es_new()
        self.elasticsearch = es
        ensure_index_deleted(REPORT_CASE_INDEX_INFO.index)
def setUpClass(cls):
    """Stand up a temporary S3 blob db and store one small blob in it."""
    super(TestBlobStream, cls).setUpClass()
    # Skip the suite when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        cls.db = TemporaryS3BlobDB(settings.S3_BLOB_DB_SETTINGS)
        cls.meta = cls.db.put(BytesIO(b"bytes"), meta=new_meta())
def setUp(self):
    """Install a temporary S3 blob db and confirm it is the active one."""
    super(CaseMultimediaS3DBTest, self).setUp()
    # Skip the test when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        self.s3db = TemporaryS3BlobDB(settings.S3_BLOB_DB_SETTINGS)
        # Sanity check: the temp db must have registered itself globally.
        assert get_blob_db() is self.s3db, (get_blob_db(), self.s3db)
def setUpClass(cls):
    """Index two forms (received now and 60 days ago) into a fresh xform
    index via the kafka/couch change-processing pipeline."""

    # Patch analytics/ES metadata so queries hit the test index.
    @patch('couchforms.analytics.FormES.index', XFORM_INDEX_INFO.index)
    @patch('corehq.apps.es.es_query.ES_META', TEST_ES_META)
    @patch('corehq.elastic.ES_META', TEST_ES_META)
    def create_form_and_sync_to_es(received_on):
        # Save a form and let the pillows index it (processing runs when
        # the context managers exit).
        with process_kafka_changes('XFormToElasticsearchPillow'):
            with process_couch_changes('DefaultChangeFeedPillow'):
                metadata = TestFormMetadata(domain=cls.domain, app_id=cls.app_id,
                                            xmlns=cls.xmlns, received_on=received_on)
                form = get_form_ready_to_save(metadata, is_db_test=True)
                form_processor = FormProcessorInterface(domain=cls.domain)
                form_processor.save_processed_models([form])
        return form

    # Local import to avoid a module-level dependency on casexml test utils.
    from casexml.apps.case.tests.util import delete_all_xforms
    delete_all_xforms()
    cls.now = datetime.datetime.utcnow()
    cls._60_days = datetime.timedelta(days=60)
    cls.domain = 'my_crazy_analytics_domain'
    cls.app_id = uuid.uuid4().hex
    cls.xmlns = 'my://crazy.xmlns/'
    # Skip the test when elasticsearch is unreachable.
    with trap_extra_setup(ConnectionError):
        cls.elasticsearch = get_es_new()
        initialize_index_and_mapping(cls.elasticsearch, XFORM_INDEX_INFO)
    cls.forms = [create_form_and_sync_to_es(cls.now),
                 create_form_and_sync_to_es(cls.now - cls._60_days)]
    # Refresh so both forms are visible to queries immediately.
    cls.elasticsearch.indices.refresh(XFORM_INDEX_INFO.index)
def setUpClass(cls):
    """Install a temporary S3 blob db for the class and verify it took."""
    super(BaseMigrationTestCase, cls).setUpClass()
    # Skip the suite when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        cls.s3db = TemporaryS3BlobDB(settings.S3_BLOB_DB_SETTINGS)
        # Sanity check: the temp db must have registered itself globally.
        assert get_blob_db() is cls.s3db, (get_blob_db(), cls.s3db)
def setUpClass(cls):
    """Initialize case and group pillow indices and seed them with four
    cases (closed/open, with and without explicit owners) plus one group
    containing both owners.

    Fix: corrected the typo "elasicsearch" in the skip message.
    """
    case_pillow = CasePillow(online=False)
    group_pillow = GroupPillow(online=False)
    cls.pillows = [case_pillow, group_pillow]
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        for pillow in cls.pillows:
            completely_initialize_pillow_index(pillow)
    case = new_case(closed=True)
    case_pillow.send_robust(case.to_json())
    case = new_case(closed=False)
    case_pillow.send_robust(case.to_json())
    case = new_case(closed=True, owner_id="foo")
    case_pillow.send_robust(case.to_json())
    case = new_case(closed=False, owner_id="bar")
    case_pillow.send_robust(case.to_json())
    group = Group(_id=uuid.uuid4().hex, users=["foo", "bar"])
    cls.group_id = group.get_id
    group_pillow.send_robust(group.to_json())
    # Refresh both indices so the seeded docs are queryable.
    for pillow in cls.pillows:
        pillow.get_es_new().indices.refresh(pillow.es_index)
def setUp(self):
    """Reset stored forms and (re)create the xform ES index."""
    super(XFormPillowTest, self).setUp()
    FormProcessorTestUtils.delete_all_xforms()
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        self.elasticsearch = get_es_new()
        initialize_index_and_mapping(self.elasticsearch, XFORM_INDEX_INFO)
        # NOTE(review): the index is deleted immediately after being
        # initialized, leaving no live index for the test body.  If the
        # intent was deferred teardown, this likely should be
        # self.addCleanup(delete_es_index, XFORM_INDEX_INFO.index) -- confirm.
        delete_es_index(XFORM_INDEX_INFO.index)
def setUpClass(cls):
    """Create a migrating blob db layered over temp S3 and filesystem dbs."""
    # Skip the suite when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        cls.s3db = TemporaryS3BlobDB(settings.S3_BLOB_DB_SETTINGS)
        cls.fsdb = TemporaryFilesystemBlobDB()
        cls.db = mod.MigratingBlobDB(cls.s3db, cls.fsdb)
def setUp(self):
    """Point the test at a freshly deleted index plus an ES interface."""
    info = TEST_INDEX_INFO
    self.index = info.index
    self.es_alias = info.alias
    self.es = get_es_new()
    self.es_interface = ElasticsearchInterface(self.es)
    # Skip when ES is unreachable; tests start with no index present.
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(self.index)
def setUpClass(cls):
    """Create the user index the sync tests write into."""
    super(TestUserSyncToEs, cls).setUpClass()
    cls.es = get_es_new()
    # Skip the suite when elasticsearch is unreachable.
    with trap_extra_setup(ConnectionError):
        initialize_index_and_mapping(cls.es, USER_INDEX_INFO)
def setUpClass(cls):
    """Index three identical 'people' cases (the first closed) into a
    fresh case-search index for the 'naboo' domain."""
    super().setUpClass()
    es = get_es_new()
    # Skip the suite when elasticsearch is unreachable.
    with trap_extra_setup(ConnectionError):
        initialize_index_and_mapping(es, CASE_SEARCH_INDEX_INFO)
    cls.domain = 'naboo'
    cls.domain_obj = create_domain(cls.domain)
    cls.case_type = 'people'
    cls.factory = CaseFactory(cls.domain)

    def make_case(**extra):
        # All three cases share the same name/type/age.
        return cls.factory.create_case(
            case_name="foo", case_type=cls.case_type, update={"age": 2}, **extra)

    cls.case1 = make_case(close=True)
    cls.case2 = make_case()
    cls.case3 = make_case()
    for case in (cls.case1, cls.case2, cls.case3):
        send_to_elasticsearch(
            'case_search', transform_case_for_elasticsearch(case.to_json()))
    # Refresh so the cases are queryable right away.
    es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
def setUp(self):
    """Reset cases and (re)create the case ES index."""
    super(CasePillowTest, self).setUp()
    FormProcessorTestUtils.delete_all_cases()
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        self.elasticsearch = get_es_new()
        initialize_index_and_mapping(self.elasticsearch, CASE_INDEX_INFO)
        # NOTE(review): the index is deleted right after being initialized,
        # leaving no live index for the test body.  If deferred teardown was
        # intended, this likely should be
        # self.addCleanup(delete_es_index, CASE_INDEX_INFO.index) -- confirm.
        delete_es_index(CASE_INDEX_INFO.index)
def setUp(self):
    """Install a temporary S3 blob db and confirm it is the active one."""
    super(DeleteAtachmentsS3DBTests, self).setUp()
    # Skip the test when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        self.s3db = TemporaryS3BlobDB(settings.S3_BLOB_DB_SETTINGS)
        # Sanity check: the temp db must have registered itself globally.
        assert get_blob_db() is self.s3db, (get_blob_db(), self.s3db)
def setUpClass(cls):
    """Seed a case index with two recent cases, then create the export
    instance and incremental-export record the tests run against.

    Fix: corrected the typo "elasicsearch" in the skip message.
    """
    super().setUpClass()
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, CASE_INDEX_INFO)
    cls.domain = uuid.uuid4().hex
    now = datetime.utcnow()
    # Two cases, modified 3 and 2 hours ago, so incremental runs have a
    # time range to slice on.
    cases = [
        new_case(domain=cls.domain, foo="apple", bar="banana",
                 server_modified_on=now - timedelta(hours=3)),
        new_case(domain=cls.domain, foo="orange", bar="pear",
                 server_modified_on=now - timedelta(hours=2)),
    ]
    for case in cases:
        send_to_elasticsearch('cases', case.to_json())
    cls.es.indices.refresh(CASE_INDEX_INFO.index)
    # One table exporting the "foo" and "bar" case properties.
    cls.export_instance = CaseExportInstance(
        export_format=Format.UNZIPPED_CSV,
        domain=cls.domain,
        case_type=DEFAULT_CASE_TYPE,
        tables=[
            TableConfiguration(
                label="My table",
                selected=True,
                path=[],
                columns=[
                    ExportColumn(
                        label="Foo column",
                        item=ExportItem(path=[PathNode(name="foo")]),
                        selected=True,
                    ),
                    ExportColumn(
                        label="Bar column",
                        item=ExportItem(path=[PathNode(name="bar")]),
                        selected=True,
                    )
                ])
        ])
    cls.export_instance.save()
    cls.incremental_export = IncrementalExport.objects.create(
        domain=cls.domain,
        name='test_export',
        export_instance_id=cls.export_instance.get_id,
        connection_settings=ConnectionSettings.objects.create(
            domain=cls.domain,
            name='test conn',
            url='http://somewhere',
            auth_type=BASIC_AUTH,
        ))
def setUp(self):
    """Clear cases and rebuild the application index from scratch."""
    super(AppPillowTest, self).setUp()
    FormProcessorTestUtils.delete_all_cases()
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        index_info = APP_INDEX_INFO
        self.es = get_es_new()
        ensure_index_deleted(index_info.index)
        initialize_index_and_mapping(self.es, index_info)
def setUp(self):
    """Fully reinitialize the test pillow's index from a clean slate."""
    pillow = TestElasticPillow(online=False)
    self.pillow = pillow
    self.es = pillow.get_es_new()
    self.index = pillow.es_index
    # Skip when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(self.index)
        completely_initialize_pillow_index(pillow)
def setUp(self):
    """Clear case/ledger state for the domain and rebuild the ledger index.

    Fix: reuse the ES client stored on ``self.elasticsearch`` instead of
    constructing a second client with another ``get_es_new()`` call.
    """
    FormProcessorTestUtils.delete_all_cases(self.domain)
    FormProcessorTestUtils.delete_all_ledgers(self.domain)
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        self.pillow = get_ledger_to_elasticsearch_pillow()
        self.elasticsearch = get_es_new()
        ensure_index_deleted(LEDGER_INDEX_INFO.index)
        initialize_index_and_mapping(self.elasticsearch, LEDGER_INDEX_INFO)
def setUp(self):
    """Give each test a unique domain and fresh copies of every ES index
    listed in ``self.es_index_infos``."""
    super(BaseESAccessorsTest, self).setUp()
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        self.es = get_es_new()
        self._delete_es_indices()
        self.domain = uuid.uuid4().hex
        for info in self.es_index_infos:
            initialize_index_and_mapping(self.es, info)
def setUpClass(cls):
    """Index a three-generation case hierarchy (grandparent -> parent ->
    child, Tyrell family) into a fresh case-search index.

    The child indexes its parent as an extension ('father'), and the
    parent indexes the grandparent as a child index ('mother').
    """
    super(TestFilterDslLookups, cls).setUpClass()
    # Skip the suite when elasticsearch is unreachable.
    with trap_extra_setup(ConnectionError):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, CASE_SEARCH_INDEX_INFO)
    cls.child_case_id = 'margaery'
    cls.parent_case_id = 'mace'
    cls.grandparent_case_id = 'olenna'
    cls.domain = "Tyrell"
    factory = CaseFactory(domain=cls.domain)
    grandparent_case = CaseStructure(
        case_id=cls.grandparent_case_id,
        attrs={
            'create': True,
            'case_type': 'grandparent',
            'update': {
                "name": "Olenna",
                "alias": "Queen of thorns",
                "house": "Tyrell",
            },
        })
    parent_case = CaseStructure(
        case_id=cls.parent_case_id,
        attrs={
            'create': True,
            'case_type': 'parent',
            'update': {
                "name": "Mace",
                "house": "Tyrell",
            },
        },
        # Parent points up to the grandparent via the 'mother' index.
        indices=[CaseIndex(
            grandparent_case,
            identifier='mother',
            relationship='child',
        )])
    child_case = CaseStructure(
        case_id=cls.child_case_id,
        attrs={
            'create': True,
            'case_type': 'child',
            'update': {
                "name": "Margaery",
                "house": "Tyrell",
            },
        },
        # Child is an extension of the parent via the 'father' index.
        indices=[CaseIndex(
            parent_case,
            identifier='father',
            relationship='extension',
        )],
    )
    # Creating the child transitively creates the whole hierarchy; index
    # every resulting case into case search.
    for case in factory.create_or_update_cases([child_case]):
        send_to_elasticsearch('case_search',
                              transform_case_for_elasticsearch(case.to_json()))
    # Refresh so the cases are queryable right away.
    cls.es.indices.refresh(CASE_SEARCH_INDEX_INFO.index)
def setUp(self):
    """Bootstrap a kafka UCR pillow (case-sql topic) over the related-doc
    data source."""
    config = get_data_source_with_related_doc_type()
    config.save()
    self.config = config
    self.adapter = get_indicator_adapter(config)
    self.pillow = get_kafka_ucr_pillow(topics=['case-sql'])
    self.pillow.bootstrap(configs=[config])
    # Skip the test when kafka is unavailable.
    with trap_extra_setup(KafkaUnavailableError):
        self.pillow.get_change_feed().get_latest_offsets()
def setUpClass(cls):
    """Create the call-center test fixtures: a domain with the feature
    flag on, a web user, two indexed case-sharing groups, four
    case-sharing locations, and three indexed mobile workers.

    Fix: corrected the typo "elasicsearch" in the skip message.
    """
    super(CallCenterLocationOwnerOptionsViewTest, cls).setUpClass()
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        es = get_es_new()
        ensure_index_deleted(USER_INDEX_INFO.index)
        ensure_index_deleted(GROUP_INDEX_INFO.index)
        initialize_index_and_mapping(es, USER_INDEX_INFO)
        initialize_index_and_mapping(es, GROUP_INDEX_INFO)

    # Create domain
    cls.domain = create_domain(TEST_DOMAIN)
    cls.domain.save()
    CALL_CENTER_LOCATION_OWNERS.set(cls.domain.name, True, NAMESPACE_DOMAIN)

    cls.username = "******"
    cls.password = "******"
    cls.web_user = WebUser.create(cls.domain.name, cls.username, cls.password)
    cls.web_user.save()

    # Create case sharing groups
    cls.groups = []
    for i in range(2):
        group = Group(domain=TEST_DOMAIN, name="group{}".format(i), case_sharing=True)
        group.save()
        send_to_elasticsearch('groups', group.to_json())
        cls.groups.append(group)
    es.indices.refresh(GROUP_INDEX_INFO.index)
    cls.group_ids = {g._id for g in cls.groups}

    # Create locations
    LocationType.objects.get_or_create(
        domain=cls.domain.name,
        name=LOCATION_TYPE,
        shares_cases=True,
    )
    cls.locations = [
        make_loc('loc{}'.format(i), type=LOCATION_TYPE, domain=TEST_DOMAIN)
        for i in range(4)
    ]
    cls.location_ids = {l._id for l in cls.locations}

    # Create users
    cls.users = [
        CommCareUser.create(TEST_DOMAIN, 'user{}'.format(i), '***')
        for i in range(3)
    ]
    for user in cls.users:
        send_to_elasticsearch('users', user.to_json())
    es.indices.refresh(USER_INDEX_INFO.index)
    cls.user_ids = {u._id for u in cls.users}
def setUp(self):
    """Wire up change-feed processing and fresh case + case-search indices."""
    super(CasePillowTest, self).setUp()
    self.process_case_changes = process_pillow_changes('DefaultChangeFeedPillow')
    self.process_case_changes.add_pillow('case-pillow', {'skip_ucr': True})
    FormProcessorTestUtils.delete_all_cases()
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        self.elasticsearch = get_es_new()
        for info in (CASE_INDEX_INFO, CASE_SEARCH_INDEX_INFO):
            initialize_index_and_mapping(self.elasticsearch, info)
def setUpClass(cls):
    """Save the project domain and rebuild the case-search index."""
    cls.project = Domain(name=cls.domain)
    cls.project.save()
    # Skip the suite when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(CASE_SEARCH_INDEX)
        client = get_es_new()
        initialize_index_and_mapping(client, CASE_SEARCH_INDEX_INFO)
        cls.es_client = client
def setUp(self):
    """Prepare a case-search index and a case factory for 'naboo'."""
    super().setUp()
    self.es = get_es_new()
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        initialize_index_and_mapping(self.es, CASE_SEARCH_INDEX_INFO)
        self.domain = 'naboo'
        self.factory = CaseFactory(self.domain)
def setUp(self):
    """Clear ledger/case state for the domain and rebuild the ledger index.

    Fix: reuse the ES client stored on ``self.elasticsearch`` instead of
    constructing a second client with another ``get_es_new()`` call.
    """
    super(LedgerPillowTestCouch, self).setUp()
    FormProcessorTestUtils.delete_all_ledgers(self.domain)
    FormProcessorTestUtils.delete_all_cases(self.domain)
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        self.pillow = get_ledger_to_elasticsearch_pillow()
        self.elasticsearch = get_es_new()
        ensure_index_deleted(LEDGER_INDEX_INFO.index)
        initialize_index_and_mapping(self.elasticsearch, LEDGER_INDEX_INFO)
def setUp(self):
    """Create a fake couch db, a kafka consumer on the case topic, and a
    change-feed pillow over the fake db."""
    fake_couch = FakeCouchDb()
    fake_couch.dbname = 'test-couchdb'
    self._fake_couch = fake_couch
    # Skip the test when kafka is unavailable.
    with trap_extra_setup(KafkaUnavailableError):
        self.consumer = KafkaConsumer(
            topics.CASE,
            group_id='test-consumer',
            bootstrap_servers=[settings.KAFKA_URL],
            consumer_timeout_ms=100,
        )
        self.pillow = get_change_feed_pillow_for_db(
            'fake-changefeed-pillow-id', fake_couch)
def setUp(self):
    """Create a fake couch db, a kafka consumer on the case topic, and a
    ChangeFeedPillow over the fake db.

    Fix: renamed the first parameter from ``cls`` to ``self`` -- setUp is
    an instance method and receives the test instance, so calling the
    parameter ``cls`` was misleading (behavior is unchanged).
    """
    self._fake_couch = FakeCouchDb()
    self._fake_couch.dbname = 'test-couchdb'
    # Skip the test when kafka is unavailable.
    with trap_extra_setup(KafkaUnavailableError):
        self.consumer = KafkaConsumer(
            topics.CASE,
            group_id='test-consumer',
            bootstrap_servers=[settings.KAFKA_URL],
            consumer_timeout_ms=100,
        )
        self.pillow = ChangeFeedPillow(
            self._fake_couch, kafka=get_kafka_client(), checkpoint=None)
def setUp(self):
    """Fresh unique domain plus index setup; ``self.es_index_info`` may be
    a single index info or a list/tuple of them."""
    super(BaseESAccessorsTest, self).setUp()
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        self.es = get_es_new()
        self._delete_es_index()
        self.domain = uuid.uuid4().hex
        # Normalize to an iterable so one code path handles both shapes.
        infos = (self.es_index_info
                 if isinstance(self.es_index_info, (list, tuple))
                 else [self.es_index_info])
        for info in infos:
            initialize_index_and_mapping(self.es, info)
def setUp(self):
    """Create a test user and a mocked POST request bound to that user,
    then reset the ES index and build the pillow under test."""
    self.user = CommCareUser.create(DOMAIN, 'user1', '***')
    request = Mock()
    request.method = 'POST'
    request.POST = {}
    request.project.commtrack_enabled = False
    request.couch_user = self.user.user_id
    self.request = request
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(self.es_index)
        self.pillow = self.get_pillow()
def setUpClass(cls):
    """Bootstrap an asynchronous UCR pillow for the related-doc data source."""
    super(AsyncIndicatorTest, cls).setUpClass()
    config = get_data_source_with_related_doc_type()
    config.asynchronous = True
    config.save()
    cls.config = config
    cls.adapter = get_indicator_adapter(config)
    cls.pillow = get_kafka_ucr_pillow()
    cls.pillow.bootstrap(configs=[config])
    # Skip the suite when kafka is unavailable.
    with trap_extra_setup(KafkaUnavailableError):
        cls.pillow.get_change_feed().get_latest_offsets()
def get_test_kafka_consumer(*topics):
    """Return a KafkaConsumer subscribed to *topics*, or conditionally
    skip the test if kafka is not available."""
    with trap_extra_setup(KafkaUnavailableError):
        return KafkaConsumer(
            *topics,
            group_id='test-{}'.format(uuid.uuid4().hex),
            bootstrap_servers=[settings.KAFKA_URL],
            consumer_timeout_ms=100,
        )
def test_request_succeeded(self):
    """An admin web user with correct credentials gets a 200 response."""
    # Skip when ES is unreachable; the case index backs the queried view.
    with trap_extra_setup(ConnectionError):
        es = get_es_new()
        initialize_index_and_mapping(es, CASE_INDEX_INFO)
        self.addCleanup(self._ensure_case_index_deleted)
        self.web_user.set_role(self.domain.name, 'admin')
        self.web_user.save()
        credentials = self._get_correct_credentials()
        response = self._execute_query(credentials)
        self.assertEqual(response.status_code, 200)
def setUp(self):
    """Create a user, a mocked POST request, and a clean xform index."""
    super(ReportUtilTests, self).setUp()
    self.user = CommCareUser.create(DOMAIN, 'user1', '***')
    request = Mock()
    request.method = 'POST'
    request.POST = {}
    request.project.commtrack_enabled = False
    request.couch_user = self.user.user_id
    self.request = request
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(XFORM_INDEX_INFO.index)
        initialize_index_and_mapping(get_es_new(), XFORM_INDEX_INFO)
def get_test_kafka_consumer(topic):
    """Return a KafkaConsumer for ``topic``, or conditionally skip the
    test if kafka is not available."""
    with trap_extra_setup(KafkaUnavailableError):
        consumer_kwargs = {
            'group_id': 'test-{}'.format(uuid.uuid4().hex),
            'bootstrap_servers': [settings.KAFKA_URL],
            'consumer_timeout_ms': 100,
        }
        return KafkaConsumer(topic, **consumer_kwargs)
def setUp(self):
    """Install a temporary S3 blob db, wipe form/case/ledger state, and
    create a fresh couch-backed domain for the migration tests."""
    super(BaseMigrationTestCase, self).setUp()
    # Skip the test when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        self.s3db = TemporaryS3BlobDB(settings.S3_BLOB_DB_SETTINGS)
        # Sanity check: the temp db must have registered itself globally.
        assert get_blob_db() is self.s3db, (get_blob_db(), self.s3db)
    FormProcessorTestUtils.delete_all_cases_forms_ledgers()
    self.domain_name = uuid.uuid4().hex
    self.domain = create_domain(self.domain_name)
    # all new domains are set complete when they are created
    DomainMigrationProgress.objects.filter(domain=self.domain_name).delete()
    self.assertFalse(should_use_sql_backend(self.domain_name))
def setUp(self):
    """Create a domain, an ADVANCED subscription, and an admin web user;
    then reset the index and build the pillow under test."""
    super(ExportTest, self).setUp()
    self._clear_docs()
    self.domain = create_domain(DOMAIN)
    self.setup_subscription(self.domain.name, SoftwarePlanEdition.ADVANCED)
    user = WebUser.create(None, "test", "foobar")
    user.add_domain_membership(DOMAIN, is_admin=True)
    user.save()
    self.couch_user = user
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        ensure_index_deleted(self.es_index)
        self.pillow = self.get_pillow()
def setUp(self):
    """Create a domain, an ADVANCED subscription, and an admin web user;
    then rebuild the xform index from scratch."""
    super(ExportTest, self).setUp()
    self._clear_docs()
    self.domain = create_domain(DOMAIN)
    self.setup_subscription(self.domain.name, SoftwarePlanEdition.ADVANCED)
    user = WebUser.create(None, "test", "foobar")
    user.add_domain_membership(DOMAIN, is_admin=True)
    user.save()
    self.couch_user = user
    # Skip the test when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        self.es = get_es_new()
        ensure_index_deleted(XFORM_INDEX_INFO.index)
        initialize_index_and_mapping(self.es, XFORM_INDEX_INFO)
def setUp(self):
    """Create a fake couch db (with a 'real' db name), a kafka consumer on
    the case topic, and a change-feed pillow over the fake db."""
    fake_couch = FakeCouchDb()
    # Use a 'real' db name so later tests are unaffected -- specifically
    # KafkaChangeFeedTest.test_multiple_topics_with_partial_checkpoint.
    fake_couch.dbname = 'test_commcarehq'
    self._fake_couch = fake_couch
    # Skip the test when kafka is unavailable.
    with trap_extra_setup(KafkaUnavailableError):
        self.consumer = KafkaConsumer(
            topics.CASE,
            group_id='test-consumer',
            bootstrap_servers=[settings.KAFKA_URL],
            consumer_timeout_ms=100,
        )
        self.pillow = get_change_feed_pillow_for_db(
            'fake-changefeed-pillow-id', fake_couch)
def setUpClass(cls):
    """Create the call-center test fixtures: a domain with the feature
    flag on, a web user, two indexed case-sharing groups, four
    case-sharing locations, and three indexed mobile workers.

    Fix: corrected the typo "elasicsearch" in the skip message.
    """
    super(CallCenterLocationOwnerOptionsViewTest, cls).setUpClass()
    with trap_extra_setup(ConnectionError, msg="cannot connect to elasticsearch"):
        es = get_es_new()
        ensure_index_deleted(USER_INDEX_INFO.index)
        ensure_index_deleted(GROUP_INDEX_INFO.index)
        initialize_index_and_mapping(es, USER_INDEX_INFO)
        initialize_index_and_mapping(es, GROUP_INDEX_INFO)

    # Create domain
    cls.domain = create_domain(TEST_DOMAIN)
    cls.domain.save()
    CALL_CENTER_LOCATION_OWNERS.set(cls.domain.name, True, NAMESPACE_DOMAIN)

    cls.username = "******"
    cls.password = "******"
    cls.web_user = WebUser.create(cls.domain.name, cls.username, cls.password)
    cls.web_user.save()

    # Create case sharing groups
    cls.groups = []
    for i in range(2):
        group = Group(domain=TEST_DOMAIN, name="group{}".format(i), case_sharing=True)
        group.save()
        send_to_elasticsearch('groups', group.to_json())
        cls.groups.append(group)
    es.indices.refresh(GROUP_INDEX_INFO.index)
    cls.group_ids = {g._id for g in cls.groups}

    # Create locations
    LocationType.objects.get_or_create(
        domain=cls.domain.name,
        name=LOCATION_TYPE,
        shares_cases=True,
    )
    cls.locations = [
        make_loc('loc{}'.format(i), type=LOCATION_TYPE, domain=TEST_DOMAIN)
        for i in range(4)
    ]
    cls.location_ids = {l._id for l in cls.locations}

    # Create users
    cls.users = [CommCareUser.create(TEST_DOMAIN, 'user{}'.format(i), '***')
                 for i in range(3)]
    for user in cls.users:
        send_to_elasticsearch('users', user.to_json())
    es.indices.refresh(USER_INDEX_INFO.index)
    cls.user_ids = {u._id for u in cls.users}
def setUpClass(cls):
    """Index two applications (langs en/es and fr) through the app pillow."""
    cls.domain = 'test-languages'
    cls.pillow = AppPillow(online=False)
    # Skip the suite when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        completely_initialize_pillow_index(cls.pillow)
        app1 = Application.new_app(cls.domain, 'My Application 1', APP_V2)
        app1.langs = ['en', 'es']
        app1.save()
        cls.pillow.send_robust(app1.to_json())
        cls.app1 = app1
        app2 = Application.new_app(cls.domain, 'My Application 2', APP_V2)
        app2.langs = ['fr']
        app2.save()
        cls.pillow.send_robust(app2.to_json())
        cls.app2 = app2
        # Refresh so both apps are visible to queries immediately.
        cls.pillow.get_es_new().indices.refresh(cls.pillow.es_index)
def setUpClass(cls):
    """Index two applications (langs en/es and fr) into a fresh app index."""
    cls.domain = 'test-languages'
    # Skip the suite when ES is unreachable.
    with trap_extra_setup(ConnectionError):
        cls.es = get_es_new()
        initialize_index_and_mapping(cls.es, APP_INDEX_INFO)
        app1 = Application.new_app(cls.domain, 'My Application 1')
        app1.langs = ['en', 'es']
        app1.save()
        send_to_elasticsearch('apps', app1.to_json())
        cls.app1 = app1
        app2 = Application.new_app(cls.domain, 'My Application 2')
        app2.langs = ['fr']
        app2.save()
        send_to_elasticsearch('apps', app2.to_json())
        cls.app2 = app2
        # Refresh so both apps are visible to queries immediately.
        cls.es.indices.refresh(APP_INDEX_INFO.index)
def setUp(self):
    """Create export docs in a temporary filesystem blob db, then swap in
    a migrating (fs -> S3) db so tests can exercise the migration."""
    # Skip the test when S3 settings are absent.
    with trap_extra_setup(AttributeError, msg="S3_BLOB_DB_SETTINGS not configured"):
        config = settings.S3_BLOB_DB_SETTINGS
        fsdb = TemporaryFilesystemBlobDB()
        # The filesystem db must be active while the docs are written.
        assert get_blob_db() is fsdb, (get_blob_db(), fsdb)
        self.migrate_docs = []
        for i in range(self.test_size):
            export = SavedBasicExport(configuration=_mk_config("config-%s" % i))
            export.save()
            export.set_payload(("content %s" % i).encode('utf-8'))
            self.migrate_docs.append(export)
        s3db = TemporaryS3BlobDB(config)
        self.db = TemporaryMigratingBlobDB(s3db, fsdb)
        # Now the migrating db must be the active one.
        assert get_blob_db() is self.db, (get_blob_db(), self.db)
        BaseMigrationTest.discard_migration_state(self.slug)