def setUp(self):
    """Start a Firefox driver (local or remote Selenium hub) and reset the
    configured search backend (Solr or Elasticsearch) for the test catalog.

    Raises:
        Exception: if SEARCH_TYPE is neither SEARCH_TYPE_SOLR nor SEARCH_TYPE_ES.
    """
    if not SELENIUM_HUB_URL:
        # run test on firefox of this machine.
        self.driver = webdriver.Firefox()
    else:
        # run test on stand alone node machine in docker: selenium-firefox
        self.driver = webdriver.Remote(
            command_executor=SELENIUM_HUB_URL,
            desired_capabilities=DesiredCapabilities.FIREFOX)
    self.driver.implicitly_wait(30)
    self.base_url = BROWSER_HYPERMAP_URL
    self.verificationErrors = []
    self.accept_next_alert = True
    print('> clearing SEARCH_URL={0}'.format(SEARCH_URL))
    if SEARCH_TYPE == SEARCH_TYPE_SOLR:
        self.solr = SolrHypermap()
        # add the schema, then delete existing solr documents.
        # BUG FIX: the original format string had no placeholder, so the
        # SEARCH_URL argument was silently dropped from the message.
        print('> updating schema for SEARCH_URL={0}'.format(SEARCH_URL))
        self.solr.update_schema(catalog=catalog_test_slug)
        self.solr.clear_solr(catalog=catalog_test_slug)
        self.search_engine_endpoint = '{0}/solr/{1}/select'.format(
            SEARCH_URL, catalog_test_slug)
    elif SEARCH_TYPE == SEARCH_TYPE_ES:
        es = ESHypermap()
        # delete ES documents
        es.clear_es()
        self.search_engine_endpoint = '{0}/{1}/_search'.format(
            SEARCH_URL, catalog_test_slug)
    else:
        raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)
def clear_index():
    """Wipe every document from whichever search backend is configured."""
    if SEARCH_TYPE == 'solr':
        LOGGER.debug('Clearing the solr indexes')
        from hypermap.aggregator.solr import SolrHypermap
        SolrHypermap().clear_solr()
    elif SEARCH_TYPE == 'elasticsearch':
        LOGGER.debug('Clearing the ES indexes')
        from hypermap.aggregator.elasticsearch_client import ESHypermap
        ESHypermap().clear_es()
def setUp(self):
    """Log in an admin test client, reset catalog fixtures, and clear the
    configured search backend for the test catalog.

    Raises:
        Exception: if SEARCH_TYPE is not a supported backend.
    """
    self.client = Client()
    admin_user = User.objects.create(username='******')
    admin_user.set_password('admin')
    admin_user.save()
    self.client.login(username="******", password="******")
    # make sure the test catalog exists and no stale layers/services remain
    Catalog.objects.get_or_create(name=catalog_test_slug)
    Layer.objects.all().delete()
    Service.objects.all().delete()
    if SEARCH_TYPE == SEARCH_TYPE_SOLR:
        self.solr = SolrHypermap()
        self.solr.update_schema(catalog=catalog_test_slug)
        self.solr.clear_solr(catalog=catalog_test_slug)
    elif SEARCH_TYPE == SEARCH_TYPE_ES:
        ESHypermap().clear_es()
    else:
        raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)
def setUp(self):
    """Prepare an authenticated test client and a clean search index.

    Creates the admin user, logs the Django test client in, ensures the
    test catalog exists with no leftover layers or services, and empties
    the configured search backend.

    Raises:
        Exception: for an unrecognized SEARCH_TYPE value.
    """
    self.client = Client()
    account = User.objects.create(username='******')
    account.set_password('admin')
    account.save()
    self.client.login(username="******", password="******")
    Catalog.objects.get_or_create(
        name=catalog_test_slug
    )
    # start every test from an empty database of layers and services
    Layer.objects.all().delete()
    Service.objects.all().delete()
    if SEARCH_TYPE == SEARCH_TYPE_SOLR:
        self.solr = SolrHypermap()
        self.solr.update_schema(catalog=catalog_test_slug)
        self.solr.clear_solr(catalog=catalog_test_slug)
    elif SEARCH_TYPE == SEARCH_TYPE_ES:
        ESHypermap().clear_es()
    else:
        raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)
def setUp(self):
    """Start a Firefox driver (local or remote Selenium hub) and reset the
    configured search backend (Solr or Elasticsearch) for the test catalog.

    Raises:
        Exception: if SEARCH_TYPE is neither SEARCH_TYPE_SOLR nor SEARCH_TYPE_ES.
    """
    if not SELENIUM_HUB_URL:
        # run test on firefox of this machine.
        self.driver = webdriver.Firefox()
    else:
        # run test on stand alone node machine in docker: selenium-firefox
        self.driver = webdriver.Remote(
            command_executor=SELENIUM_HUB_URL,
            desired_capabilities=DesiredCapabilities.FIREFOX
        )
    self.driver.implicitly_wait(30)
    self.base_url = BROWSER_HYPERMAP_URL
    self.verificationErrors = []
    self.accept_next_alert = True
    # NOTE: parenthesized single-argument print works identically on
    # Python 2 and 3; the original py2-only print statements are replaced.
    print('> clearing SEARCH_URL={0}'.format(SEARCH_URL))
    if SEARCH_TYPE == SEARCH_TYPE_SOLR:
        self.solr = SolrHypermap()
        # add the schema, then delete existing solr documents.
        # BUG FIX: the original format string had no placeholder, so the
        # SEARCH_URL argument was silently dropped from the message.
        print('> updating schema for SEARCH_URL={0}'.format(SEARCH_URL))
        self.solr.update_schema(catalog=catalog_test_slug)
        self.solr.clear_solr(catalog=catalog_test_slug)
        self.search_engine_endpoint = '{0}/solr/{1}/select'.format(
            SEARCH_URL, catalog_test_slug
        )
    elif SEARCH_TYPE == SEARCH_TYPE_ES:
        es = ESHypermap()
        # delete ES documents
        es.clear_es()
        self.search_engine_endpoint = '{0}/{1}/_search'.format(
            SEARCH_URL, catalog_test_slug
        )
    else:
        raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)
def setUp(self):
    """Build a known four-layer fixture and index it in the search backend.

    Disconnects the post-save signals so saving fixtures does not trigger
    automatic indexing, clears the backend, creates one WMS service with
    four layers (one per bbox quadrant, created 2000-2003), indexes them,
    and prepares ``self.api_url`` / ``self.default_params`` for the tests.

    Raises:
        Exception: if SEARCH_TYPE is neither SEARCH_TYPE_SOLR nor SEARCH_TYPE_ES.
    """
    signals.post_save.disconnect(layer_post_save, sender=Layer)
    signals.post_save.disconnect(service_post_save, sender=Service)
    catalog_test_slug = "hypermap"

    if SEARCH_TYPE == SEARCH_TYPE_SOLR:
        self.solr = SolrHypermap()
        # add the schema.
        # BUG FIX: the original format string had no placeholder, so the
        # SEARCH_URL argument was silently dropped from the message.
        print('> updating schema for SEARCH_URL={0}'.format(SEARCH_URL))
        self.solr.update_schema(catalog=catalog_test_slug)
        # delete solr documents
        print('> clearing SEARCH_URL={0}'.format(SEARCH_URL))
        self.solr.clear_solr(catalog=catalog_test_slug)
        self.search_engine_endpoint = '{0}/solr/{1}/select'.format(
            SEARCH_URL, catalog_test_slug
        )
    elif SEARCH_TYPE == SEARCH_TYPE_ES:
        es = ESHypermap()
        # delete ES documents
        es.clear_es()
        self.search_engine_endpoint = '{0}/{1}/_search'.format(
            SEARCH_URL, catalog_test_slug
        )
    else:
        raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)

    catalog, created = Catalog.objects.get_or_create(
        name=catalog_test_slug
    )
    service = Service(
        url='http://fakeurl.com',
        title='Title',
        type='OGC:WMS',
        catalog=catalog
    )
    service.save()

    def _add_layer(name, bbox_x0, bbox_x1, bbox_y0, bbox_y1, year):
        # Create one fixture layer in the given bbox with a fixed
        # created date (March 1st of `year`); the date is set after the
        # first save so it overrides any auto-assigned value.
        layer = Layer(
            name=name,
            bbox_x0=bbox_x0,
            bbox_x1=bbox_x1,
            bbox_y0=bbox_y0,
            bbox_y1=bbox_y1,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(year, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

    # one layer per quadrant, one per year
    _add_layer('Layer 1', -40.0, -20.0, -40.0, -20.0, 2000)
    _add_layer('Layer 2', -40.0, -20.0, 20.0, 40.0, 2001)
    _add_layer('Layer 3', 20.0, 40.0, 20.0, 40.0, 2002)
    _add_layer('Layer 4', 20.0, 40.0, -40.0, -20.0, 2003)

    # solr have commitWithin 1500.
    # before to proceed with the tests wait for 2 secs.
    # otherwise it will return zero docs in the next test.
    service.index_layers(with_cache=False)
    time.sleep(2)

    self.api_url = "{0}{1}".format(
        settings.SITE_URL, reverse("search_api", args=[catalog_test_slug])
    )
    self.default_params = {
        "search_engine": SEARCH_TYPE,
        "search_engine_endpoint": self.search_engine_endpoint,
        "q_time": "[* TO *]",
        "q_geo": "[-90,-180 TO 90,180]",
        "d_docs_limit": 0,
        "d_docs_page": 1,
        "d_docs_sort": "score"
    }
def clear_es():
    """Delete every document from the Elasticsearch index."""
    # FIX: parenthesized single-argument print works identically on
    # Python 2 and 3; the original py2-only print statement broke under
    # Python 3 and was inconsistent with sibling helpers using print().
    print('Clearing the ES indexes')
    from hypermap.aggregator.elasticsearch_client import ESHypermap
    esobject = ESHypermap()
    esobject.clear_es()
def setUp(self):
    """Build a known four-layer fixture and index it in the search backend.

    Disconnects the post-save signals so saving fixtures does not trigger
    automatic indexing, clears the backend, creates one WMS service with
    four layers (one per bbox quadrant, created 2000-2003), indexes them,
    and prepares ``self.api_url`` / ``self.default_params`` for the tests.

    Raises:
        Exception: if SEARCH_TYPE is neither SEARCH_TYPE_SOLR nor SEARCH_TYPE_ES.
    """
    signals.post_save.disconnect(layer_post_save, sender=Layer)
    signals.post_save.disconnect(service_post_save, sender=Service)
    catalog_test_slug = "hypermap"

    if SEARCH_TYPE == SEARCH_TYPE_SOLR:
        self.solr = SolrHypermap()
        # add the schema.
        # BUG FIXES: the original used py2-only print statements, and the
        # format string had no placeholder so SEARCH_URL was silently
        # dropped; parenthesized single-argument print works on py2 and py3.
        print('> updating schema for SEARCH_URL={0}'.format(SEARCH_URL))
        self.solr.update_schema(catalog=catalog_test_slug)
        # delete solr documents
        print('> clearing SEARCH_URL={0}'.format(SEARCH_URL))
        self.solr.clear_solr(catalog=catalog_test_slug)
        self.search_engine_endpoint = '{0}/solr/{1}/select'.format(
            SEARCH_URL, catalog_test_slug
        )
    elif SEARCH_TYPE == SEARCH_TYPE_ES:
        es = ESHypermap()
        # delete ES documents
        es.clear_es()
        self.search_engine_endpoint = '{0}/{1}/_search'.format(
            SEARCH_URL, catalog_test_slug
        )
    else:
        raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)

    catalog, created = Catalog.objects.get_or_create(
        name=catalog_test_slug
    )
    service = Service(
        url='http://fakeurl.com',
        title='Title',
        type='OGC:WMS',
        catalog=catalog
    )
    service.save()

    def _add_layer(name, bbox_x0, bbox_x1, bbox_y0, bbox_y1, year):
        # Create one fixture layer in the given bbox with a fixed
        # created date (March 1st of `year`); the date is set after the
        # first save so it overrides any auto-assigned value.
        layer = Layer(
            name=name,
            bbox_x0=bbox_x0,
            bbox_x1=bbox_x1,
            bbox_y0=bbox_y0,
            bbox_y1=bbox_y1,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(year, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

    # one layer per quadrant, one per year
    _add_layer('Layer 1', -40.0, -20.0, -40.0, -20.0, 2000)
    _add_layer('Layer 2', -40.0, -20.0, 20.0, 40.0, 2001)
    _add_layer('Layer 3', 20.0, 40.0, 20.0, 40.0, 2002)
    _add_layer('Layer 4', 20.0, 40.0, -40.0, -20.0, 2003)

    # solr have commitWithin 1500.
    # before to proceed with the tests wait for 2 secs.
    # otherwise it will return zero docs in the next test.
    service.index_layers(with_cache=False)
    time.sleep(2)

    self.api_url = "{0}{1}".format(
        settings.SITE_URL, reverse("search_api", args=[catalog_test_slug])
    )
    self.default_params = {
        "search_engine": SEARCH_TYPE,
        "search_engine_endpoint": self.search_engine_endpoint,
        "q_time": "[* TO *]",
        "q_geo": "[-90,-180 TO 90,180]",
        "d_docs_limit": 0,
        "d_docs_page": 1,
        "d_docs_sort": "score"
    }