Example #1
def clear_index():
    if SEARCH_TYPE == 'solr':
        LOGGER.debug('Clearing the solr indexes')
        from hypermap.aggregator.solr import SolrHypermap
        solrobject = SolrHypermap()
        solrobject.clear_solr()
    elif SEARCH_TYPE == 'elasticsearch':
        LOGGER.debug('Clearing the ES indexes')
        from hypermap.aggregator.elasticsearch_client import ESHypermap
        esobject = ESHypermap()
        esobject.clear_es()
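
For context, here is a minimal sketch of how clear_index() could be exposed as a Django management command so the index can be wiped from the command line. The command module, name, and import path are assumptions for illustration, not part of Hypermap itself.

# hypothetical file: hypermap/aggregator/management/commands/clear_index.py
from django.core.management.base import BaseCommand

from hypermap.aggregator.tasks import clear_index  # assumed import path


class Command(BaseCommand):
    help = "Clear the search backend index (Solr or Elasticsearch)"

    def handle(self, *args, **options):
        # delegates to the SEARCH_TYPE dispatch shown above
        clear_index()
        self.stdout.write("Search index cleared")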
Example #3
class SearchApiTestCase(TestCase):
    """
    run me
    python manage.py test hypermap.search_api --settings=hypermap.settings.test --failfast
    """

    def tearDown(self):
        signals.post_save.connect(layer_post_save, sender=Layer)
        signals.post_save.connect(service_post_save, sender=Service)

    def setUp(self):
        signals.post_save.disconnect(layer_post_save, sender=Layer)
        signals.post_save.disconnect(service_post_save, sender=Service)

        catalog_test_slug = "hypermap"

        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            self.solr = SolrHypermap()
            # delete solr documents
            # add the schema
            print('> updating schema for SEARCH_URL={0}'.format(SEARCH_URL))
            self.solr.update_schema(catalog=catalog_test_slug)
            print('> clearing SEARCH_URL={0}'.format(SEARCH_URL))
            self.solr.clear_solr(catalog=catalog_test_slug)

            self.search_engine_endpoint = '{0}/solr/{1}/select'.format(
                SEARCH_URL, catalog_test_slug
            )
        elif SEARCH_TYPE == SEARCH_TYPE_ES:
            es = ESHypermap()
            # delete ES documents
            es.clear_es()
            self.search_engine_endpoint = '{0}/{1}/_search'.format(
                SEARCH_URL, catalog_test_slug
            )
        else:
            raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)

        catalog, created = Catalog.objects.get_or_create(
            name=catalog_test_slug
        )

        service = Service(
            url='http://fakeurl.com',
            title='Title',
            type='OGC:WMS',
            catalog=catalog
        )
        service.save()

        layer = Layer(
            name='Layer 1',
            bbox_x0=-40.0,
            bbox_x1=-20.0,
            bbox_y0=-40.0,
            bbox_y1=-20.0,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(2000, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

        layer = Layer(
            name='Layer 2',
            bbox_x0=-40.0,
            bbox_x1=-20.0,
            bbox_y0=20.0,
            bbox_y1=40.0,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(2001, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

        layer = Layer(
            name='Layer 3',
            bbox_x0=20.0,
            bbox_x1=40.0,
            bbox_y0=20.0,
            bbox_y1=40.0,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(2002, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

        layer = Layer(
            name='Layer 4',
            bbox_x0=20.0,
            bbox_x1=40.0,
            bbox_y0=-40.0,
            bbox_y1=-20.0,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(2003, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

        # Solr has commitWithin set to 1500 ms.
        # Wait 2 seconds before proceeding with the tests,
        # otherwise the next test will return zero docs.
        service.index_layers(with_cache=False)
        time.sleep(2)

        self.api_url = "{0}{1}".format(
            settings.SITE_URL, reverse("search_api", args=[catalog_test_slug])
        )
        self.default_params = {
            "search_engine": SEARCH_TYPE,
            "search_engine_endpoint": self.search_engine_endpoint,
            "q_time": "[* TO *]",
            "q_geo": "[-90,-180 TO 90,180]",
            "d_docs_limit": 0,
            "d_docs_page": 1,
            "d_docs_sort": "score"
        }

    def test_catalogs(self):
        print('> testing catalogs')
        url = settings.SITE_URL + reverse("catalog-list")
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)
        catalogs = json.loads(res.content)
        self.assertEqual(len(catalogs), Catalog.objects.all().count())

    def test_all_match_docs(self):
        print('> testing match all docs')
        params = self.default_params
        print("searching on [{}]".format(self.api_url))
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], Layer.objects.all().count())

    def test_q_text(self):
        print('> testing q text')
        layer = Layer.objects.all()[0]
        params = self.default_params
        params["q_text"] = "title:\"{0}\"".format(layer.title)
        params["d_docs_limit"] = 100

        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)

        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 1)

        for doc in results.get("d.docs", []):
            self.assertEqual(doc["title"], layer.title)

    def test_q_geo(self):
        print('> testing q geo')
        params = self.default_params

        # top right square
        params["q_geo"] = "[0,0 TO 30,30]"
        results = self.client.get(self.api_url, params)

        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 1)

        # bottom left square
        params["q_geo"] = "[-30,-30 TO 0,0]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 1)

        # big square
        params["q_geo"] = "[-30,-30 TO 30,30]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 4)

        # center square, where there are no layers
        params["q_geo"] = "[-5,-5 TO 5,5]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 0)

        # bad format
        params["q_geo"] = "[-5,-5 5,5]"
        results = self.client.get(self.api_url, params)
        # validate the format
        print('> testing q geo (format validations)')
        self.assertEqual(results.status_code, 400)

    def test_q_time(self):
        print('> testing q time (format validations)')
        params = self.default_params

        # test validations
        params["q_time"] = "[2000-01-01 - 2001-01-01T00:00:00]"
        results = self.client.get(self.api_url, params)
        # requires [X TO Y]
        self.assertEqual(400, results.status_code)

        print('> testing q time')
        # test asterisks
        # all times
        params["q_time"] = "[* TO *]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        # all records
        self.assertEqual(results["a.matchDocs"], Layer.objects.all().count())

        # test range
        # entire year 2000
        params["q_time"] = "[2000-01-01 TO 2001-01-01T00:00:00]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        # 1 in year 2000
        self.assertEqual(results["a.matchDocs"], 1)

        # test complete min and max when q time is asterisks
        params["q_time"] = "[* TO *]"
        params["a_time_limit"] = 1
        if SEARCH_TYPE == SEARCH_TYPE_ES:
            # TODO: a_time_limit is WIP in ES; currently it requires an a_time_gap to be completed.
            params["a_time_gap"] = "P1Y"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], Layer.objects.all().count())

        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            # TODO: fix on Solr or ES? see next TODO.
            # * TO * to first date and last date.
            self.assertEqual(results["a.time"]["start"].upper(), "2000-03-01T00:00:00Z")
            self.assertEqual(results["a.time"]["end"].upper(), "2003-03-01T00:00:00Z")
        else:
            # TODO: ES and Solr return facets split yearly by default. The first record is on 2000-03;
            # ES facets are returned from 2000-01... to 2003-01,
            # while Solr facets are returned from 2000-03... to 2003-03.
            # Solr data seems more accurate, since the first and last Layers are in month 03.
            # Example: http://panchicore.d.pr/12ESP
            # * TO * to first date and last date.
            self.assertEqual(results["a.time"]["start"].upper(), "2000-01-01T00:00:00Z")
            self.assertEqual(results["a.time"]["end"].upper(), "2003-01-01T00:00:00Z")

        # test facets
        params["q_time"] = "[2000 TO 2022]"
        params["a_time_limit"] = 1
        params["a_time_gap"] = "P1Y"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], Layer.objects.all().count())
        # 2000 is expanded to the full datetime format
        self.assertEqual(results["a.time"]["start"].upper(), "2000-01-01T00:00:00Z")
        # 2022 is expanded to the full datetime format

        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            # TODO: Solr builds the entire time span (2000 to 2022) and returns facets with empty entries;
            # fix by removing facets with zero counts?
            self.assertEqual(results["a.time"]["end"].upper(), "2022-01-01T00:00:00Z")
        else:
            self.assertEqual(results["a.time"]["end"].upper(), "2003-01-01T00:00:00Z")
        # the facet counts include all facets, excluding those before 2000
        self.assertEqual(len(results["a.time"]["counts"]), Layer.objects.all().count())

    def test_utilities(self):
        print('> testing utilities functions')
        # test_parse_datetime_range
        start, end = utils.parse_datetime_range("[2013-03-01 TO 2014-05-02T23:00:00]")
        self.assertTrue(start.get("is_common_era"))
        self.assertEqual(start.get("parsed_datetime").year, 2013)
        self.assertEqual(start.get("parsed_datetime").month, 3)
        self.assertEqual(start.get("parsed_datetime").day, 1)
        self.assertTrue(end.get("is_common_era"))
        self.assertEqual(end.get("parsed_datetime").year, 2014)
        self.assertEqual(end.get("parsed_datetime").month, 5)
        self.assertEqual(end.get("parsed_datetime").day, 2)
        self.assertEqual(end.get("parsed_datetime").hour, 23)
        self.assertEqual(end.get("parsed_datetime").minute, 0)
        self.assertEqual(end.get("parsed_datetime").second, 0)

        start, end = utils.parse_datetime_range("[-500000000 TO 2014-05-02T23:00:00]")
        self.assertFalse(start.get("is_common_era"))
        self.assertEqual(start.get("parsed_datetime"), "-500000000-01-01T00:00:00Z")

        start, end = utils.parse_datetime_range("[* TO *]")
        self.assertTrue(start.get("is_common_era"))
        self.assertEqual(start.get("parsed_datetime"), None)
        self.assertEqual(end.get("parsed_datetime"), None)

        # test_parse_ISO8601
        quantity, units = utils.parse_ISO8601("P3D")
        self.assertEqual(quantity, 3)
        self.assertEqual(units[0], "DAYS")

        # test_gap_to_sorl
        value = utils.gap_to_sorl("P3D")
        self.assertEqual(value, "+3DAYS")

        # test_parse_geo_box
        value = utils.parse_geo_box("[-90,-180 TO 90,180]")
        self.assertEqual(value.bounds[0], -90)
        self.assertEqual(value.bounds[1], -180)
        self.assertEqual(value.bounds[2], 90)
        self.assertEqual(value.bounds[3], 180)

        # test_request_time_facet
        d = utils.request_time_facet("x", "[2000 TO 2014-01-02T11:12:13]", None, 1000)
        self.assertEqual(type(d), dict)
        self.assertEqual(d['f.x.facet.range.start'], '2000-01-01T00:00:00Z')
        self.assertEqual(d['f.x.facet.range.end'], '2014-01-02T11:12:13Z')
        self.assertEqual(d['f.x.facet.range.gap'], '+6DAYS')
        self.assertEqual(d['facet.range'], 'x')

        d = utils.request_time_facet("y", "[-5000000 TO 2016]", "P1D", 1)
        self.assertEqual(d['f.y.facet.range.start'], '-5000000-01-01T00:00:00Z')
        self.assertEqual(d['f.y.facet.range.end'], '2016-01-01T00:00:00Z')
        self.assertEqual(d['f.y.facet.range.gap'], '+1DAYS')
        self.assertEqual(d['facet.range'], 'y')
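
# Note: the Search API parameters exercised above (q_time, q_geo) share a
# "[start TO end]" range convention. A minimal illustrative parser for that
# convention follows; it is a sketch of the idea only, not Hypermap's
# utils.parse_datetime_range or parse_geo_box.
def split_range(expr):
    # "[2000-01-01 TO 2001-01-01]" -> ("2000-01-01", "2001-01-01")
    if not (expr.startswith("[") and expr.endswith("]") and " TO " in expr):
        raise ValueError("expected '[start TO end]', got %r" % expr)
    start, _, end = expr[1:-1].partition(" TO ")
    return start, end


assert split_range("[* TO *]") == ("*", "*")
assert split_range("[-90,-180 TO 90,180]") == ("-90,-180", "90,180")
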
class TestBrowser(unittest.TestCase):
    def setUp(self):
        if not SELENIUM_HUB_URL:
            # run the test with Firefox on this machine.
            self.driver = webdriver.Firefox()
        else:
            # run the test on a standalone Firefox node in Docker: selenium-firefox
            self.driver = webdriver.Remote(
                command_executor=SELENIUM_HUB_URL,
                desired_capabilities=DesiredCapabilities.FIREFOX
            )
        self.driver.implicitly_wait(30)
        self.base_url = BROWSER_HYPERMAP_URL
        self.verificationErrors = []
        self.accept_next_alert = True

        print('> clearing SEARCH_URL={0}'.format(SEARCH_URL))
        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            self.solr = SolrHypermap()
            # delete solr documents
            # add the schema
            print('> updating schema for SEARCH_URL={0}'.format(SEARCH_URL))
            self.solr.update_schema(catalog=catalog_test_slug)
            self.solr.clear_solr(catalog=catalog_test_slug)

            self.search_engine_endpoint = '{0}/solr/{1}/select'.format(
                SEARCH_URL, catalog_test_slug
            )
        elif SEARCH_TYPE == SEARCH_TYPE_ES:
            es = ESHypermap()
            # delete ES documents
            es.clear_es()
            self.search_engine_endpoint = '{0}/{1}/_search'.format(
                SEARCH_URL, catalog_test_slug
            )
        else:
            raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)

    def test_browser(self):

        ENDPOINT_FILE = os.path.join("/usr/src/app",
                                     "hypermap",
                                     "tests",
                                     "mesonet.agron.iastate.edu.txt")

        print ""
        print ">>> with env:"
        print "REGISTRY_SKIP_CELERY: %s" % settings.REGISTRY_SKIP_CELERY
        print "REGISTRY_LIMIT_LAYERS: %s" % settings.REGISTRY_LIMIT_LAYERS
        print "REGISTRY_CHECK_PERIOD: %s" % settings.REGISTRY_CHECK_PERIOD
        print ""
        print "SELENIUM_HUB_URL: %s" % SELENIUM_HUB_URL
        print "BROWSER_HYPERMAP_URL: %s" % BROWSER_HYPERMAP_URL
        print "BROWSER_SEARCH_URL: %s" % BROWSER_SEARCH_URL
        print "BROWSER_MAPLOOM_URL: %s" % BROWSER_MAPLOOM_URL
        print "WAIT_FOR_CELERY_JOB_PERIOD: %s" % WAIT_FOR_CELERY_JOB_PERIOD
        print "ENDPOINT FILE: %s" % ENDPOINT_FILE
        print ""
        print "Starting..."

        driver = self.driver
        time.sleep(3)

        driver.get(self.base_url + "/admin/login/?next=/admin/")
        print(driver.current_url)
        driver.find_element_by_id("id_password").clear()
        driver.find_element_by_id("id_password").send_keys("admin")
        driver.find_element_by_id("id_username").clear()
        driver.find_element_by_id("id_username").send_keys("admin")
        driver.find_element_by_css_selector("input[type=\"submit\"]").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Periodic tasks").click()
        print(driver.current_url)
        print("> assert 3 periodic tasks, which means beat is alive.")
        self.assertEqual("3 periodic tasks",
                         driver.find_element_by_css_selector(
                             "p.paginator").text)
        driver.find_element_by_link_text("Home").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Endpoint lists").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Add endpoint list").click()
        print(driver.current_url)
        print("> uploading Endpoint List...")
        driver.find_element_by_id("id_upload").clear()
        driver.find_element_by_id("id_upload").send_keys(ENDPOINT_FILE)
        driver.find_element_by_name("_save").click()
        print(driver.current_url)

        print("> waiting {0} seconds for celery to do the job...".format(
            WAIT_FOR_CELERY_JOB_PERIOD
        ))
        time.sleep(WAIT_FOR_CELERY_JOB_PERIOD)

        driver.find_element_by_link_text("Aggregator").click()
        time.sleep(1)
        print(driver.current_url)
        driver.find_element_by_link_text("Endpoints").click()
        print(driver.current_url)
        print("> assert Endpoint created.")
        time.sleep(1)
        self.assertEqual(
            "http://mesonet.agron.iastate.edu/cgi-bin/wms/us/wwa.cgi",
            driver.find_element_by_link_text(
                "http://mesonet.agron.iastate.edu/cgi-bin/wms/us/wwa.cgi").text)
        driver.find_element_by_link_text(
            "http://mesonet.agron.iastate.edu/cgi-bin/wms/us/wwa.cgi").click()
        # self.assertEqual("1 service/s created", driver.find_element_by_id("id_message").text)
        driver.find_element_by_link_text("Endpoints").click()
        print(driver.current_url)
        time.sleep(1)
        driver.find_element_by_link_text("Aggregator").click()
        print(driver.current_url)
        time.sleep(1)
        driver.find_element_by_link_text("Services").click()
        print(driver.current_url)
        print("> assert 1 Service created.")
        time.sleep(1)
        self.assertEqual("1 service", driver.find_element_by_css_selector(
            "p.paginator").text)
        self.assertEqual(
            "http://mesonet.agron.iastate.edu/cgi-bin/wms/us/wwa.cgi",
            driver.find_element_by_css_selector("td.field-url").text)
        driver.find_element_by_link_text("1").click()
        print(driver.current_url)
        print("> assert Service details.")
        time.sleep(1)
        try:
            self.assertEqual("IEM NWS Warnings WMS Service",
                             driver.find_element_by_id(
                                 "id_title").get_attribute("value"))
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        driver.find_element_by_link_text("Services").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Aggregator").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Layers").click()
        print(driver.current_url)
        print("> assert 3 layers created.")
        time.sleep(1)
        self.assertEqual("3 layers", driver.find_element_by_css_selector(
            "p.paginator").text)
        driver.get(self.base_url + "/registry/")
        print(driver.current_url)
        print("> go to /registry/.")

        for i in range(1, 11):
            print("> try assert checks count > 0. (%i of 10)" % i)
            try:
                self.assertNotEqual("0", driver.find_element_by_xpath(
                    "//td[4]").text)
                print("> found")
                break
            except AssertionError:
                print("> wait and reload page")
                time.sleep(10)
                driver.get(self.base_url + "/registry/")

        try:
            self.assertNotEqual("0",
                                driver.find_element_by_xpath("//td[4]").text)
        except AssertionError as e:
            self.verificationErrors.append(str(e))

        driver.get("{0}/hypermap/_count".format(BROWSER_SEARCH_URL))
        print(driver.current_url)
        time.sleep(2)

        for i in range(1, 11):
            print("> assert layers indexed are 3. (%i of 10)" % i)
            try:
                self.assertRegexpMatches(
                    driver.find_element_by_css_selector("pre").text,
                    "^\\{\"count\":3[\\s\\S]*$")
                print("> found")
                break
            except AssertionError:
                print("> wait and reload page")
                time.sleep(10)
                driver.refresh()

        self.assertRegexpMatches(
            driver.find_element_by_css_selector("pre").text,
            "^\\{\"count\":3[\\s\\S]*$")

        driver.get(self.base_url + "/registry/")
        print(driver.current_url)
        driver.find_element_by_link_text(
            "IEM NWS Warnings WMS Service").click()
        print(driver.current_url)
        print("> remove checks.")
        driver.find_element_by_name("remove").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Home").click()
        print(driver.current_url)
        print("> assert checks = 0.")
        self.assertEqual("0", driver.find_element_by_xpath("//td[4]").text)
        driver.find_element_by_link_text(
            "IEM NWS Warnings WMS Service").click()
        print(driver.current_url)
        print("> trigger check.")
        driver.find_element_by_name("check").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Home").click()
        print(driver.current_url)

        for i in range(1, 11):
            try:
                print("> assert checks = 1. (%i of 10)" % i)
                self.assertTrue(
                    int(driver.find_element_by_xpath("//td[4]").text) > 0)
                print("> found")
                break
            except AssertionError:
                print("> wait and reload page")
                time.sleep(10)
                driver.refresh()

        driver.find_element_by_link_text(
            "IEM NWS Warnings WMS Service").click()
        print(driver.current_url)
        driver.find_element_by_link_text("wwa").click()
        print(driver.current_url)
        print("> remove checks from Layer.")
        driver.find_element_by_name("remove").click()
        print(driver.current_url)
        print("> assert text [No checks performed so far].")
        self.assertEqual("No checks performed so far",
                         driver.find_element_by_xpath("//tr[11]/td[2]").text)
        print("> check Layer.")
        driver.find_element_by_name("check").click()
        print(driver.current_url)

        for i in range(1, 11):
            try:
                print("> assert text [Total Checks: N>0]. (%i of 10)" % i)
                src = driver.page_source
                text_found_TOTAL_CHECKS_LTE_1 = re.search(
                    r'Total Checks: (1|2|3|4|5|6|7)', src)
                self.assertNotEqual(text_found_TOTAL_CHECKS_LTE_1, None)
                print("> found")
                break
            except AssertionError:
                print("> wait and reload page")
                time.sleep(10)
                driver.get(driver.current_url)

        src = driver.page_source
        text_found_TOTAL_CHECKS_LTE_1 = re.search(
            r'Total Checks: (1|2|3|4|5|6|7)', src)
        self.assertNotEqual(text_found_TOTAL_CHECKS_LTE_1, None)

        driver.find_element_by_link_text("Home").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Monitor").click()
        print(driver.current_url)
        print("> clean Search index and wait")
        driver.find_element_by_name("clear_index").click()
        print(driver.current_url)
        time.sleep(5)
        driver.get("{0}/hypermap/_count".format(BROWSER_SEARCH_URL))
        print(driver.current_url)
        print("> assert count != 3 layers")
        try:
            self.assertNotRegexpMatches(
                driver.find_element_by_css_selector("pre").text,
                "^\\{\"count\":3[\\s\\S]*$")
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        driver.get(self.base_url + "/registry/")
        print(driver.current_url)
        print("> finish hypermap page")
        print("")

        # TODO: activate this to test MapLoom; right now that app looks very buggy.
        """
        print(">> start maploom")
        driver.get(BROWSER_MAPLOOM_URL)
        print(driver.current_url)
        print(">> open registry modal")
        driver.find_element_by_xpath(
            "//div[@id='pulldown-content']/div[2]/div/div").click()
        print(">> assert Hypermap catalog")
        time.sleep(10)
        self.assertEqual("Hypermap",
                         driver.find_element_by_xpath(
                             "//div[@id='explore']/div/nav/div/form/div/div[2]/select").text)
        print(">> assert [Showing 3 of 3 - Page 1 / 1]")
        self.assertEqual("Showing 3 of 3 - Page 1 / 1".lower(),
                         driver.find_element_by_css_selector(
                             "span.text-muted.ng-binding").text.lower())
        driver.find_element_by_id("text_search_input_exp").clear()
        print(">> search IEM")
        driver.find_element_by_id("text_search_input_exp").send_keys("IEM")
        driver.find_element_by_id("text_search_btn").click()
        time.sleep(10)
        print(">> assert [Showing 1 of 1 - Page 1 / 1]")
        self.assertEqual("Showing 1 of 1 - Page 1 / 1".lower(),
                         driver.find_element_by_css_selector(
                             "span.text-muted.ng-binding").text.lower())
        print(">> click reset")
        driver.find_element_by_name("button").click()
        time.sleep(10)
        print(">> assert [Showing 3 of 3 - Page 1 / 1]")
        self.assertEqual("Showing 3 of 3 - Page 1 / 1".lower(),
                         driver.find_element_by_css_selector(
                             "span.text-muted.ng-binding").text.lower())
        print(">> click on 3 layers to select")
        driver.find_element_by_css_selector("td.ellipsis.ng-binding").click()
        driver.find_element_by_xpath(
            "//div[@id='registry-layers']/div/div/div/div[2]/div[2]/div/table/tbody/tr[3]/td").click()
        driver.find_element_by_xpath(
            "//div[@id='registry-layers']/div/div/div/div[2]/div[2]/div/table/tbody/tr[4]/td").click()
        print(">> click on 3 layers to unselect")
        driver.find_element_by_css_selector("td.ellipsis.ng-binding").click()
        driver.find_element_by_xpath(
            "//div[@id='registry-layers']/div/div/div/div[2]/div[2]/div/table/tbody/tr[3]/td").click()
        driver.find_element_by_xpath(
            "//div[@id='registry-layers']/div/div/div/div[2]/div[2]/div/table/tbody/tr[4]/td").click()
        """

    def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException as e:
            print(e)
            return False
        return True

    def is_alert_present(self):
        try:
            self.driver.switch_to_alert()
        except NoAlertPresentException as e:
            print(e)
            return False
        return True

    def close_alert_and_get_its_text(self):
        try:
            alert = self.driver.switch_to_alert()
            alert_text = alert.text
            if self.accept_next_alert:
                alert.accept()
            else:
                alert.dismiss()
            return alert_text
        finally:
            self.accept_next_alert = True

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)
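
# Note: the polling loops in test_browser retry an assertion up to 10 times,
# sleeping and reloading the page between attempts. The same pattern as a
# small generic helper (a sketch; the helper name and signature are ours,
# not Hypermap's):
import time


def retry_assertion(assertion, on_retry=None, attempts=10, delay=10):
    """Run `assertion` until it stops raising AssertionError."""
    for _ in range(attempts):
        try:
            assertion()
            return True
        except AssertionError:
            time.sleep(delay)
            if on_retry is not None:
                on_retry()
    return False
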
class TestBrowser(unittest.TestCase):
    def setUp(self):
        if not SELENIUM_HUB_URL:
            # run the test with Firefox on this machine.
            self.driver = webdriver.Firefox()
        else:
            # run the test on a standalone Firefox node in Docker: selenium-firefox
            self.driver = webdriver.Remote(
                command_executor=SELENIUM_HUB_URL,
                desired_capabilities=DesiredCapabilities.FIREFOX
            )
        self.driver.implicitly_wait(30)
        self.base_url = BROWSER_HYPERMAP_URL
        self.verificationErrors = []
        self.accept_next_alert = True

        print('> clearing SEARCH_URL={0}'.format(SEARCH_URL))
        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            self.solr = SolrHypermap()
            # delete solr documents
            # add the schema
            print('> updating schema for SEARCH_URL={0}'.format(SEARCH_URL))
            self.solr.update_schema(catalog=catalog_test_slug)
            self.solr.clear_solr(catalog=catalog_test_slug)

            self.search_engine_endpoint = '{0}/solr/{1}/select'.format(
                SEARCH_URL, catalog_test_slug
            )
        elif SEARCH_TYPE == SEARCH_TYPE_ES:
            es = ESHypermap()
            # delete ES documents
            es.clear_es()
            self.search_engine_endpoint = '{0}/{1}/_search'.format(
                SEARCH_URL, catalog_test_slug
            )
        else:
            raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)

    def test_browser(self):

        ENDPOINT_FILE = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                     "mesonet.agron.iastate.edu.txt")

        print ""
        print ">>> with env:"
        print "REGISTRY_SKIP_CELERY: %s" % settings.REGISTRY_SKIP_CELERY
        print "REGISTRY_LIMIT_LAYERS: %s" % settings.REGISTRY_LIMIT_LAYERS
        print "REGISTRY_CHECK_PERIOD: %s" % settings.REGISTRY_CHECK_PERIOD
        print ""
        print "SELENIUM_HUB_URL: %s" % SELENIUM_HUB_URL
        print "BROWSER_HYPERMAP_URL: %s" % BROWSER_HYPERMAP_URL
        print "BROWSER_SEARCH_URL: %s" % BROWSER_SEARCH_URL
        print "BROWSER_MAPLOOM_URL: %s" % BROWSER_MAPLOOM_URL
        print "WAIT_FOR_CELERY_JOB_PERIOD: %s" % WAIT_FOR_CELERY_JOB_PERIOD
        print "ENDPOINT FILE: %s" % ENDPOINT_FILE
        print ""
        print "Starting..."

        driver = self.driver
        time.sleep(3)

        driver.get(self.base_url + "/admin/login/?next=/admin/")
        print(driver.current_url)
        driver.find_element_by_id("id_password").clear()
        driver.find_element_by_id("id_password").send_keys("admin")
        driver.find_element_by_id("id_username").clear()
        driver.find_element_by_id("id_username").send_keys("admin")
        driver.find_element_by_css_selector("input[type=\"submit\"]").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Periodic tasks").click()
        print(driver.current_url)
        print("> assert 3 periodic tasks, which means beat is alive.")
        self.assertEqual("3 periodic tasks",
                         driver.find_element_by_css_selector(
                             "p.paginator").text)
        driver.find_element_by_link_text("Home").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Endpoint lists").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Add endpoint list").click()
        print(driver.current_url)
        print("> uploading Endpoint List...")
        driver.find_element_by_id("id_upload").clear()
        driver.find_element_by_id("id_upload").send_keys(ENDPOINT_FILE)
        driver.find_element_by_name("_save").click()
        print(driver.current_url)

        print("> waiting {0} seconds for celery to do the job...".format(
            WAIT_FOR_CELERY_JOB_PERIOD
        ))
        time.sleep(WAIT_FOR_CELERY_JOB_PERIOD)

        driver.find_element_by_link_text("Aggregator").click()
        time.sleep(1)
        print(driver.current_url)
        driver.find_element_by_link_text("Endpoints").click()
        print(driver.current_url)
        print("> assert Endpoint created.")
        time.sleep(1)
        self.assertEqual(
            "http://mesonet.agron.iastate.edu/cgi-bin/wms/us/wwa.cgi",
            driver.find_element_by_link_text(
                "http://mesonet.agron.iastate.edu/cgi-bin/wms/us/wwa.cgi").text)
        driver.find_element_by_link_text(
            "http://mesonet.agron.iastate.edu/cgi-bin/wms/us/wwa.cgi").click()
        # self.assertEqual("1 service/s created", driver.find_element_by_id("id_message").text)
        driver.find_element_by_link_text("Endpoints").click()
        print(driver.current_url)
        time.sleep(1)
        driver.find_element_by_link_text("Aggregator").click()
        print(driver.current_url)
        time.sleep(1)
        driver.find_element_by_link_text("Services").click()
        print(driver.current_url)
        print("> assert 1 Service created.")
        time.sleep(1)
        self.assertEqual("1 service", driver.find_element_by_css_selector(
            "p.paginator").text)
        self.assertEqual(
            "http://mesonet.agron.iastate.edu/cgi-bin/wms/us/wwa.cgi",
            driver.find_element_by_css_selector("td.field-url").text)
        driver.find_element_by_xpath(
            '//*[@id="result_list"]/tbody/tr/th/a').click()
        print(driver.current_url)
        print("> assert Service details.")
        time.sleep(1)

        self.assertEqual("IEM NWS Warnings WMS Service",
                         driver.find_element_by_id(
                             "id_title").get_attribute("value"))

        driver.find_element_by_link_text("Services").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Aggregator").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Layers").click()
        print(driver.current_url)
        print("> assert 3 layers created.")
        time.sleep(1)
        self.assertEqual("3 layers", driver.find_element_by_css_selector(
            "p.paginator").text)
        driver.get(self.base_url + "/registry/")
        print(driver.current_url)
        print("> go to /registry/.")

        for i in range(1, 11):
            print("> try assert checks count > 0. (%i of 10)" % i)
            try:
                self.assertNotEqual("0", driver.find_element_by_xpath(
                    "//td[4]").text)
                print("> found")
                break
            except AssertionError:
                print("> wait and reload page")
                time.sleep(10)
                driver.get(self.base_url + "/registry/")

        try:
            self.assertNotEqual("0",
                                driver.find_element_by_xpath("//td[4]").text)
        except AssertionError as e:
            self.verificationErrors.append(str(e))

        driver.get("{0}/hypermap/_count".format(BROWSER_SEARCH_URL))
        print(driver.current_url)
        time.sleep(2)

        for i in range(1, 11):
            print("> assert layers indexed are 3. (%i of 10)" % i)
            try:
                self.assertRegexpMatches(
                    driver.find_element_by_css_selector("pre").text,
                    "^\\{\"count\":3[\\s\\S]*$")
                print("> found")
                break
            except AssertionError:
                print("> wait and reload page")
                time.sleep(10)
                driver.refresh()

        self.assertRegexpMatches(
            driver.find_element_by_css_selector("pre").text,
            "^\\{\"count\":3[\\s\\S]*$")

        driver.get(self.base_url + "/registry/")
        print(driver.current_url)
        driver.find_element_by_link_text(
            "IEM NWS Warnings WMS Service").click()
        print(driver.current_url)
        print("> remove checks.")
        driver.find_element_by_name("remove").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Home").click()
        print(driver.current_url)
        print("> assert checks = 0.")
        self.assertEqual("0", driver.find_element_by_xpath("//td[4]").text)
        driver.find_element_by_link_text(
            "IEM NWS Warnings WMS Service").click()
        print(driver.current_url)
        print("> trigger check.")
        driver.find_element_by_name("check").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Home").click()
        print(driver.current_url)

        for i in range(1, 11):
            try:
                print("> assert checks = 1. (%i of 10)" % i)
                self.assertTrue(
                    int(driver.find_element_by_xpath("//td[4]").text) > 0)
                print("> found")
                break
            except AssertionError:
                print("> wait and reload page")
                time.sleep(10)
                driver.refresh()

        driver.find_element_by_link_text(
            "IEM NWS Warnings WMS Service").click()
        print(driver.current_url)
        driver.find_element_by_link_text("wwa").click()
        print(driver.current_url)
        print("> remove checks from Layer.")
        driver.find_element_by_name("remove").click()
        print(driver.current_url)
        print("> assert text [No checks performed so far].")
        self.assertEqual("No checks performed so far",
                         driver.find_element_by_xpath("//tr[11]/td[2]").text)
        print("> check Layer.")
        driver.find_element_by_name("check").click()
        print(driver.current_url)

        for i in range(1, 11):
            try:
                print("> assert text [Total Checks: N>0]. (%i of 10)" % i)
                src = driver.page_source
                text_found_TOTAL_CHECKS_LTE_1 = re.search(
                    r'Total Checks: (1|2|3|4|5|6|7)', src)
                self.assertNotEqual(text_found_TOTAL_CHECKS_LTE_1, None)
                print("> found")
                break
            except AssertionError:
                print("> wait and reload page")
                time.sleep(10)
                driver.get(driver.current_url)

        src = driver.page_source
        text_found_TOTAL_CHECKS_LTE_1 = re.search(
            r'Total Checks: (1|2|3|4|5|6|7)', src)
        self.assertNotEqual(text_found_TOTAL_CHECKS_LTE_1, None)

        driver.find_element_by_link_text("Home").click()
        print(driver.current_url)
        driver.find_element_by_link_text("Monitor").click()
        print(driver.current_url)
        print("> clean Search index and wait")
        driver.find_element_by_name("clear_index").click()
        print(driver.current_url)
        time.sleep(5)
        driver.get("{0}/hypermap/_count".format(BROWSER_SEARCH_URL))
        print(driver.current_url)
        print("> assert count != 3 layers")
        try:
            self.assertNotRegexpMatches(
                driver.find_element_by_css_selector("pre").text,
                "^\\{\"count\":3[\\s\\S]*$")
        except AssertionError as e:
            self.verificationErrors.append(str(e))
        driver.get(self.base_url + "/registry/")
        print(driver.current_url)
        print("> finish hypermap page")
        print("")

        # TODO: activate this to test MapLoom; right now that app looks very buggy.
        """
        print(">> start maploom")
        driver.get(BROWSER_MAPLOOM_URL)
        print(driver.current_url)
        print(">> open registry modal")
        driver.find_element_by_xpath(
            "//div[@id='pulldown-content']/div[2]/div/div").click()
        print(">> assert Hypermap catalog")
        time.sleep(10)
        self.assertEqual("Hypermap",
                         driver.find_element_by_xpath(
                             "//div[@id='explore']/div/nav/div/form/div/div[2]/select").text)
        print(">> assert [Showing 3 of 3 - Page 1 / 1]")
        self.assertEqual("Showing 3 of 3 - Page 1 / 1".lower(),
                         driver.find_element_by_css_selector(
                             "span.text-muted.ng-binding").text.lower())
        driver.find_element_by_id("text_search_input_exp").clear()
        print(">> search IEM")
        driver.find_element_by_id("text_search_input_exp").send_keys("IEM")
        driver.find_element_by_id("text_search_btn").click()
        time.sleep(10)
        print(">> assert [Showing 1 of 1 - Page 1 / 1]")
        self.assertEqual("Showing 1 of 1 - Page 1 / 1".lower(),
                         driver.find_element_by_css_selector(
                             "span.text-muted.ng-binding").text.lower())
        print(">> click reset")
        driver.find_element_by_name("button").click()
        time.sleep(10)
        print(">> assert [Showing 3 of 3 - Page 1 / 1]")
        self.assertEqual("Showing 3 of 3 - Page 1 / 1".lower(),
                         driver.find_element_by_css_selector(
                             "span.text-muted.ng-binding").text.lower())
        print(">> click on 3 layers to select")
        driver.find_element_by_css_selector("td.ellipsis.ng-binding").click()
        driver.find_element_by_xpath(
            "//div[@id='registry-layers']/div/div/div/div[2]/div[2]/div/table/tbody/tr[3]/td").click()
        driver.find_element_by_xpath(
            "//div[@id='registry-layers']/div/div/div/div[2]/div[2]/div/table/tbody/tr[4]/td").click()
        print(">> click on 3 layers to unselect")
        driver.find_element_by_css_selector("td.ellipsis.ng-binding").click()
        driver.find_element_by_xpath(
            "//div[@id='registry-layers']/div/div/div/div[2]/div[2]/div/table/tbody/tr[3]/td").click()
        driver.find_element_by_xpath(
            "//div[@id='registry-layers']/div/div/div/div[2]/div[2]/div/table/tbody/tr[4]/td").click()
        """

    def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException as e:
            print(e)
            return False
        return True

    def is_alert_present(self):
        try:
            self.driver.switch_to_alert()
        except NoAlertPresentException as e:
            print(e)
            return False
        return True

    def close_alert_and_get_its_text(self):
        try:
            alert = self.driver.switch_to_alert()
            alert_text = alert.text
            if self.accept_next_alert:
                alert.accept()
            else:
                alert.dismiss()
            return alert_text
        finally:
            self.accept_next_alert = True

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)
Example #6
def clear_solr():
    print('Clearing the solr core and indexes')
    from hypermap.aggregator.solr import SolrHypermap
    solrobject = SolrHypermap()
    solrobject.clear_solr()
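
For context, clearing a Solr core ultimately amounts to a delete-by-query followed by a commit. A minimal sketch of the equivalent raw request via Solr's JSON update API (the host, port, and core name are placeholders, not Hypermap's configuration):

import requests

# delete every document in the core and commit immediately
requests.post(
    "http://localhost:8983/solr/hypermap/update?commit=true",
    json={"delete": {"query": "*:*"}},
)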
Example #7
class TestCSWTransactions(unittest.TestCase):
    def setUp(self):
        self.client = Client()
        user = User.objects.create(username='******')
        user.set_password('admin')
        user.save()
        self.client.login(username="******", password="******")

        Catalog.objects.get_or_create(name=catalog_test_slug)

        Layer.objects.all().delete()
        Service.objects.all().delete()

        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            self.solr = SolrHypermap()
            self.solr.update_schema(catalog=catalog_test_slug)
            self.solr.clear_solr(catalog=catalog_test_slug)
        elif SEARCH_TYPE == SEARCH_TYPE_ES:
            es = ESHypermap()
            es.clear_es()
        else:
            raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)

    def test_post(self):
        """
        test CSV transactions.
        :return:
        """

        print("")
        print(">>> with env:")
        print("REGISTRY_SKIP_CELERY: %s" % settings.REGISTRY_SKIP_CELERY)
        print("REGISTRY_LIMIT_LAYERS: %s" % settings.REGISTRY_LIMIT_LAYERS)
        print("REGISTRY_CHECK_PERIOD: %s" % settings.REGISTRY_CHECK_PERIOD)
        print("REGISTRY_SEARCH_URL: %s" % settings.REGISTRY_SEARCH_URL)
        print("REGISTRY_HARVEST_SERVICES: %s" %
              settings.REGISTRY_HARVEST_SERVICES)
        print("")

        # Post the 10 Layers contained in this file: data/cswt_insert.xml
        path = os.path.join(settings.PROJECT_DIR, "..", "data",
                            "cswt_insert.xml")
        with open(path, 'rb') as f:
            payload = f.read()
        content_type = "application/xml"

        url = "/registry/{0}/csw".format(catalog_test_slug)

        res = self.client.post(url, data=payload, content_type=content_type)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(Layer.objects.all().count(), 10)

        # List the Layers posted above
        url = "/registry/{0}/csw?service=CSW&version=2.0.2&request=" \
              "GetRecords&typenames=csw:Record&elementsetname=full&" \
              "resulttype=results".format(catalog_test_slug)
        res = self.client.get(url)

        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.content.count("Airports (OSM)"), 1)
        self.assertEqual(res.content.count("Manaus Roads (OSM May 2016)"), 2)

        # Search one Layer posted above
        url = "/registry/{0}/csw?mode=opensearch&service=CSW&version" \
              "=2.0.2&request=GetRecords&elementsetname=full&typenames=" \
              "csw:Record&resulttype=results" \
              "&q=Airport".format(catalog_test_slug)

        res = self.client.get(url)

        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.content.count("Airports (OSM)"), 1)

        # Flush layers in the cache.
        index_cached_layers()

        # Give celery some time.
        time.sleep(3)

        # are Layers in index?
        url = "{0}hypermap/_search".format(SEARCH_URL)
        res = requests.get(url)
        results_ok_in_search_backend = res.json()
        self.assertTrue("hits" in results_ok_in_search_backend)
        self.assertTrue("total" in results_ok_in_search_backend["hits"])
        self.assertEqual(results_ok_in_search_backend["hits"]["total"], 10)

    def tearDown(self):
        pass
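
For reference, the cswt_insert.xml fixture posted by test_post above is a CSW 2.0.2 Transaction document. A minimal sketch of such an Insert request is shown below; the record fields and endpoint URL are illustrative, not the project's actual fixture.

import requests

payload = """<?xml version="1.0" encoding="UTF-8"?>
<csw:Transaction service="CSW" version="2.0.2"
    xmlns:csw="http://www.opengis.net/cat/csw/2.0.2"
    xmlns:dc="http://purl.org/dc/elements/1.1/">
  <csw:Insert>
    <csw:Record>
      <dc:identifier>example-layer-1</dc:identifier>
      <dc:title>Airports (OSM)</dc:title>
      <dc:type>dataset</dc:type>
    </csw:Record>
  </csw:Insert>
</csw:Transaction>"""

# the endpoint URL is a placeholder for a local Hypermap instance
res = requests.post("http://localhost:8000/registry/hypermap/csw",
                    data=payload,
                    headers={"Content-Type": "application/xml"})
print(res.status_code)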
Example #9
class SearchApiTestCase(TestCase):
    """
    run me
    python manage.py test hypermap.search_api --settings=hypermap.settings.test --failfast
    """

    def tearDown(self):
        signals.post_save.connect(layer_post_save, sender=Layer)
        signals.post_save.connect(service_post_save, sender=Service)

    def setUp(self):
        signals.post_save.disconnect(layer_post_save, sender=Layer)
        signals.post_save.disconnect(service_post_save, sender=Service)

        catalog_test_slug = "hypermap"

        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            self.solr = SolrHypermap()
            # delete solr documents
            # add the schema
            print('> updating schema for SEARCH_URL={0}'.format(SEARCH_URL))
            self.solr.update_schema(catalog=catalog_test_slug)
            print('> clearing SEARCH_URL={0}'.format(SEARCH_URL))
            self.solr.clear_solr(catalog=catalog_test_slug)

            self.search_engine_endpoint = '{0}/solr/{1}/select'.format(
                SEARCH_URL, catalog_test_slug
            )
        elif SEARCH_TYPE == SEARCH_TYPE_ES:
            es = ESHypermap()
            # delete ES documents
            es.clear_es()
            self.search_engine_endpoint = '{0}/{1}/_search'.format(
                SEARCH_URL, catalog_test_slug
            )
        else:
            raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)

        catalog, created = Catalog.objects.get_or_create(
            name=catalog_test_slug
        )

        service = Service(
            url='http://fakeurl.com',
            title='Title',
            type='OGC:WMS',
            catalog=catalog
        )
        service.save()

        layer = Layer(
            name='Layer 1',
            bbox_x0=-40.0,
            bbox_x1=-20.0,
            bbox_y0=-40.0,
            bbox_y1=-20.0,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(2000, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

        layer = Layer(
            name='Layer 2',
            bbox_x0=-40.0,
            bbox_x1=-20.0,
            bbox_y0=20.0,
            bbox_y1=40.0,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(2001, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

        layer = Layer(
            name='Layer 3',
            bbox_x0=20.0,
            bbox_x1=40.0,
            bbox_y0=20.0,
            bbox_y1=40.0,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(2002, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

        layer = Layer(
            name='Layer 4',
            bbox_x0=20.0,
            bbox_x1=40.0,
            bbox_y0=-40.0,
            bbox_y1=-20.0,
            service=service,
            catalog=catalog
        )
        layer.title = layer.name
        layer.save()
        layer.created = datetime.datetime(2003, 3, 1, 0, 0, 0)
        layer.save()
        service.layer_set.add(layer)

        # Solr has commitWithin set to 1500 ms.
        # Wait 2 seconds before proceeding with the tests,
        # otherwise the next test will return zero docs.
        service.index_layers(with_cache=False)
        time.sleep(2)

        self.api_url = "{0}{1}".format(
            settings.SITE_URL, reverse("search_api", args=[catalog_test_slug])
        )
        self.default_params = {
            "search_engine": SEARCH_TYPE,
            "search_engine_endpoint": self.search_engine_endpoint,
            "q_time": "[* TO *]",
            "q_geo": "[-90,-180 TO 90,180]",
            "d_docs_limit": 0,
            "d_docs_page": 1,
            "d_docs_sort": "score"
        }

    def test_catalogs(self):
        print('> testing catalogs')
        url = settings.SITE_URL + reverse("catalog-list")
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)
        catalogs = json.loads(res.content)
        self.assertEqual(len(catalogs), Catalog.objects.all().count())

    def test_all_match_docs(self):
        print('> testing match all docs')
        params = self.default_params
        print("searching on [{}]".format(self.api_url))
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], Layer.objects.all().count())

    def test_q_text(self):
        print('> testing q text')
        layer = Layer.objects.all()[0]
        params = self.default_params
        params["q_text"] = "title:\"{0}\"".format(layer.title)
        params["d_docs_limit"] = 100

        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)

        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 1)

        for doc in results.get("d.docs", []):
            self.assertEqual(doc["title"], layer.title)

    def test_q_geo(self):
        print('> testing q geo')
        params = self.default_params

        # top right square
        params["q_geo"] = "[0,0 TO 30,30]"
        results = self.client.get(self.api_url, params)

        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 1)

        # bottom left square
        params["q_geo"] = "[-30,-30 TO 0,0]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 1)

        # big square
        params["q_geo"] = "[-30,-30 TO 30,30]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 4)

        # center square, where there are no layers
        params["q_geo"] = "[-5,-5 TO 5,5]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], 0)

        # bad format
        params["q_geo"] = "[-5,-5 5,5]"
        results = self.client.get(self.api_url, params)
        # validate the format
        print('> testing q geo (format validations)')
        self.assertEqual(results.status_code, 400)

    def test_q_time(self):
        print('> testing q time (format validations)')
        params = self.default_params

        # test validations
        params["q_time"] = "[2000-01-01 - 2001-01-01T00:00:00]"
        results = self.client.get(self.api_url, params)
        # requires [X TO Y]
        self.assertEqual(400, results.status_code)

        print('> testing q time')
        # test asterisks
        # all times
        params["q_time"] = "[* TO *]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        # all records
        self.assertEqual(results["a.matchDocs"], Layer.objects.all().count())

        # test range
        # entire year 2000
        params["q_time"] = "[2000-01-01 TO 2001-01-01T00:00:00]"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        # 1 in year 2000
        self.assertEqual(results["a.matchDocs"], 1)

        # test complete min and max when q time is asterisks
        params["q_time"] = "[* TO *]"
        params["a_time_limit"] = 1
        if SEARCH_TYPE == SEARCH_TYPE_ES:
            # TODO: a_time_limit is WIP in ES; currently it requires an a_time_gap to be completed.
            params["a_time_gap"] = "P1Y"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], Layer.objects.all().count())

        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            # TODO: fix on Solr or ES? see next TODO.
            # * TO * to first date and last date.
            self.assertEqual(results["a.time"]["start"].upper(), "2000-03-01T00:00:00Z")
            self.assertEqual(results["a.time"]["end"].upper(), "2003-03-01T00:00:00Z")
        else:
            # TODO: ES and Solr return facets split yearly by default. The first record is on 2000-03;
            # ES facets are returned from 2000-01... to 2003-01,
            # while Solr facets are returned from 2000-03... to 2003-03.
            # Solr data seems more accurate, since the first and last Layers are in month 03.
            # Example: http://panchicore.d.pr/12ESP
            # * TO * to first date and last date.
            self.assertEqual(results["a.time"]["start"].upper(), "2000-01-01T00:00:00Z")
            self.assertEqual(results["a.time"]["end"].upper(), "2003-01-01T00:00:00Z")

        # test facets
        params["q_time"] = "[2000 TO 2022]"
        params["a_time_limit"] = 1
        params["a_time_gap"] = "P1Y"
        results = self.client.get(self.api_url, params)
        self.assertEqual(results.status_code, 200)
        results = json.loads(results.content)
        self.assertEqual(results["a.matchDocs"], Layer.objects.all().count())
        # 2000 is expanded to the full datetime format
        self.assertEqual(results["a.time"]["start"].upper(), "2000-01-01T00:00:00Z")
        # 2022 is expanded to the full datetime format

        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            # TODO: Solr builds the entire time span (2000 to 2022) and returns facets with empty entries;
            # fix by removing facets with zero counts?
            self.assertEqual(results["a.time"]["end"].upper(), "2022-01-01T00:00:00Z")
        else:
            self.assertEqual(results["a.time"]["end"].upper(), "2003-01-01T00:00:00Z")
        # the facet counts include all facets, excluding those before 2000
        self.assertEqual(len(results["a.time"]["counts"]), Layer.objects.all().count())

    def test_utilities(self):
        print('> testing utilities functions')
        # test_parse_datetime_range
        start, end = utils.parse_datetime_range("[2013-03-01 TO 2014-05-02T23:00:00]")
        self.assertTrue(start.get("is_common_era"))
        self.assertEqual(start.get("parsed_datetime").year, 2013)
        self.assertEqual(start.get("parsed_datetime").month, 3)
        self.assertEqual(start.get("parsed_datetime").day, 1)
        self.assertTrue(end.get("is_common_era"))
        self.assertEqual(end.get("parsed_datetime").year, 2014)
        self.assertEqual(end.get("parsed_datetime").month, 5)
        self.assertEqual(end.get("parsed_datetime").day, 2)
        self.assertEqual(end.get("parsed_datetime").hour, 23)
        self.assertEqual(end.get("parsed_datetime").minute, 0)
        self.assertEqual(end.get("parsed_datetime").second, 0)

        start, end = utils.parse_datetime_range("[-500000000 TO 2014-05-02T23:00:00]")
        self.assertFalse(start.get("is_common_era"))
        self.assertEqual(start.get("parsed_datetime"), "-500000000-01-01T00:00:00Z")

        start, end = utils.parse_datetime_range("[* TO *]")
        self.assertTrue(start.get("is_common_era"))
        self.assertEqual(start.get("parsed_datetime"), None)
        self.assertEqual(end.get("parsed_datetime"), None)

        # test_parse_ISO8601
        quantity, units = utils.parse_ISO8601("P3D")
        self.assertEqual(quantity, 3)
        self.assertEqual(units[0], "DAYS")

        # test_gap_to_sorl
        value = utils.gap_to_sorl("P3D")
        self.assertEqual(value, "+3DAYS")

        # test_parse_geo_box
        value = utils.parse_geo_box("[-90,-180 TO 90,180]")
        self.assertEqual(value.bounds[0], -90)
        self.assertEqual(value.bounds[1], -180)
        self.assertEqual(value.bounds[2], 90)
        self.assertEqual(value.bounds[3], 180)

        # test_request_time_facet
        d = utils.request_time_facet("x", "[2000 TO 2014-01-02T11:12:13]", None, 1000)
        self.assertEqual(type(d), dict)
        self.assertEqual(d['f.x.facet.range.start'], '2000-01-01T00:00:00Z')
        self.assertEqual(d['f.x.facet.range.end'], '2014-01-02T11:12:13Z')
        self.assertEqual(d['f.x.facet.range.gap'], '+6DAYS')
        self.assertEqual(d['facet.range'], 'x')

        d = utils.request_time_facet("y", "[-5000000 TO 2016]", "P1D", 1)
        self.assertEqual(d['f.y.facet.range.start'], '-5000000-01-01T00:00:00Z')
        self.assertEqual(d['f.y.facet.range.end'], '2016-01-01T00:00:00Z')
        self.assertEqual(d['f.y.facet.range.gap'], '+1DAYS')
        self.assertEqual(d['facet.range'], 'y')
class TestCSWTransactions(unittest.TestCase):
    def setUp(self):
        self.client = Client()
        user = User.objects.create(username='******')
        user.set_password('admin')
        user.save()
        self.client.login(username="******", password="******")

        Catalog.objects.get_or_create(
            name=catalog_test_slug
        )

        Layer.objects.all().delete()
        Service.objects.all().delete()

        if SEARCH_TYPE == SEARCH_TYPE_SOLR:
            self.solr = SolrHypermap()
            self.solr.update_schema(catalog=catalog_test_slug)
            self.solr.clear_solr(catalog=catalog_test_slug)
        elif SEARCH_TYPE == SEARCH_TYPE_ES:
            es = ESHypermap()
            es.clear_es()
        else:
            raise Exception("SEARCH_TYPE not valid=%s" % SEARCH_TYPE)

    def test_post(self):
        """
        test CSV transactions.
        :return:
        """

        print ""
        print ">>> with env:"
        print "REGISTRY_SKIP_CELERY: %s" % settings.REGISTRY_SKIP_CELERY
        print "REGISTRY_LIMIT_LAYERS: %s" % settings.REGISTRY_LIMIT_LAYERS
        print "REGISTRY_CHECK_PERIOD: %s" % settings.REGISTRY_CHECK_PERIOD
        print "REGISTRY_SEARCH_URL: %s" % settings.REGISTRY_SEARCH_URL
        print "REGISTRY_HARVEST_SERVICES: %s" % settings.REGISTRY_HARVEST_SERVICES
        print ""

        # Post the 10 Layers contained in this file: data/cswt_insert.xml
        path = os.path.join(settings.PROJECT_DIR, "..",
                            "data", "cswt_insert.xml")
        with open(path, 'rb') as f:
            payload = f.read()
        content_type = "application/xml"

        url = "/registry/{0}/csw".format(catalog_test_slug)

        res = self.client.post(url, data=payload, content_type=content_type)
        self.assertEqual(res.status_code, 200)
        self.assertEqual(Layer.objects.all().count(), 10)

        # List the Layers posted above
        url = "/registry/{0}/csw?service=CSW&version=2.0.2&request=" \
              "GetRecords&typenames=csw:Record&elementsetname=full&" \
              "resulttype=results".format(catalog_test_slug)
        res = self.client.get(url)

        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.content.count("Airports (OSM)"), 1)
        self.assertEqual(res.content.count("Manaus Roads (OSM May 2016)"), 2)

        # Search one Layer posted above
        url = "/registry/{0}/csw?mode=opensearch&service=CSW&version" \
              "=2.0.2&request=GetRecords&elementsetname=full&typenames=" \
              "csw:Record&resulttype=results" \
              "&q=Airport".format(catalog_test_slug)

        res = self.client.get(url)

        self.assertEqual(res.status_code, 200)
        self.assertEqual(res.content.count("Airports (OSM)"), 1)

        # Flush layers in the cache.
        index_cached_layers()

        # Give celery some time.
        time.sleep(3)

        # are Layers in index?
        url = "{0}hypermap/_search".format(
             SEARCH_URL
        )
        res = requests.get(url)
        results_ok_in_search_backend = res.json()
        self.assertTrue("hits" in results_ok_in_search_backend)
        self.assertTrue("total" in results_ok_in_search_backend["hits"])
        self.assertEqual(results_ok_in_search_backend["hits"]["total"], 10)

    def tearDown(self):
        pass
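
A closing note on the index assertion above: on Elasticsearch versions before 7, hits.total is a plain integer, which is what the test expects. A standalone sketch of the same check that also tolerates newer ES versions (the URL is a placeholder):

import requests

res = requests.get("http://localhost:9200/hypermap/_search")
hits = res.json()["hits"]
# ES < 7 returns an int; ES >= 7 returns {"value": N, "relation": "eq"}
total = hits["total"] if isinstance(hits["total"], int) else hits["total"]["value"]
print(total)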