Exemple #1
0
    def test_plugin_compatibility(self):
        """Default plugins perform as expected
        """

        # Upload a raster and a vector data set
        hazard_filename = os.path.join(TESTDATA,
                                       'shakemap_padang_20090930.asc')
        hazard_layer = save_to_geonode(hazard_filename)
        check_layer(hazard_layer, full=True)

        exposure_filename = os.path.join(TESTDATA,
                                         'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Test
        plugin_list = get_plugins()
        assert len(plugin_list) > 0

        geoserver = {'url': settings.GEOSERVER_BASE_URL + 'ows',
                     'name': 'Local Geoserver',
                     'version': '1.0.0',
                     'id': 0}
        metadata = get_layer_descriptors(geoserver['url'])

        msg = 'There were no layers in test geoserver'
        assert len(metadata) > 0, msg

        # Characterisation test to preserve the behaviour of
        # get_layer_descriptors. FIXME: I think we should change this to be
        # a dictionary of metadata entries (ticket #126).
        reference = [['geonode:lembang_schools',
                      {'layer_type': 'feature',
                       'category': 'exposure',
                       'subcategory': 'building',
                       'title': 'lembang_schools'}],
                     ['geonode:shakemap_padang_20090930',
                      {'layer_type': 'raster',
                       'category': 'hazard',
                       'subcategory': 'earthquake',
                       'title': 'shakemap_padang_20090930'}]]

        for name, mdblock in reference:
            # Locate the corresponding entry in the live metadata
            i = [x[0] for x in metadata].index(name)

            assert name == metadata[i][0]
            # FIX: use the unpacked mdblock directly (previously the code
            # re-indexed entry[1] and left mdblock unused)
            for key in mdblock:
                assert mdblock[key] == metadata[i][1][key]

        # Check plugins are returned
        annotated_plugins = [{'name': name,
                              'doc': f.__doc__,
                              'layers': compatible_layers(f, metadata)}
                             for name, f in plugin_list.items()]

        msg = 'No compatible layers returned'
        assert len(annotated_plugins) > 0, msg
 def test_non_existing_file(self):
     """RisikoException is raised for non existing file
     """
     sampletxt = os.path.join(TESTDATA, 'smoothoperator.shp')
     try:
         save_to_geonode(sampletxt, user=self.user)
     except RisikoException:
         # Expected failure mode for a missing file
         pass
     else:
         # FIX: previously the test passed silently if no exception was
         # raised at all
         msg = ('Expected RisikoException for non existing file '
                '%s' % sampletxt)
         raise Exception(msg)
Exemple #3
0
 def test_non_existing_file(self):
     """RisikoException is raised for non existing file
     """
     sampletxt = os.path.join(TESTDATA, 'smoothoperator.shp')
     try:
         save_to_geonode(sampletxt, user=self.user)
     except RisikoException:
         # Expected failure mode for a missing file
         pass
     else:
         # FIX: previously the test passed silently if no exception was
         # raised at all
         msg = ('Expected RisikoException for non existing file '
                '%s' % sampletxt)
         raise Exception(msg)
 def test_extension_not_implemented(self):
     """RisikoException is raised for not compatible extensions
     """
     sampletxt = os.path.join(TESTDATA,
                              'lembang_schools_percentage_loss.dbf')
     try:
         save_to_geonode(sampletxt, user=self.user)
     except RisikoException:
         # Expected failure mode for an unsupported extension
         pass
     else:
         # FIX: previously the test passed silently if no exception was
         # raised at all
         msg = ('Expected RisikoException for unsupported extension in '
                '%s' % sampletxt)
         raise Exception(msg)
Exemple #5
0
 def test_extension_not_implemented(self):
     """RisikoException is raised for not compatible extensions
     """
     sampletxt = os.path.join(TESTDATA,
                              'lembang_schools_percentage_loss.dbf')
     try:
         save_to_geonode(sampletxt, user=self.user)
     except RisikoException:
         # Expected failure mode for an unsupported extension
         pass
     else:
         # FIX: previously the test passed silently if no exception was
         # raised at all
         msg = ('Expected RisikoException for unsupported extension in '
                '%s' % sampletxt)
         raise Exception(msg)
Exemple #6
0
    def XXtest_shakemap_population_exposure(self):
        """Population exposed to groundshaking matches USGS numbers
        """
        # NOTE(review): the 'XX' prefix keeps this test from being collected
        # by the runner. The body also ends after reading the impact data
        # without asserting anything against the USGS reference numbers the
        # docstring promises -- it appears unfinished; confirm before
        # re-enabling.

        # Upload hazard (shakemap) and exposure (population) rasters
        hazardfile = os.path.join(TESTDATA, 'shakemap_sumatra_20110129.tif')
        hazard_layer = save_to_geonode(hazardfile, overwrite=True,
                                       user=self.user)
        hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

        exposurefile = os.path.join(TESTDATA, 'population_indonesia_2008.tif')
        exposure_layer = save_to_geonode(exposurefile, overwrite=True,
                                         user=self.user)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        #with warnings.catch_warnings():
        #    warnings.simplefilter('ignore')
        # Run the fatality calculation through the REST API
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                hazard_server=INTERNAL_SERVER_URL,
                hazard=hazard_name,
                exposure_server=INTERNAL_SERVER_URL,
                exposure=exposure_name,
                bbox=get_bounding_box_string(hazardfile),
                impact_function='USGSFatalityFunction',
                keywords='test,shakemap,usgs'))

        # Response must be a JSON document with the standard result keys
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        assert 'hazard_layer' in data.keys()
        assert 'exposure_layer' in data.keys()
        assert 'run_duration' in data.keys()
        assert 'run_date' in data.keys()
        assert 'layer' in data.keys()

        # Download result and check
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                get_bounding_box(hazardfile))
        assert os.path.exists(result_layer.filename)

        # Read hazard data for reference
        hazard_raster = read_layer(hazardfile)
        H = hazard_raster.get_data()
        mmi_min, mmi_max = hazard_raster.get_extrema()

        # Read calculated result (no assertions follow -- see NOTE above)
        impact_raster = read_layer(result_layer.filename)
        I = impact_raster.get_data()
Exemple #7
0
    def test_calculate_school_damage(self):
        """Earthquake school damage calculation works via the HTTP REST API
        """

        # Upload required data first
        for filename in ['lembang_mmi_hazmap.asc',
                         'lembang_schools.shp']:
            thefile = os.path.join(TESTDATA, filename)
            uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
            check_layer(uploaded, full=True)

        # Run calculation through API
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                   hazard_server=INTERNAL_SERVER_URL,
                   hazard='geonode:lembang_mmi_hazmap',
                   exposure_server=INTERNAL_SERVER_URL,
                   exposure='geonode:lembang_schools',
                   bbox='105.592,-7.809,110.159,-5.647',
                   impact_function='Earthquake Building Damage Function',
                   keywords='test,schools,lembang',
        ))
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        # Membership test on the dict itself (not data.keys()) and a loop
        # with an explicit message for each required key
        for key in ['hazard_layer', 'exposure_layer', 'run_duration',
                    'run_date', 'layer']:
            msg = 'Expected key %s in response: %s' % (key, data)
            assert key in data, msg
    def test_keywords(self):
        """Keywords are read correctly from the .keywords file
        """

        for filename in ['Earthquake_Ground_Shaking.asc',
                         'Lembang_Earthquake_Scenario.asc',
                         'Padang_WGS84.shp']:

            _, ext = os.path.splitext(filename)
            thefile = os.path.join(TESTDATA, filename)
            uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)

            # Get uploaded keywords from uploaded layer object
            uploaded_keywords = uploaded.keywords
            msg = 'No keywords found in layer %s' % uploaded.name
            assert len(uploaded_keywords) > 0, msg

            # Get reference keywords from file. A context manager guarantees
            # the handle is closed even on error (was open/readlines/close).
            keywords_file = thefile.replace(ext, '.keywords')
            with open(keywords_file, 'r') as f:
                keywords_list = [line.strip().replace(' ', '')
                                 for line in f]

            # Verify that every keyword from file has been uploaded
            for keyword in keywords_list:
                msg = 'Could not find keyword "%s" in %s' % (keyword,
                                                             uploaded_keywords)
                assert keyword in uploaded_keywords, msg
Exemple #9
0
    def test_geotransform_from_geonode(self):
        """Geotransforms of GeoNode layers can be correctly determined
        """

        for raster_name in ['lembang_mmi_hazmap.asc', 'test_grid.asc']:
            # Upload file to GeoNode
            path = os.path.join(TESTDATA, raster_name)
            layer = save_to_geonode(path, user=self.user)

            # Reference geotransform read directly from the raster file
            ref_geotransform = read_layer(path).get_geotransform()

            # Geotransform as reported by GeoNode for the same layer
            layer_name = layer.typename
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            geotransform_name = 'geotransform'
            msg = ('Could not find attribute "%s" in metadata. '
                   'Values are: %s' % (geotransform_name, metadata.keys()))
            assert geotransform_name in metadata, msg

            gn_geotransform = metadata[geotransform_name]
            msg = ('Geotransform obtained from GeoNode for layer %s '
                   'was not correct. I got %s but expected %s'
                   '' % (layer_name, gn_geotransform, ref_geotransform))
            assert numpy.allclose(ref_geotransform, gn_geotransform), msg
Exemple #10
0
    def test_metadata_available_after_upload(self):
        """Test metadata is available after upload
        """
        import time

        # Upload exposure data for this test
        name = 'Population_2010'
        exposure_filename = '%s/%s.asc' % (TESTDATA, name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)
        layer_name = exposure_layer.typename
        server_url = settings.GEOSERVER_BASE_URL + '/ows'

        # Query the WCS capabilities right after the upload
        wcs = WebCoverageService(server_url, version='1.0.0')
        layer_appears_immediately = layer_name in wcs.contents

        # Query again after a short delay
        wait_time = 0.5
        time.sleep(wait_time)
        wcs2 = WebCoverageService(server_url, version='1.0.0')
        layer_appears_afterwards = layer_name in wcs2.contents

        # Check the delayed query first, then the immediate one
        msg = ('Layer %s was not found after %s seconds in WxS contents '
               'on server %s.\n'
               'WCS contents: %s\n' % (layer_name,
                                       wait_time,
                                       server_url,
                                       wcs.contents))

        assert layer_appears_afterwards, msg

        msg = ('Layer %s was not found in WxS contents on server %s.\n'
               'WCS contents: %s\n' % (layer_name, server_url, wcs.contents))

        assert layer_appears_immediately, msg
Exemple #11
0
    def test_keywords(self):
        """Keywords are read correctly from the .keywords file
        """

        for filename in [
                'Earthquake_Ground_Shaking.asc',
                'Lembang_Earthquake_Scenario.asc', 'Padang_WGS84.shp'
        ]:

            _, ext = os.path.splitext(filename)
            thefile = os.path.join(TESTDATA, filename)
            uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)

            # Get uploaded keywords from uploaded layer object
            uploaded_keywords = uploaded.keywords
            msg = 'No keywords found in layer %s' % uploaded.name
            assert len(uploaded_keywords) > 0, msg

            # Get reference keywords from file. A context manager guarantees
            # the handle is closed even on error (was open/readlines/close).
            keywords_file = thefile.replace(ext, '.keywords')
            with open(keywords_file, 'r') as f:
                keywords_list = [line.strip().replace(' ', '')
                                 for line in f]

            # Verify that every keyword from file has been uploaded
            for keyword in keywords_list:
                msg = 'Could not find keyword "%s" in %s' % (keyword,
                                                             uploaded_keywords)
                assert keyword in uploaded_keywords, msg
Exemple #12
0
    def test_io(self):
        """Data can be uploaded and downloaded from internal GeoServer
        """

        # Upload a raster and a vector data set
        for testfile in ['population_padang_1.asc', 'lembang_schools.shp']:
            basename, ext = os.path.splitext(testfile)
            fullpath = os.path.join(TESTDATA, testfile)

            layer = save_to_geonode(fullpath, user=self.user, overwrite=True)

            # Name checking: GeoNode lowercases the base filename
            layer_name = layer.name
            expected_name = basename.lower()
            msg = 'Expected layername %s but got %s' % (expected_name,
                                                        layer_name)
            assert layer_name == expected_name, msg

            workspace = layer.workspace
            msg = 'Expected workspace to be "geonode". Got %s' % workspace
            assert workspace == 'geonode'

            # Check metadata
            assert_bounding_box_matches(layer, fullpath)

            # Download layer again using workspace:name
            bbox = get_bounding_box(fullpath)
            typename = '%s:%s' % (workspace, layer_name)
            downloaded_layer = download(INTERNAL_SERVER_URL, typename, bbox)
            assert os.path.exists(downloaded_layer.filename)
    def test_shapefile(self):
        """Shapefile can be uploaded
        """
        shapefile = os.path.join(TESTDATA, 'lembang_schools.shp')
        uploaded = save_to_geonode(shapefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)

        # The geographic bounding box is exposed as a string
        assert isinstance(uploaded.geographic_bounding_box, basestring)
Exemple #14
0
    def test_repeated_upload(self):
        """The same file can be uploaded more than once
        """
        thefile = os.path.join(TESTDATA, 'test_grid.asc')

        # Two uploads with overwrite enabled, then one without
        first = save_to_geonode(thefile, overwrite=True, user=self.user)
        check_layer(first, full=True)
        second = save_to_geonode(thefile, overwrite=True, user=self.user)
        check_layer(second, full=True)
        third = save_to_geonode(thefile, overwrite=False, user=self.user)
        check_layer(third, full=True)

        # Overwriting keeps the layer name
        msg = ('Expected %s but got %s' % (first.name, second.name))
        assert first.name == second.name, msg

        # Not overwriting must yield a fresh name
        msg = ('Expected a different name when uploading %s using '
               'overwrite=False but got %s' % (thefile, third.name))
        assert first.name != third.name, msg
Exemple #15
0
    def test_shapefile(self):
        """Shapefile can be uploaded
        """
        path = os.path.join(TESTDATA, 'lembang_schools.shp')
        result = save_to_geonode(path, user=self.user, overwrite=True)
        check_layer(result, full=True)

        # The geographic bounding box is exposed as a string
        assert isinstance(result.geographic_bounding_box, basestring)
Exemple #16
0
    def test_plugin_selection(self):
        """Verify the plugins can recognize compatible layers.
        """
        # Upload a raster and a vector data set
        hazard_filename = os.path.join(TESTDATA,
                                       'Earthquake_Ground_Shaking.asc')
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user,
                                       overwrite=True)
        check_layer(hazard_layer, full=True)

        msg = 'No keywords found in layer %s' % hazard_layer.name
        assert len(hazard_layer.keywords) > 0, msg

        exposure_filename = os.path.join(TESTDATA,
                                         'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposure_filename)
        check_layer(exposure_layer, full=True)
        msg = 'No keywords found in layer %s' % exposure_layer.name
        assert len(exposure_layer.keywords) > 0, msg

        c = Client()
        rv = c.post('/impact/api/functions/', data={})

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)

        assert 'functions' in data

        functions = data['functions']

        # FIXME (Ariel): This test should implement an alternative function to
        # parse the requirements, but for now it will just take the buildings
        # damage one.
        found = False
        for function in functions:
            if function['name'] == 'Earthquake Building Damage Function':
                found = True
                layers = function['layers']

                msg_tmpl = 'Expected layer %s in list of compatible layers: %s'

                hazard_msg = msg_tmpl % (hazard_layer.typename, layers)
                assert hazard_layer.typename in layers, hazard_msg

                exposure_msg = msg_tmpl % (exposure_layer.typename, layers)
                assert exposure_layer.typename in layers, exposure_msg

        # FIX: previously the test passed silently if the target function
        # was absent from the response altogether
        msg = ('Expected function "Earthquake Building Damage Function" '
               'in %s' % functions)
        assert found, msg
Exemple #17
0
    def test_plugin_selection(self):
        """Verify the plugins can recognize compatible layers.
        """
        # Upload a raster and a vector data set
        hazard_filename = os.path.join(TESTDATA,
                                       'Earthquake_Ground_Shaking.asc')
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user,
                                       overwrite=True)
        check_layer(hazard_layer, full=True)

        msg = 'No keywords found in layer %s' % hazard_layer.name
        assert len(hazard_layer.keywords) > 0, msg

        exposure_filename = os.path.join(TESTDATA, 'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposure_filename)
        check_layer(exposure_layer, full=True)
        msg = 'No keywords found in layer %s' % exposure_layer.name
        assert len(exposure_layer.keywords) > 0, msg

        c = Client()
        rv = c.post('/impact/api/functions/', data={})

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)

        assert 'functions' in data

        functions = data['functions']

        # FIXME (Ariel): This test should implement an alternative function to
        # parse the requirements, but for now it will just take the buildings
        # damage one.
        found = False
        for function in functions:
            if function['name'] == 'Earthquake Building Damage Function':
                found = True
                layers = function['layers']

                msg_tmpl = 'Expected layer %s in list of compatible layers: %s'

                hazard_msg = msg_tmpl % (hazard_layer.typename, layers)
                assert hazard_layer.typename in layers, hazard_msg

                exposure_msg = msg_tmpl % (exposure_layer.typename, layers)
                assert exposure_layer.typename in layers, exposure_msg

        # FIX: previously the test passed silently if the target function
        # was absent from the response altogether
        msg = ('Expected function "Earthquake Building Damage Function" '
               'in %s' % functions)
        assert found, msg
Exemple #18
0
    def test_shapefile_without_prj(self):
        """Shapefile without prj file is rejected
        """

        thefile = os.path.join(TESTDATA, 'lembang_schools_percentage_loss.shp')
        try:
            save_to_geonode(thefile, user=self.user)
        except RisikoException:
            # Expected: upload of unprojected shapefile must be rejected
            pass
        else:
            # FIX: previously the test passed silently if no exception was
            # raised at all
            msg = ('Expected RisikoException for shapefile without prj '
                   'file: %s' % thefile)
            raise Exception(msg)
 def test_non_existing_dir(self):
     """RisikoException is raised for non existing dir
     """
     sampletxt = os.path.join(TESTDATA, 'smoothoperator')
     try:
         uploaded_layers = save_to_geonode(sampletxt, user=self.user)
         for uploaded in uploaded_layers:
             print(uploaded)
     except RisikoException:
         # Expected failure mode for a missing directory
         pass
     else:
         # FIX: previously the test passed silently if no exception was
         # raised at all
         msg = ('Expected RisikoException for non existing dir '
                '%s' % sampletxt)
         raise Exception(msg)
    def test_shapefile_without_prj(self):
        """Shapefile without prj file is rejected
        """

        thefile = os.path.join(TESTDATA,
                               'lembang_schools_percentage_loss.shp')
        try:
            save_to_geonode(thefile, user=self.user)
        except RisikoException:
            # Expected: upload of unprojected shapefile must be rejected
            pass
        else:
            # FIX: previously the test passed silently if no exception was
            # raised at all
            msg = ('Expected RisikoException for shapefile without prj '
                   'file: %s' % thefile)
            raise Exception(msg)
Exemple #21
0
    def test_asciifile_without_prj(self):
        """ASCII file without prj file is rejected
        """

        thefile = os.path.join(TESTDATA, 'grid_without_projection.asc')

        try:
            save_to_geonode(thefile, user=self.user)
        except RisikoException:
            # Expected: upload of unprojected grid must be rejected
            pass
        else:
            # FIX: previously the test passed silently if no exception was
            # raised at all
            msg = ('Expected RisikoException for ASCII file without prj '
                   'file: %s' % thefile)
            raise Exception(msg)
Exemple #22
0
 def test_non_existing_dir(self):
     """RisikoException is raised for non existing dir
     """
     sampletxt = os.path.join(TESTDATA, 'smoothoperator')
     try:
         uploaded_layers = save_to_geonode(sampletxt, user=self.user)
         for uploaded in uploaded_layers:
             print(uploaded)
     except RisikoException:
         # Expected failure mode for a missing directory
         pass
     else:
         # FIX: previously the test passed silently if no exception was
         # raised at all
         msg = ('Expected RisikoException for non existing dir '
                '%s' % sampletxt)
         raise Exception(msg)
Exemple #23
0
    def test_calculate_fatality(self):
        """Earthquake fatalities calculation via the HTTP Rest API is correct
        """

        # Upload required data first
        for filename in ['Earthquake_Ground_Shaking.asc',
                         'Population_2010_clip.tif']:
            thefile = os.path.join(TESTDATA, filename)
            uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
            check_layer(uploaded, full=True)

        # Run calculation through API
        c = Client()
        rv = c.post('/impact/api/calculate/',
                    dict(hazard_server=INTERNAL_SERVER_URL,
                         hazard='geonode:earthquake_ground_shaking',
                         exposure='geonode:population_2010_clip',
                         exposure_server=INTERNAL_SERVER_URL,
                         bbox='99.36,-2.199,102.237,0.00',
                         impact_function='Earthquake Fatality Function',
                         keywords='test,earthquake,fatality'))

        # FIX: msg was previously dangling in a tuple expression
        # "self.assertEqual(...), msg" so it was never shown on failure;
        # pass it as the msg argument instead.
        msg = 'Expected status code 200, got %i' % rv.status_code
        self.assertEqual(rv.status_code, 200, msg)

        msg = ('Expected Content-Type "application/json", '
               'got %s' % rv['Content-Type'])
        self.assertEqual(rv['Content-Type'], 'application/json', msg)

        data = json.loads(rv.content)

        # Propagate any server side error
        if data['stacktrace'] is not None:
            msg = data['stacktrace']
            raise Exception(msg)

        # All standard result keys must be present
        for key in ['hazard_layer', 'exposure_layer', 'run_duration',
                    'run_date', 'layer', 'bbox', 'impact_function']:
            assert key in data, 'Expected key %s in response: %s' % (key, data)

        layer_uri = data['layer']

        #FIXME: This is not a good way to access the layer name
        typename = layer_uri.split('/')[4]
        name = typename.split(':')[1]

        # Check the autogenerated styles were correctly uploaded
        layer = Layer.objects.get(name=name)

        msg = ('A new style should have been created for layer [%s] '
               'got [%s] style instead.' % (name, layer.default_style.name))
        assert layer.default_style.name == name, msg
    def test_asciifile_without_prj(self):
        """ASCII file without prj file is rejected
        """

        thefile = os.path.join(TESTDATA,
                               'grid_without_projection.asc')

        try:
            save_to_geonode(thefile, user=self.user)
        except RisikoException:
            # Expected: upload of unprojected grid must be rejected
            pass
        else:
            # FIX: previously the test passed silently if no exception was
            # raised at all
            msg = ('Expected RisikoException for ASCII file without prj '
                   'file: %s' % thefile)
            raise Exception(msg)
    def test_repeated_upload(self):
        """The same file can be uploaded more than once
        """
        thefile = os.path.join(TESTDATA, 'test_grid.asc')

        # Two uploads with overwrite enabled, then one without
        first = save_to_geonode(thefile, overwrite=True,
                                user=self.user)
        check_layer(first, full=True)
        second = save_to_geonode(thefile, overwrite=True,
                                 user=self.user)
        check_layer(second, full=True)
        third = save_to_geonode(thefile, overwrite=False,
                                user=self.user)
        check_layer(third, full=True)

        # Overwriting keeps the layer name
        msg = ('Expected %s but got %s' % (first.name, second.name))
        assert first.name == second.name, msg

        # Not overwriting must yield a fresh name
        msg = ('Expected a different name when uploading %s using '
               'overwrite=False but got %s' % (thefile, third.name))
        assert first.name != third.name, msg
Exemple #26
0
    def test_another_asc(self):
        """Real world ASCII file can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        layer = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(layer, full=True)

        # Verify metadata
        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)
        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layer_type' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert 'geotransform' in metadata
        assert len(metadata['bounding_box']) == 4

        # A little metadata characterisation test
        # FIXME (Ole): Get this right when new resolution keyword
        # has been fully sorted out. There are 3 other tests failing at
        # the moment
        ref = {
            'layer_type':
            'raster',
            'keywords': {
                'category': 'hazard',
                'subcategory': 'earthquake',
                'resolution': '0.0112'
            },
            'geotransform':
            (105.29857, 0.0112, 0.0, -5.565233000000001, 0.0, -0.0112),
            'resolution':
            0.0112,
            'title':
            'lembang_mmi_hazmap'
        }

        # NOTE(review): 'resolution' appears in ref but not in the key list
        # below, so the 'resolution' arm of the allclose branch further down
        # is currently unreachable (see FIXME above).
        for key in ['layer_type', 'keywords', 'geotransform', 'title']:

            if key == 'keywords':
                # Keywords are compared entry by entry rather than as a
                # whole dict
                kwds = metadata[key]
                for k in kwds:
                    assert kwds[k] == ref[key][k]
            else:
                msg = ('Expected metadata for key %s to be %s. '
                       'Instead got %s' % (key, ref[key], metadata[key]))
                if key in ['geotransform', 'resolution']:
                    # Numerical tuples need tolerant comparison
                    assert numpy.allclose(metadata[key], ref[key]), msg
                else:
                    assert metadata[key] == ref[key], msg
    def test_cleanup(self):
        """Cleanup functions in the utils module work
        """
        from geonode.maps.utils import cleanup

        # Upload a layer so that a django record exists to clean up
        thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)

        # Identifiers of the uploaded layer (pk is captured but not used
        # further in this test)
        name = uploaded.name
        uuid = uploaded.uuid
        pk = uploaded.pk

        # try calling the cleanup function when the django record exists:
        # NOTE(review): a GeoNodeException raised by cleanup() is silently
        # swallowed here, so a failing cleanup cannot fail the test --
        # confirm whether that is intended.
        try:
            cleanup(name, uuid)
        except GeoNodeException, e:
            pass
Exemple #28
0
    def test_cleanup(self):
        """Cleanup functions in the utils module work
        """
        from geonode.maps.utils import cleanup

        # Upload a layer so that a django record exists to clean up
        thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)

        # Identifiers of the uploaded layer (pk is captured but not used
        # further in this test)
        name = uploaded.name
        uuid = uploaded.uuid
        pk = uploaded.pk

        # try calling the cleanup function when the django record exists:
        # NOTE(review): a GeoNodeException raised by cleanup() is silently
        # swallowed here, so a failing cleanup cannot fail the test --
        # confirm whether that is intended.
        try:
            cleanup(name, uuid)
        except GeoNodeException, e:
            pass
Exemple #29
0
    def test_native_raster_resolution(self):
        """Raster layer retains native resolution through Geoserver

        Raster layer can be uploaded and downloaded again with
        native resolution. This is one test for ticket #103
        """

        hazard_filename = ('%s/maumere_aos_depth_20m_land_wgs84.asc' %
                           TESTDATA)

        # Get reference values
        H = read_layer(hazard_filename)
        A_ref = H.get_data(nan=True)
        depth_min_ref, depth_max_ref = H.get_extrema()

        # Upload to internal geonode
        hazard_layer = save_to_geonode(hazard_filename, user=self.user)
        hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

        # Download data again with native resolution
        bbox = get_bounding_box_string(hazard_filename)
        H = download(INTERNAL_SERVER_URL, hazard_name, bbox)
        A = H.get_data(nan=True)

        # Compare shapes
        msg = ('Shape of downloaded raster was [%i, %i]. '
               'Expected [%i, %i].' %
               (A.shape[0], A.shape[1], A_ref.shape[0], A_ref.shape[1]))
        # rtol=atol=0 demands exactly identical dimensions
        assert numpy.allclose(A_ref.shape, A.shape, rtol=0, atol=0), msg

        # Compare extrema to values reference values (which have also been
        # verified by QGIS for this layer and tested in test_engine.py)
        depth_min, depth_max = H.get_extrema()
        msg = ('Extrema of downloaded file were [%f, %f] but '
               'expected [%f, %f]' %
               (depth_min, depth_max, depth_min_ref, depth_max_ref))
        assert numpy.allclose([depth_min, depth_max],
                              [depth_min_ref, depth_max_ref],
                              rtol=1.0e-6,
                              atol=1.0e-10), msg

        # Compare data number by number
        # (presumably nanallclose treats NaN cells as equal -- confirm
        # against its definition)
        assert nanallclose(A, A_ref, rtol=1.0e-8)
Exemple #30
0
    def test_calculate_school_damage(self):
        """Earthquake school damage calculation works via the HTTP REST API
        """

        # Upload required data first
        for filename in ['lembang_mmi_hazmap.asc',
                         'lembang_schools.shp']:
            thefile = os.path.join(TESTDATA, filename)
            uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
            check_layer(uploaded, full=True)

        # Run calculation through API
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                   hazard_server=INTERNAL_SERVER_URL,
                   hazard='geonode:lembang_mmi_hazmap',
                   exposure_server=INTERNAL_SERVER_URL,
                   exposure='geonode:lembang_schools',
                   bbox='105.592,-7.809,110.159,-5.647',
                   impact_function='Earthquake Building Damage Function',
                   keywords='test,schools,lembang',
        ))

        # FIX: msg was previously dangling in a tuple expression
        # "self.assertEqual(...), msg" so it was never shown on failure;
        # pass it as the msg argument instead.
        msg = 'Expected status code 200, got %i' % rv.status_code
        self.assertEqual(rv.status_code, 200, msg)

        msg = ('Expected Content-Type "application/json", '
               'got %s' % rv['Content-Type'])
        self.assertEqual(rv['Content-Type'], 'application/json', msg)

        data = json.loads(rv.content)

        # Propagate any server side error
        if data['stacktrace'] is not None:
            msg = data['stacktrace']
            raise Exception(msg)

        # All standard result keys must be present
        for key in ['hazard_layer', 'exposure_layer', 'run_duration',
                    'run_date', 'layer']:
            assert key in data, 'Expected key %s in response: %s' % (key, data)
    def test_another_asc(self):
        """Real world ASCII file can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        layer = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(layer, full=True)

        # Verify metadata
        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL,
                                layer_name)
        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layer_type' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert 'geotransform' in metadata
        assert len(metadata['bounding_box']) == 4

        # A little metadata characterisation test
        ref = {'layer_type': 'raster',
               'keywords': {'category': 'hazard',
                            'subcategory': 'earthquake'},
               'geotransform': (105.29857, 0.0112, 0.0,
                                -5.565233000000001, 0.0, -0.0112),
               'title': 'lembang_mmi_hazmap'}

        for key in ['layer_type', 'keywords', 'geotransform',
                    'title']:

            msg = ('Expected metadata for key %s to be %s. '
                   'Instead got %s' % (key, ref[key], metadata[key]))
            if key == 'geotransform':
                # Numerical tuple needs tolerant comparison
                assert numpy.allclose(metadata[key], ref[key]), msg
            else:
                assert metadata[key] == ref[key], msg

            # NOTE(review): this per-entry check is redundant -- the strict
            # dict equality above already compared the whole keywords dict.
            if key == 'keywords':
                kwds = metadata[key]
                for k in kwds:
                    assert kwds[k] == ref[key][k]
    def Xtest_raster_upload(self):
        """Raster layer can be uploaded and downloaded again correctly
        """
        hazard_filename = ('%s/maumere_aos_depth_20m_land_wgs84.asc'
                           % TESTDATA)

        # Reference values read directly from the source file
        ref_layer = read_layer(hazard_filename)
        A_ref = ref_layer.get_data()
        depth_min_ref, depth_max_ref = ref_layer.get_extrema()

        # Upload to internal geonode
        uploaded = save_to_geonode(hazard_filename, user=self.user)
        hazard_name = '%s:%s' % (uploaded.workspace, uploaded.name)

        # Download the same data again
        bbox = get_bounding_box_string(hazard_filename)
        downloaded = download(INTERNAL_SERVER_URL, hazard_name, bbox)
        A = downloaded.get_data()

        # Shapes must agree exactly (zero tolerance)
        msg = ('Shape of downloaded raster was [%i, %i]. '
               'Expected [%i, %i].' % (A.shape[0], A.shape[1],
                                       A_ref.shape[0], A_ref.shape[1]))
        assert numpy.allclose(A_ref.shape, A.shape, rtol=0, atol=0), msg

        # Compare extrema to reference values (which have also been
        # verified by QGIS for this layer and tested in test_engine.py)
        depth_min, depth_max = downloaded.get_extrema()
        msg = ('Extrema of downloaded file were [%f, %f] but '
               'expected [%f, %f]' % (depth_min, depth_max,
                                      depth_min_ref, depth_max_ref))
        assert numpy.allclose([depth_min, depth_max],
                              [depth_min_ref, depth_max_ref],
                              rtol=1.0e-6, atol=1.0e-10), msg
Exemple #33
0
    def test_layer_upload(self):
        """Layers can be uploaded to local GeoNode
        """
        datadir = TESTDATA
        # FIXME(Ole): 'kecamatan_prj.shp' is not really 'BAD', just in a
        # different projection (TM3_Zone_48-2), so it serves as another
        # test for issue #40
        BAD_LAYERS = ['grid_without_projection.asc',
                      'kecamatan_prj.shp']

        # Collect names of layers expected (and not expected) to upload
        expected_layers = []
        not_expected_layers = []
        for root, dirs, files in os.walk(datadir):
            for filename in files:
                basename, extension = os.path.splitext(filename)
                if extension.lower() not in LAYER_TYPES:
                    continue
                # FIXME(Ole): GeoNode converts names to lower case
                name = unicode(basename.lower())
                if filename in BAD_LAYERS:
                    not_expected_layers.append(name)
                else:
                    expected_layers.append(name)

        # Upload whole directory, skipping the known-bad layers
        layers = save_to_geonode(datadir, user=self.user, overwrite=True,
                                 ignore=BAD_LAYERS)

        # Check integrity
        layer_names = [layer.name for layer in layers]
        for layer in layers:
            msg = 'Layer %s was uploaded but not expected' % layer.name
            assert layer.name in expected_layers, msg

            # Uncomment to reproduce issue #102
            # This may still also reproduce issue #40 for layer
            # tsunami_max_inundation_depth_bb_utm
            #check_layer(layer, full=True)

        for layer_name in expected_layers:
            msg = ('The following layer should have been uploaded '
                   'but was not: %s' % layer_name)
            assert layer_name in layer_names, msg

            # Check the layer is in the Django database
            Layer.objects.get(name=layer_name)

            # Check that layer is in geoserver by scanning the REST listing
            gs_username, gs_password = settings.GEOSERVER_CREDENTIALS
            page = get_web_page(os.path.join(settings.GEOSERVER_BASE_URL,
                                             'rest/layers'),
                                username=gs_username,
                                password=gs_password)
            found = False
            for line in page:
                if line.find('rest/layers/%s.html' % layer_name) > 0:
                    found = True
            if not found:
                msg = (
                    'Upload could not be verified, the layer %s is not '
                    'in geoserver %s, but GeoNode did not raise any errors, '
                    'this should never happen.' %
                    (layer_name, settings.GEOSERVER_BASE_URL))
                raise GeoNodeException(msg)

        server_url = settings.GEOSERVER_BASE_URL + 'ows?'

        # Verify that the GeoServer GetCapabilities record is accessible:
        metadata = get_metadata(server_url)
        msg = ('The metadata list should not be empty in server %s' %
               server_url)
        assert len(metadata) > 0, msg
    def test_metadata_twice(self):
        """Layer metadata can be correctly uploaded multiple times

        Reproduces ticket #99 by creating new data, uploading
        multiple times and verifying that metadata stays correct.
        """

        # Base test data
        filenames = ['Lembang_Earthquake_Scenario.asc',
                     'lembang_schools.shp']

        for org_filename in filenames:
            org_basename, ext = os.path.splitext(os.path.join(TESTDATA,
                                                              org_filename))

            # Copy data to temporary unique name so each run starts from
            # a fresh layer name
            basename = unique_filename(dir='/tmp')

            cmd = '/bin/cp %s.keywords %s.keywords' % (org_basename, basename)
            os.system(cmd)

            cmd = '/bin/cp %s.prj %s.prj' % (org_basename, basename)
            os.system(cmd)

            if ext == '.asc':
                layer_type = 'raster'
                filename = '%s.asc' % basename
                cmd = '/bin/cp %s.asc %s' % (org_basename, filename)
                os.system(cmd)
            elif ext == '.shp':
                layer_type = 'vector'
                filename = '%s.shp' % basename
                # A shapefile consists of several companion files
                for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                    cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e,
                                                   basename, e)
                    os.system(cmd)
            else:
                # FIX: refer to org_filename here; 'filename' is not yet
                # assigned on this branch and would raise NameError
                msg = ('Unknown layer extension in %s. '
                       'Expected .shp or .asc' % org_filename)
                raise Exception(msg)

            # Repeat multiple times (ticket #99)
            for i in range(3):

                # Upload
                layer = save_to_geonode(filename, user=self.user,
                                        overwrite=True)

                # Get metadata
                layer_name = '%s:%s' % (layer.workspace, layer.name)
                metadata = get_metadata(INTERNAL_SERVER_URL,
                                        layer_name)

                # Verify all expected metadata entries are present
                assert 'id' in metadata
                assert 'title' in metadata
                assert 'layer_type' in metadata
                assert 'keywords' in metadata
                assert 'bounding_box' in metadata
                assert len(metadata['bounding_box']) == 4

                # Check integrity between Django layer and file
                assert_bounding_box_matches(layer, filename)

                # Check integrity between file and OWS metadata
                ref_bbox = get_bounding_box(filename)
                msg = ('Bounding box from OWS did not match bounding box '
                       'from file. They are\n'
                       'From file %s: %s\n'
                       'From OWS: %s' % (filename,
                                         ref_bbox,
                                         metadata['bounding_box']))

                assert numpy.allclose(metadata['bounding_box'],
                                      ref_bbox), msg
                assert layer.name == metadata['title']
                assert layer_name == metadata['id']
                assert layer_type == metadata['layer_type']

                # Derive expected keywords from the layer type
                if layer_type == 'raster':
                    category = 'hazard'
                    subcategory = 'earthquake'
                elif layer_type == 'vector':
                    category = 'exposure'
                    subcategory = 'building'
                else:
                    msg = 'Unknown layer type %s' % layer_type
                    raise Exception(msg)

                keywords = metadata['keywords']

                msg = 'Did not find key "category" in keywords: %s' % keywords
                assert 'category' in keywords, msg

                msg = ('Did not find key "subcategory" in keywords: %s'
                       % keywords)
                assert 'subcategory' in keywords, msg

                msg = ('Category keyword %s did not match expected %s'
                       % (keywords['category'], category))
                assert category == keywords['category'], msg

                # FIX: this message previously interpolated 'category' as
                # the expected value; it should be 'subcategory'
                msg = ('Subcategory keyword %s did not match expected %s'
                       % (keywords['subcategory'], subcategory))
                assert subcategory == keywords['subcategory'], msg
Exemple #35
0
    def test_metadata(self):
        """Metadata is retrieved correctly for both raster and vector data
        """

        # Upload test data
        filenames = [
            'Lembang_Earthquake_Scenario.asc', 'Earthquake_Ground_Shaking.asc',
            'lembang_schools.shp', 'Padang_WGS84.shp'
        ]
        layers = []
        paths = []
        for filename in filenames:
            path = os.path.join(TESTDATA, filename)
            layer = save_to_geonode(path, user=self.user, overwrite=True)

            # Record layer and file
            layers.append(layer)
            paths.append(path)

        # Check integrity
        for i, layer in enumerate(layers):

            # Determine expected layer type from the file extension
            if filenames[i].endswith('.shp'):
                layer_type = 'vector'
            elif filenames[i].endswith('.asc'):
                layer_type = 'raster'
            else:
                msg = ('Unknown layer extension in %s. '
                       'Expected .shp or .asc' % filenames[i])
                raise Exception(msg)

            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

            # Verify all expected metadata entries are present
            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layer_type' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, paths[i])

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(paths[i])
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' %
                   (paths[i], ref_bbox, metadata['bounding_box']))

            assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg
            assert layer.name == metadata['title']
            assert layer_name == metadata['id']
            assert layer_type == metadata['layer_type']

            # Derive expected keywords from the layer type
            if layer_type == 'raster':
                category = 'hazard'
                subcategory = 'earthquake'
            elif layer_type == 'vector':
                category = 'exposure'
                subcategory = 'building'
            else:
                msg = 'Unknown layer type %s' % layer_type
                raise Exception(msg)

            keywords = metadata['keywords']

            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg

            msg = 'Did not find key "subcategory" in keywords: %s' % keywords
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s' %
                   (keywords['category'], category))
            assert category == keywords['category'], msg

            # FIX: this message previously interpolated 'category' as the
            # expected value; it should be 'subcategory'
            msg = ('Subcategory keyword %s did not match expected %s' %
                   (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg
Exemple #36
0
    def test_metadata_twice(self):
        """Layer metadata can be correctly uploaded multiple times

        Reproduces ticket #99 by creating new data, uploading
        multiple times and verifying that metadata stays correct.
        """

        # Base test data
        filenames = ['Lembang_Earthquake_Scenario.asc', 'lembang_schools.shp']

        for org_filename in filenames:
            org_basename, ext = os.path.splitext(
                os.path.join(TESTDATA, org_filename))

            # Copy data to temporary unique name so each run starts from
            # a fresh layer name
            basename = unique_filename(dir='/tmp')

            cmd = '/bin/cp %s.keywords %s.keywords' % (org_basename, basename)
            os.system(cmd)

            cmd = '/bin/cp %s.prj %s.prj' % (org_basename, basename)
            os.system(cmd)

            if ext == '.asc':
                layer_type = 'raster'
                filename = '%s.asc' % basename
                cmd = '/bin/cp %s.asc %s' % (org_basename, filename)
                os.system(cmd)
            elif ext == '.shp':
                layer_type = 'vector'
                filename = '%s.shp' % basename
                # A shapefile consists of several companion files
                for e in ['shp', 'shx', 'sbx', 'sbn', 'dbf']:
                    cmd = '/bin/cp %s.%s %s.%s' % (org_basename, e, basename,
                                                   e)
                    os.system(cmd)
            else:
                # FIX: refer to org_filename here; 'filename' is not yet
                # assigned on this branch and would raise NameError
                msg = ('Unknown layer extension in %s. '
                       'Expected .shp or .asc' % org_filename)
                raise Exception(msg)

            # Repeat multiple times (ticket #99)
            for i in range(3):

                # Upload
                layer = save_to_geonode(filename,
                                        user=self.user,
                                        overwrite=True)

                # Get metadata
                layer_name = '%s:%s' % (layer.workspace, layer.name)
                metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)

                # Verify all expected metadata entries are present
                assert 'id' in metadata
                assert 'title' in metadata
                assert 'layer_type' in metadata
                assert 'keywords' in metadata
                assert 'bounding_box' in metadata
                assert len(metadata['bounding_box']) == 4

                # Check integrity between Django layer and file
                assert_bounding_box_matches(layer, filename)

                # Check integrity between file and OWS metadata
                ref_bbox = get_bounding_box(filename)
                msg = ('Bounding box from OWS did not match bounding box '
                       'from file. They are\n'
                       'From file %s: %s\n'
                       'From OWS: %s' %
                       (filename, ref_bbox, metadata['bounding_box']))

                assert numpy.allclose(metadata['bounding_box'], ref_bbox), msg
                assert layer.name == metadata['title']
                assert layer_name == metadata['id']
                assert layer_type == metadata['layer_type']

                # Derive expected keywords from the layer type
                if layer_type == 'raster':
                    category = 'hazard'
                    subcategory = 'earthquake'
                elif layer_type == 'vector':
                    category = 'exposure'
                    subcategory = 'building'
                else:
                    msg = 'Unknown layer type %s' % layer_type
                    raise Exception(msg)

                keywords = metadata['keywords']

                msg = 'Did not find key "category" in keywords: %s' % keywords
                assert 'category' in keywords, msg

                msg = ('Did not find key "subcategory" in keywords: %s' %
                       keywords)
                assert 'subcategory' in keywords, msg

                msg = ('Category keyword %s did not match expected %s' %
                       (keywords['category'], category))
                assert category == keywords['category'], msg

                # FIX: this message previously interpolated 'category' as
                # the expected value; it should be 'subcategory'
                msg = ('Subcategory keyword %s did not match expected %s' %
                       (keywords['subcategory'], subcategory))
                assert subcategory == keywords['subcategory'], msg
    def test_layer_upload(self):
        """Layers can be uploaded to local GeoNode
        """

        expected_layers = []
        not_expected_layers = []
        datadir = TESTDATA
        BAD_LAYERS = ['grid_without_projection.asc',
                      'kecamatan_prj.shp']  # FIXME(Ole): This layer is not
                                            # 'BAD', just in a different
                                            # projection (TM3_Zone_48-2) so
                                            # serves as another test for
                                            # issue #40

        # Collect names of layers expected (and not expected) to upload,
        # based on file extension and the BAD_LAYERS exclusion list
        for root, dirs, files in os.walk(datadir):
            for filename in files:
                basename, extension = os.path.splitext(filename)

                if extension.lower() in LAYER_TYPES:
                    # FIXME(Ole): GeoNode converts names to lower case
                    name = unicode(basename.lower())
                    if filename in BAD_LAYERS:
                        not_expected_layers.append(name)
                    else:
                        expected_layers.append(name)

        # Upload the whole directory, skipping the known-bad layers
        layers = save_to_geonode(datadir, user=self.user, overwrite=True,
                                 ignore=BAD_LAYERS)

        # Check integrity
        layer_names = [l.name for l in layers]
        for layer in layers:
            msg = 'Layer %s was uploaded but not expected' % layer.name
            assert layer.name in expected_layers, msg

            # Uncomment to reproduce issue #102
            # This may still also reproduce issue #40 for layer
            # tsunami_max_inundation_depth_bb_utm
            #check_layer(layer, full=True)

        for layer_name in expected_layers:
            msg = ('The following layer should have been uploaded '
                   'but was not: %s' % layer_name)
            assert layer_name in layer_names, msg

            # Check the layer is in the Django database
            Layer.objects.get(name=layer_name)

            # Check that layer is in geoserver
            found = False
            gs_username, gs_password = settings.GEOSERVER_CREDENTIALS
            page = get_web_page(os.path.join(settings.GEOSERVER_BASE_URL,
                                             'rest/layers'),
                                             username=gs_username,
                                             password=gs_password)
            # Scan the REST listing for a link to this layer
            for line in page:
                if line.find('rest/layers/%s.html' % layer_name) > 0:
                    found = True
            if not found:
                msg = ('Upload could not be verified, the layer %s is not '
                   'in geoserver %s, but GeoNode did not raise any errors, '
                   'this should never happen.' %
                   (layer_name, settings.GEOSERVER_BASE_URL))
                raise GeoNodeException(msg)

        server_url = settings.GEOSERVER_BASE_URL + 'ows?'

        # Verify that the GeoServer GetCapabilities record is accessible:
        metadata = get_metadata(server_url)
        msg = ('The metadata list should not be empty in server %s'
                % server_url)
        assert len(metadata) > 0, msg
Exemple #38
0
    def test_specified_raster_resolution(self):
        """Raster layers can be downloaded with specific resolution

        This is another test for ticket #103

        Native test data:

        maumere....asc
        ncols 931
        nrows 463
        cellsize 0.00018

        Population_Jakarta
        ncols         638
        nrows         649
        cellsize      0.00045228819716044

        Population_2010
        ncols         5525
        nrows         2050
        cellsize      0.0083333333333333


        Here we download it at a range of fixed resolutions that
        are both coarser and finer, and check that the dimensions
        of the downloaded matrix are as expected.

        We also check that the extrema of the subsampled matrix are sane
        """

        for test_filename in [
                'maumere_aos_depth_20m_land_wgs84.asc',
                'Population_Jakarta_geographic.asc', 'Population_2010.asc'
        ]:

            hazard_filename = ('%s/%s' % (TESTDATA, test_filename))

            # Get reference values
            H = read_layer(hazard_filename)
            depth_min_ref, depth_max_ref = H.get_extrema()
            native_resolution = H.get_resolution()

            # Upload to internal geonode
            hazard_layer = save_to_geonode(hazard_filename, user=self.user)
            hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

            # Test for a range of resolutions
            for res in [
                    0.02,
                    0.01,
                    0.005,
                    0.002,
                    0.001,
                    0.0005,  # Coarser
                    0.0002,
                    0.0001,
                    0.00006,
                    0.00003
            ]:  # Finer

                # To save time don't do finest resolution for the
                # two population sets
                if test_filename.startswith('Population') and res < 0.00006:
                    break

                # Set bounding box
                bbox = get_bounding_box_string(hazard_filename)
                compare_extrema = True
                if test_filename == 'Population_2010.asc':
                    # Make bbox small for finer resolutions to
                    # save time and to test that as well.
                    # However, extrema obviously won't match those
                    # of the full dataset. Once we can clip
                    # datasets, we can remove this restriction.
                    if res < 0.005:
                        bbox = '106.685974,-6.373421,106.974534,-6.079886'
                        compare_extrema = False
                bb = bboxstring2list(bbox)

                # Download data at specified resolution
                H = download(INTERNAL_SERVER_URL,
                             hazard_name,
                             bbox,
                             resolution=res)
                A = H.get_data()

                # Verify that data has the requested bbox and resolution
                actual_bbox = H.get_bounding_box()
                msg = ('Bounding box for %s was not as requested. I got %s '
                       'but '
                       'expected %s' % (hazard_name, actual_bbox, bb))
                # FIX: attach msg to the assertion (it was built but unused)
                assert numpy.allclose(actual_bbox, bb, rtol=1.0e-6), msg

                # FIXME (Ole): How do we sensibly resolve the issue with
                #              resx, resy vs one resolution (issue #173)
                actual_resolution = H.get_resolution()[0]

                # FIXME (Ole): Resolution is often far from the requested
                #              see issue #102
                #              Here we have to accept up to 5%
                tolerance102 = 5.0e-2
                msg = ('Resolution of %s was not as requested. I got %s but '
                       'expected %s' % (hazard_name, actual_resolution, res))
                assert numpy.allclose(actual_resolution,
                                      res,
                                      rtol=tolerance102), msg

                # Determine expected shape from bbox (W, S, E, N)
                ref_rows = int(round((bb[3] - bb[1]) / res))
                ref_cols = int(round((bb[2] - bb[0]) / res))

                # Compare shapes (generally, this may differ by 1)
                msg = ('Shape of downloaded raster was [%i, %i]. '
                       'Expected [%i, %i].' %
                       (A.shape[0], A.shape[1], ref_rows, ref_cols))
                assert (ref_rows == A.shape[0] and ref_cols == A.shape[1]), msg

                # Assess that the range of the interpolated data is sane
                if not compare_extrema:
                    continue

                # For these test sets we get exact match of the minimum
                msg = (
                    'Minimum of %s resampled at resolution %f '
                    'was %f. Expected %f.' %
                    (hazard_layer.name, res, numpy.nanmin(A), depth_min_ref))
                assert numpy.allclose(depth_min_ref,
                                      numpy.nanmin(A),
                                      rtol=0.0,
                                      atol=0.0), msg

                # At the maximum it depends on the subsampling
                msg = (
                    'Maximum of %s resampled at resolution %f '
                    'was %f. Expected %f.' %
                    (hazard_layer.name, res, numpy.nanmax(A), depth_max_ref))
                if res < native_resolution[0]:
                    # When subsampling to finer resolutions we expect a
                    # close match
                    assert numpy.allclose(depth_max_ref,
                                          numpy.nanmax(A),
                                          rtol=1.0e-10,
                                          atol=1.0e-8), msg
                elif res < native_resolution[0] * 10:
                    # When upsampling to coarser resolutions we expect
                    # ballpark match (~20%)
                    assert numpy.allclose(depth_max_ref,
                                          numpy.nanmax(A),
                                          rtol=0.17,
                                          atol=0.0), msg
                else:
                    # Upsampling to very coarse resolutions, just want sanity
                    assert 0 < numpy.nanmax(A) <= depth_max_ref
Exemple #39
0
    def test_the_earthquake_fatality_estimation_allen(self):
        """Fatality computation computed correctly with GeoServer Data
        """

        # Simulate bounding box from application
        viewport_bbox_string = '104.3,-8.2,110.04,-5.17'

        # Upload exposure data for this test
        name = 'Population_2010'
        exposure_filename = '%s/%s.asc' % (TESTDATA, name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)

        workspace = exposure_layer.workspace
        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        # FIX: attach msg to the assertion (it was built but unused)
        assert workspace == 'geonode', msg

        layer_name = exposure_layer.name
        msg = 'Expected layer name to be "%s". Got %s' % (name, layer_name)
        assert layer_name == name.lower(), msg

        exposure_name = '%s:%s' % (workspace, layer_name)

        # Check metadata
        assert_bounding_box_matches(exposure_layer, exposure_filename)
        exp_bbox_string = get_bounding_box_string(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Now we know that exposure layer is good, lets upload some
        # hazard layers and do the calculations
        filename = 'Lembang_Earthquake_Scenario.asc'

        # Save
        hazard_filename = '%s/%s' % (TESTDATA, filename)
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user, overwrite=True)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        # Check metadata
        assert_bounding_box_matches(hazard_layer, hazard_filename)
        haz_bbox_string = get_bounding_box_string(hazard_filename)
        check_layer(hazard_layer, full=True)

        # Run calculation
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                hazard_server=INTERNAL_SERVER_URL,
                hazard=hazard_name,
                exposure_server=INTERNAL_SERVER_URL,
                exposure=exposure_name,
                #bbox=viewport_bbox_string,
                bbox=exp_bbox_string,  # This one reproduced the
                                       # crash for lembang
                impact_function='EarthquakeFatalityFunction',
                keywords='test,shakemap,usgs'))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        if 'errors' in data:
            errors = data['errors']
            if errors is not None:
                msg = ('The server returned the error message: %s'
                       % str(errors))
                raise Exception(msg)

        # Sanity checks on the response payload
        assert 'success' in data
        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data

        assert data['success']

        # Download result and check it was written to disk
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                get_bounding_box_string(hazard_filename))
        assert os.path.exists(result_layer.filename)
Exemple #40
0
    def test_linked_datasets(self):
        """Linked datasets can be pulled in e.g. to include gender break down
        """

        # Upload exposure data for this test. This will automatically
        # pull in female_pct_yogya.asc through its "associates" keyword
        name = 'population_yogya'
        exposure_filename = '%s/%s.asc' % (TESTDATA, name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        # Verify exposure metadata
        assert_bounding_box_matches(exposure_layer, exposure_filename)
        exp_bbox_string = get_bounding_box_string(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Upload hazard data
        hazard_filename = '%s/%s' % (TESTDATA, 'eq_yogya_2006.asc')
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user, overwrite=True)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        # Verify hazard metadata
        assert_bounding_box_matches(hazard_layer, hazard_filename)
        haz_bbox_string = get_bounding_box_string(hazard_filename)
        check_layer(hazard_layer, full=True)

        # Run calculation through the API
        client = Client()
        rv = client.post('/impact/api/calculate/',
                         data={'hazard_server': INTERNAL_SERVER_URL,
                               'hazard': hazard_name,
                               'exposure_server': INTERNAL_SERVER_URL,
                               'exposure': exposure_name,
                               'bbox': haz_bbox_string,
                               'impact_function':
                                   'EarthquakeFatalityFunction',
                               'keywords': 'test,fatalities,population,usgs'})

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        errors = data.get('errors')
        if errors is not None:
            msg = ('The server returned the error message: %s'
                   % str(errors))
            raise Exception(msg)

        # Sanity checks on the response payload
        for field in ('success', 'hazard_layer', 'exposure_layer',
                      'run_duration', 'run_date', 'layer'):
            assert field in data

        assert data['success']

        # Download result and check it was written to disk
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                get_bounding_box_string(hazard_filename))
        assert os.path.exists(result_layer.filename)

        # Check calculated values
        keywords = result_layer.get_keywords()

        assert 'caption' in keywords
 def test_tiff(self):
     """Verify that a GeoTIFF file can be uploaded to geonode
     """
     tif_path = os.path.join(TESTDATA, 'Population_2010_clip.tif')
     layer = save_to_geonode(tif_path, user=self.user, overwrite=True)
     check_layer(layer, full=True)
 def test_asc(self):
     """Verify that an ASCII raster file can be uploaded to geonode
     """
     asc_path = os.path.join(TESTDATA, 'test_grid.asc')
     layer = save_to_geonode(asc_path, user=self.user, overwrite=True)
     check_layer(layer, full=True)
Exemple #43
0
    def test_jakarta_flood_study(self):
        """HKV Jakarta flood study calculated correctly using the API

        Runs two flood-depth scenarios through the calculation API and
        verifies the resulting impact against numbers from an
        independent study by HKV.
        """

        # FIXME (Ole): Redo with population as shapefile later

        # Expected impact values from HKV, one per hazard scenario below
        expected_values = [2485442, 1537920]

        # Name files for hazard level, exposure and expected fatalities
        population = 'Population_Jakarta_geographic'
        plugin_name = 'FloodImpactFunction'

        # Upload exposure data for this test
        exposure_filename = '%s/%s.asc' % (TESTDATA, population)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)

        workspace = exposure_layer.workspace
        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        assert workspace == 'geonode'

        layer_name = exposure_layer.name
        msg = 'Expected layer name to be "%s". Got %s' % (population,
                                                          layer_name)
        assert layer_name.lower() == population.lower(), msg

        exposure_name = '%s:%s' % (workspace, layer_name)

        # Check metadata
        assert_bounding_box_matches(exposure_layer, exposure_filename)
        exp_bbox_string = get_bounding_box_string(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Now we know that exposure layer is good, lets upload some
        # hazard layers and do the calculations
        for i, filename in enumerate(
                ['Flood_Current_Depth_Jakarta_geographic.asc',
                 'Flood_Design_Depth_Jakarta_geographic.asc']):

            # Save hazard layer to the internal geonode
            hazard_filename = os.path.join(TESTDATA, filename)
            hazard_layer = save_to_geonode(hazard_filename,
                                           user=self.user, overwrite=True)
            hazard_name = '%s:%s' % (hazard_layer.workspace,
                                     hazard_layer.name)

            # Check metadata
            assert_bounding_box_matches(hazard_layer, hazard_filename)
            check_layer(hazard_layer, full=True)

            # Run calculation through the API
            c = Client()
            rv = c.post('/impact/api/calculate/', data=dict(
                    hazard_server=INTERNAL_SERVER_URL,
                    hazard=hazard_name,
                    exposure_server=INTERNAL_SERVER_URL,
                    exposure=exposure_name,
                    bbox=exp_bbox_string,
                    impact_function=plugin_name,
                    keywords='test,flood,HKV'))

            self.assertEqual(rv.status_code, 200)
            self.assertEqual(rv['Content-Type'], 'application/json')
            data = json.loads(rv.content)
            if 'errors' in data:
                errors = data['errors']
                if errors is not None:
                    raise Exception(errors)

            assert 'hazard_layer' in data
            assert 'exposure_layer' in data
            assert 'run_duration' in data
            assert 'run_date' in data
            assert 'layer' in data

            # Do calculation manually and check result
            hazard_raster = read_layer(hazard_filename)
            H = hazard_raster.get_data(nan=0)

            exposure_raster = read_layer(exposure_filename)
            P = exposure_raster.get_data(nan=0)

            # Calculate impact manually: population (per 100000) in
            # cells flooded deeper than 0.1m, scaled by pixel area
            pixel_area = 2500
            I = numpy.where(H > 0.1, P, 0) / 100000.0 * pixel_area

            # Verify correctness against results from HKV
            res = sum(I.flat)
            ref = expected_values[i]

            msg = 'Got result %f but expected %f' % (res, ref)
            assert numpy.allclose(res, ref, rtol=1.0e-2), msg

            # Verify correctness of result
            # Download result and check
            layer_name = data['layer'].split('/')[-1]

            result_layer = download(INTERNAL_SERVER_URL,
                                    layer_name,
                                    get_bounding_box_string(hazard_filename))
            assert os.path.exists(result_layer.filename)

            calculated_raster = read_layer(result_layer.filename)
            C = calculated_raster.get_data(nan=0)

            # FIXME (Ole): Bring this back
            # Check caption
            #caption = calculated_raster.get_caption()
            #expct = 'people'
            #msg = ('Caption %s did not contain expected '
            #       'keyword %s' % (caption, expct))
            #assert expct in caption, msg

            # Compare shape and extrema
            msg = ('Shape of calculated raster differs from reference '
                   'raster: C=%s, I=%s' % (C.shape, I.shape))
            assert numpy.allclose(C.shape, I.shape,
                                  rtol=1e-12, atol=1e-12), msg

            msg = ('Minimum of calculated raster differs from reference '
                   'raster: '
                   'C=%s, I=%s' % (numpy.nanmin(C), numpy.nanmin(I)))
            assert numpy.allclose(numpy.nanmin(C), numpy.nanmin(I),
                                  rtol=1e-6, atol=1e-12), msg
            msg = ('Maximum of calculated raster differs from reference '
                   'raster: '
                   'C=%s, I=%s' % (numpy.nanmax(C), numpy.nanmax(I)))
            assert numpy.allclose(numpy.nanmax(C), numpy.nanmax(I),
                                  rtol=1e-6, atol=1e-12), msg

            # Compare every single value numerically (a bit loose -
            # probably due to single precision conversions when
            # data flows through geonode)
            #
            # FIXME: Not working - but since this test is about
            # issue #162 we'll leave it for now. TODO with NAN
            # Manually verified that the two expected values are correct,
            # though.
            #msg = 'Array values of written raster array were not as expected'
            #assert numpy.allclose(C, I, rtol=1e-2, atol=1e-5), msg

            # Check that extrema are in range.
            # BUG FIX: invert the NaN mask with ~ instead of unary minus,
            # which is an error on boolean arrays in modern numpy.
            xmin, xmax = calculated_raster.get_extrema()

            msg = ('Calculated values were outside extrema '
                   '[%s, %s]' % (xmin, xmax))
            assert numpy.alltrue(C[~numpy.isnan(C)] >= xmin), msg
            assert numpy.alltrue(C[~numpy.isnan(C)] <= xmax), msg
            assert numpy.alltrue(C[~numpy.isnan(C)] >= 0), msg
Exemple #44
0
    def test_lembang_building_examples(self):
        """Lembang building impact calculation works through the API

        Runs the earthquake building-damage function on the Lembang
        school data and verifies interpolated MMI and damage values
        against the reference damage function.
        """

        # Test for a range of hazard layers
        for mmi_filename in ['lembang_mmi_hazmap.asc']:
                             #'Lembang_Earthquake_Scenario.asc']:

            # Upload input data
            hazardfile = os.path.join(TESTDATA, mmi_filename)
            hazard_layer = save_to_geonode(hazardfile, user=self.user)
            hazard_name = '%s:%s' % (hazard_layer.workspace,
                                     hazard_layer.name)

            exposurefile = os.path.join(TESTDATA, 'lembang_schools.shp')
            exposure_layer = save_to_geonode(exposurefile, user=self.user)
            exposure_name = '%s:%s' % (exposure_layer.workspace,
                                       exposure_layer.name)

            # Call calculation routine

            # FIXME (Ole): The system freaks out if there are spaces in
            #              bbox string. Please let us catch that and deal
            #              nicely with it - also do this in download()
            bbox = '105.592,-7.809,110.159,-5.647'

            with warnings.catch_warnings():
                warnings.simplefilter('ignore')

                c = Client()
                rv = c.post('/impact/api/calculate/', data=dict(
                        hazard_server=INTERNAL_SERVER_URL,
                        hazard=hazard_name,
                        exposure_server=INTERNAL_SERVER_URL,
                        exposure=exposure_name,
                        bbox=bbox,
                        impact_function='Earthquake Building Damage Function',
                        keywords='test,schools,lembang',
                        ))

            self.assertEqual(rv.status_code, 200)
            self.assertEqual(rv['Content-Type'], 'application/json')
            data = json.loads(rv.content)
            # Membership test directly on the dict (no need for .keys())
            assert 'hazard_layer' in data
            assert 'exposure_layer' in data
            assert 'run_duration' in data
            assert 'run_date' in data
            assert 'layer' in data

            # Download result and check
            layer_name = data['layer'].split('/')[-1]

            result_layer = download(INTERNAL_SERVER_URL,
                                    layer_name,
                                    bbox)
            assert os.path.exists(result_layer.filename)

            # Read hazard data for reference
            hazard_raster = read_layer(hazardfile)
            A = hazard_raster.get_data()
            mmi_min, mmi_max = hazard_raster.get_extrema()

            # Read calculated result
            impact_vector = read_layer(result_layer.filename)
            coordinates = impact_vector.get_geometry()
            attributes = impact_vector.get_data()

            # Verify calculated result
            count = 0
            for i in range(len(attributes)):
                lon, lat = coordinates[i][:]
                calculated_mmi = attributes[i]['MMI']

                if calculated_mmi == 0.0:
                    # FIXME (Ole): Some points have MMI==0 here.
                    # Weird but not a show stopper
                    continue

                # Check that interpolated points are within range
                msg = ('Interpolated mmi %f was outside extrema: '
                       '[%f, %f] at location '
                       '[%f, %f]. ' % (calculated_mmi,
                                       mmi_min, mmi_max,
                                       lon, lat))
                assert mmi_min <= calculated_mmi <= mmi_max, msg

                # Check calculated damage against reference implementation
                calculated_dam = attributes[i]['DAMAGE']

                ref_dam = lembang_damage_function(calculated_mmi)
                msg = ('Calculated damage was not as expected '
                       'for hazard layer %s' % hazardfile)
                assert numpy.allclose(calculated_dam, ref_dam,
                                      rtol=1.0e-12), msg

                count += 1

            # Make sure only a few points were 0
            assert count > len(attributes) - 4
Exemple #45
0
    def test_data_resampling_example(self):
        """Raster data is unchanged when going through geonode

        Uploads raster and vector test data, downloads them again and
        verifies that cell values, extrema, geometry and interpolated
        values agree with the original files.
        """

        # Name file names for hazard level, exposure and expected fatalities
        hazard_filename = ('%s/maumere_aos_depth_20m_land_wgs84.asc'
                           % TESTDATA)
        exposure_filename = ('%s/maumere_pop_prj.shp' % TESTDATA)

        #------------
        # Hazard data
        #------------
        # Read hazard input data for reference
        H_ref = read_layer(hazard_filename)

        A_ref = H_ref.get_data()
        depth_min_ref, depth_max_ref = H_ref.get_extrema()

        # Upload to internal geonode
        hazard_layer = save_to_geonode(hazard_filename, user=self.user)
        hazard_name = '%s:%s' % (hazard_layer.workspace, hazard_layer.name)

        # Download data again
        bbox = get_bounding_box_string(hazard_filename)  # The biggest
        H = download(INTERNAL_SERVER_URL, hazard_name, bbox)

        A = H.get_data()
        depth_min, depth_max = H.get_extrema()

        # FIXME (Ole): The layer read from file is single precision only:
        # Issue #17
        # Here's the explanation why interpolation below produce slightly
        # different results (but why?)
        # The layer read from file is single precision which may be due to
        # the way it is converted from ASC to TIF. In other words the
        # problem may be in raster.write_to_file. Float64 is
        # specified there, so this is a mystery.

        # Compare extrema to values from numpy array
        assert numpy.allclose(depth_max, numpy.nanmax(A),
                              rtol=1.0e-12, atol=1.0e-12)

        assert numpy.allclose(depth_max_ref, numpy.nanmax(A_ref),
                              rtol=1.0e-12, atol=1.0e-12)

        # Compare to reference
        assert numpy.allclose([depth_min, depth_max],
                              [depth_min_ref, depth_max_ref],
                              rtol=1.0e-12, atol=1.0e-12)

        # Compare extrema to values read off QGIS for this layer
        assert numpy.allclose([depth_min, depth_max], [0.0, 16.68],
                              rtol=1.0e-6, atol=1.0e-10)

        # Every non-NaN cell must round-trip exactly; report the first
        # discrepancy with full precision
        for i in range(A.shape[0]):
            for j in range(A.shape[1]):
                if not numpy.isnan(A[i, j]):
                    err = abs(A[i, j] - A_ref[i, j])
                    if err > 0:
                        msg = ('%i, %i: %.15f, %.15f, %.15f'
                               % (i, j, A[i, j], A_ref[i, j], err))
                        raise Exception(msg)

        # Compare elements (nan & numbers).
        # BUG FIX: invert the boolean NaN mask with ~ instead of unary
        # minus, which is an error on boolean arrays in modern numpy.
        id_nan = numpy.isnan(A)
        id_nan_ref = numpy.isnan(A_ref)
        assert numpy.all(id_nan == id_nan_ref)
        assert numpy.allclose(A[~id_nan], A_ref[~id_nan],
                              rtol=1.0e-15, atol=1.0e-15)

        # Spot check the cell holding the maximum value
        assert numpy.allclose(A[245, 283], A_ref[245, 283],
                              rtol=1.0e-15, atol=1.0e-15)

        #--------------
        # Exposure data
        #--------------
        # Read exposure input data for reference
        E_ref = read_layer(exposure_filename)

        # Upload to internal geonode
        exposure_layer = save_to_geonode(exposure_filename, user=self.user)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        # Download data again
        E = download(INTERNAL_SERVER_URL, exposure_name, bbox)

        # Check exposure data against reference
        coordinates = E.get_geometry()
        coordinates_ref = E_ref.get_geometry()
        assert numpy.allclose(coordinates, coordinates_ref,
                              rtol=1.0e-12, atol=1.0e-12)

        attributes = E.get_data()
        attributes_ref = E_ref.get_data()
        for i, att in enumerate(attributes):
            att_ref = attributes_ref[i]
            for key in att:
                assert att[key] == att_ref[key]

        # Test riab's interpolation function
        I = H.interpolate(E, name='depth')
        icoordinates = I.get_geometry()

        I_ref = H_ref.interpolate(E_ref, name='depth')
        icoordinates_ref = I_ref.get_geometry()

        assert numpy.allclose(coordinates,
                              icoordinates,
                              rtol=1.0e-12, atol=1.0e-12)
        assert numpy.allclose(coordinates,
                              icoordinates_ref,
                              rtol=1.0e-12, atol=1.0e-12)

        iattributes = I.get_data()
        assert numpy.allclose(icoordinates, coordinates)

        N = len(icoordinates)
        assert N == 891

        # Set tolerance for single precision until issue #17 has been fixed
        # It appears that the single precision leads to larger interpolation
        # errors
        rtol_issue17 = 2.0e-3
        atol_issue17 = 1.0e-4

        # Hoisted out of the loop below (was called once per point)
        iattributes_ref = I_ref.get_data()

        # Verify interpolated values with test result
        for i in range(N):

            interpolated_depth_ref = iattributes_ref[i]['depth']
            interpolated_depth = iattributes[i]['depth']

            assert nanallclose(interpolated_depth,
                               interpolated_depth_ref,
                               rtol=rtol_issue17, atol=atol_issue17)

            pointid = attributes[i]['POINTID']

            if pointid == 263:
                # Check that location is correct
                assert numpy.allclose(coordinates[i],
                                      [122.20367299, -8.61300358],
                                      rtol=1.0e-7, atol=1.0e-12)

                # This is known to be outside inundation area so should
                # near zero
                assert numpy.allclose(interpolated_depth, 0.0,
                                      rtol=1.0e-12, atol=1.0e-12)

            if pointid == 148:
                # Check that location is correct
                assert numpy.allclose(coordinates[i],
                                      [122.2045912, -8.608483265],
                                      rtol=1.0e-7, atol=1.0e-12)

                # This is in an inundated area with a surrounding depths of
                # 4.531, 3.911
                # 2.675, 2.583
                assert interpolated_depth < 4.531
                assert interpolated_depth < 3.911
                assert interpolated_depth > 2.583
                assert interpolated_depth > 2.675

                # This is a characterisation test for bilinear interpolation
                assert numpy.allclose(interpolated_depth, 3.62477215491,
                                      rtol=rtol_issue17, atol=1.0e-12)

            # Check that interpolated points are within range
            msg = ('Interpolated depth %f at point %i was outside extrema: '
                   '[%f, %f]. ' % (interpolated_depth, i,
                                   depth_min, depth_max))

            if not numpy.isnan(interpolated_depth):
                assert depth_min <= interpolated_depth <= depth_max, msg
    def test_metadata(self):
        """Metadata is retrieved correctly for both raster and vector data

        Uploads a mix of raster (.asc) and vector (.shp) layers and
        checks the OWS metadata (id, title, type, bounding box and
        category keywords) against the source files.
        """

        # Upload test data
        filenames = ['Lembang_Earthquake_Scenario.asc',
                     'Earthquake_Ground_Shaking.asc',
                     'lembang_schools.shp',
                     'Padang_WGS84.shp']
        layers = []
        paths = []
        for filename in filenames:
            path = os.path.join(TESTDATA, filename)
            layer = save_to_geonode(path, user=self.user, overwrite=True)

            # Record layer and file
            layers.append(layer)
            paths.append(path)

        # Check integrity
        for i, layer in enumerate(layers):

            # Determine expected layer type from the file extension
            if filenames[i].endswith('.shp'):
                layer_type = 'vector'
            elif filenames[i].endswith('.asc'):
                layer_type = 'raster'
            else:
                msg = ('Unknown layer extension in %s. '
                       'Expected .shp or .asc' % filenames[i])
                raise Exception(msg)

            layer_name = '%s:%s' % (layer.workspace, layer.name)
            metadata = get_metadata(INTERNAL_SERVER_URL,
                                    layer_name)

            assert 'id' in metadata
            assert 'title' in metadata
            assert 'layer_type' in metadata
            assert 'keywords' in metadata
            assert 'bounding_box' in metadata
            assert len(metadata['bounding_box']) == 4

            # Check integrity between Django layer and file
            assert_bounding_box_matches(layer, paths[i])

            # Check integrity between file and OWS metadata
            ref_bbox = get_bounding_box(paths[i])
            msg = ('Bounding box from OWS did not match bounding box '
                   'from file. They are\n'
                   'From file %s: %s\n'
                   'From OWS: %s' % (paths[i],
                                     ref_bbox,
                                     metadata['bounding_box']))

            assert numpy.allclose(metadata['bounding_box'],
                                  ref_bbox), msg
            assert layer.name == metadata['title']
            assert layer_name == metadata['id']
            assert layer_type == metadata['layer_type']

            # Check keywords
            if layer_type == 'raster':
                category = 'hazard'
                subcategory = 'earthquake'
            elif layer_type == 'vector':
                category = 'exposure'
                subcategory = 'building'
            else:
                msg = 'Unknown layer type %s' % layer_type
                raise Exception(msg)

            keywords = metadata['keywords']

            msg = 'Did not find key "category" in keywords: %s' % keywords
            assert 'category' in keywords, msg

            msg = 'Did not find key "subcategory" in keywords: %s' % keywords
            assert 'subcategory' in keywords, msg

            msg = ('Category keyword %s did not match expected %s'
                   % (keywords['category'], category))
            assert category == keywords['category'], msg

            # BUG FIX: message previously interpolated 'category' where
            # the assertion checks 'subcategory'
            msg = ('Subcategory keyword %s did not match expected %s'
                   % (keywords['subcategory'], subcategory))
            assert subcategory == keywords['subcategory'], msg
Exemple #47
0
    def test_raster_scaling(self):
        """Raster layers can be scaled when resampled

        This is a test for ticket #168

        Native test .asc data has

        ncols         5525
        nrows         2050
        cellsize      0.0083333333333333

        Scaling is necessary for raster data that represents density
        such as population per km^2
        """

        for test_filename in [
                'Population_Jakarta_geographic.asc', 'Population_2010.asc'
        ]:

            raster_filename = ('%s/%s' % (TESTDATA, test_filename))

            # Get reference values
            R = read_layer(raster_filename)
            R_min_ref, R_max_ref = R.get_extrema()
            native_resolution = R.get_resolution()

            # Upload to internal geonode
            raster_layer = save_to_geonode(raster_filename, user=self.user)
            raster_name = '%s:%s' % (raster_layer.workspace,
                                     raster_layer.name)

            # Test for a range of resolutions
            for res in [
                    0.02,
                    0.01,
                    0.005,
                    0.002,
                    0.001,
                    0.0005,  # Coarser
                    0.0002
            ]:  # Finer

                # To save time don't do finest resolution for the
                # large population set
                if test_filename.startswith('Population_2010') and res < 0.005:
                    break

                bbox = get_bounding_box_string(raster_filename)

                R = download(INTERNAL_SERVER_URL,
                             raster_name,
                             bbox,
                             resolution=res)
                A_native = R.get_data(scaling=False)
                A_scaled = R.get_data(scaling=True)

                # Scaling factor: ratio of pixel areas between the
                # resampled and the native resolution
                sigma = (R.get_resolution()[0] / native_resolution[0])**2

                # Compare extrema
                expected_scaled_max = sigma * numpy.nanmax(A_native)
                msg = ('Resampled raster was not rescaled correctly: '
                       'max(A_scaled) was %f but expected %f' %
                       (numpy.nanmax(A_scaled), expected_scaled_max))

                assert numpy.allclose(expected_scaled_max,
                                      numpy.nanmax(A_scaled),
                                      rtol=1.0e-8,
                                      atol=1.0e-8), msg

                expected_scaled_min = sigma * numpy.nanmin(A_native)
                msg = ('Resampled raster was not rescaled correctly: '
                       'min(A_scaled) was %f but expected %f' %
                       (numpy.nanmin(A_scaled), expected_scaled_min))
                assert numpy.allclose(expected_scaled_min,
                                      numpy.nanmin(A_scaled),
                                      rtol=1.0e-8,
                                      atol=1.0e-12), msg

                # Compare elementwise
                msg = 'Resampled raster was not rescaled correctly'
                assert nanallclose(A_native * sigma,
                                   A_scaled,
                                   rtol=1.0e-8,
                                   atol=1.0e-8), msg

                # Check that it also works with manual scaling
                A_manual = R.get_data(scaling=sigma)
                msg = 'Resampled raster was not rescaled correctly'
                assert nanallclose(A_manual,
                                   A_scaled,
                                   rtol=1.0e-8,
                                   atol=1.0e-8), msg

                # Check that an exception is raised for bad arguments.
                # BUG FIX: bare 'except:' would also swallow SystemExit
                # and KeyboardInterrupt; narrowed to Exception.
                try:
                    R.get_data(scaling='bad')
                except Exception:
                    pass
                else:
                    msg = 'String argument should have raised exception'
                    raise Exception(msg)

                try:
                    R.get_data(scaling='(1, 3)')
                except Exception:
                    pass
                else:
                    msg = 'Tuple argument should have raised exception'
                    raise Exception(msg)

                # Check None option without existence of density keyword
                A_none = R.get_data(scaling=None)
                msg = 'Data should not have changed'
                assert nanallclose(A_native,
                                   A_none,
                                   rtol=1.0e-12,
                                   atol=1.0e-12), msg

                # Try with None and density keyword
                R.keywords['density'] = 'true'
                A_none = R.get_data(scaling=None)
                msg = 'Resampled raster was not rescaled correctly'
                assert nanallclose(A_scaled,
                                   A_none,
                                   rtol=1.0e-12,
                                   atol=1.0e-12), msg

                R.keywords['density'] = 'Yes'
                A_none = R.get_data(scaling=None)
                msg = 'Resampled raster was not rescaled correctly'
                assert nanallclose(A_scaled,
                                   A_none,
                                   rtol=1.0e-12,
                                   atol=1.0e-12), msg

                R.keywords['density'] = 'False'
                A_none = R.get_data(scaling=None)
                msg = 'Data should not have changed'
                assert nanallclose(A_native,
                                   A_none,
                                   rtol=1.0e-12,
                                   atol=1.0e-12), msg

                R.keywords['density'] = 'no'
                A_none = R.get_data(scaling=None)
                msg = 'Data should not have changed'
                assert nanallclose(A_native,
                                   A_none,
                                   rtol=1.0e-12,
                                   atol=1.0e-12), msg
Exemple #48
0
    def test_earthquake_exposure_plugin(self):
        """Population exposure to individual MMI levels can be computed.

        Uploads a population raster and an earthquake MMI scenario raster,
        runs ``EarthquakePopulationExposureFunction`` through the calculate
        API, then verifies the per-MMI-class population counts stored in the
        result layer's keywords against a brute-force recount of the two
        downloaded input grids.
        """

        # Upload exposure data for this test
        # FIXME (Ole): While this dataset is ok for testing,
        # note that is has been resampled without scaling
        # so numbers are about 25 times too large.
        # Consider replacing test populations dataset for good measures,
        # just in case any one accidentally started using this dataset
        # for real.

        name = 'Population_2010'
        exposure_filename = '%s/%s.asc' % (TESTDATA, name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        # Check metadata of the uploaded exposure layer
        assert_bounding_box_matches(exposure_layer, exposure_filename)
        check_layer(exposure_layer, full=True)

        # Upload hazard data
        filename = 'Lembang_Earthquake_Scenario.asc'
        hazard_filename = '%s/%s' % (TESTDATA, filename)
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user, overwrite=True)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        # Check metadata of the uploaded hazard layer.
        # The hazard bounding box is reused below for the calculation
        # and for all subsequent downloads.
        assert_bounding_box_matches(hazard_layer, hazard_filename)
        haz_bbox_string = get_bounding_box_string(hazard_filename)
        check_layer(hazard_layer, full=True)

        # Run calculation through the API
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                hazard_server=INTERNAL_SERVER_URL,
                hazard=hazard_name,
                exposure_server=INTERNAL_SERVER_URL,
                exposure=exposure_name,
                bbox=haz_bbox_string,
                impact_function='EarthquakePopulationExposureFunction',
                keywords='test,population,exposure,usgs'))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        if 'errors' in data:
            errors = data['errors']
            if errors is not None:
                msg = ('The server returned the error message: %s'
                       % str(errors))
                raise Exception(msg)

        # The response must carry the standard bookkeeping fields
        assert 'success' in data
        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data

        assert data['success']

        # Download result layer and check its exposure keywords
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                haz_bbox_string)
        assert os.path.exists(result_layer.filename)

        # Check calculated values recorded by the impact function
        keywords = result_layer.get_keywords()

        assert 'mmi-classes' in keywords
        assert 'affected-population' in keywords

        mmi_classes = [int(x) for x in keywords['mmi-classes'].split('_')]
        count = [float(x) for x in keywords['affected-population'].split('_')]

        # Brute force count for each population level, using the same
        # bounding box as the calculation so grids align
        population = download(INTERNAL_SERVER_URL,
                              exposure_name,
                              haz_bbox_string)
        intensity = download(INTERNAL_SERVER_URL,
                             hazard_name,
                             haz_bbox_string)

        # Extract data (NaN cells are mapped to 0 on extraction)
        H = intensity.get_data(nan=0)
        P = population.get_data(nan=0)

        brutecount = {}
        for mmi in mmi_classes:
            brutecount[mmi] = 0

        for i in range(P.shape[0]):
            for j in range(P.shape[1]):
                mmi = H[i, j]
                # Defensive NaN checks kept although get_data(nan=0)
                # should have removed all NaNs already
                if not numpy.isnan(mmi):
                    mmi_class = int(round(mmi))

                    pop = P[i, j]
                    if not numpy.isnan(pop):
                        brutecount[mmi_class] += pop

        # Reported counts must match the brute-force tally per class
        for i, mmi in enumerate(mmi_classes):
            assert numpy.allclose(count[i], brutecount[mmi], rtol=1.0e-6)
    def test_raster_wcs_reprojection(self):
        """UTM Raster can be reprojected by Geoserver and downloaded correctly.

        Uploads a UTM-projected raster, then exercises the WCS endpoint by
        downloading it in its native projection (GeoTIFF and ARCGRID) and
        reprojected to EPSG:4326 and EPSG:32356.
        """
        # FIXME (Ole): Still need to do this with assertions

        filename = 'tsunami_max_inundation_depth_BB_utm.asc'
        projected_tif_file = os.path.join(TESTDATA, filename)

        projected_tif = save_to_geonode(projected_tif_file,
                                        user=self.user,
                                        overwrite=True)
        check_layer(projected_tif)

        # Query layer metadata from the WCS endpoint
        wcs_url = settings.GEOSERVER_BASE_URL + 'wcs'
        wcs = WebCoverageService(wcs_url, version='1.0.0')
        metadata = wcs.contents[projected_tif.typename]
        bboxWGS84 = metadata.boundingBoxWGS84
        resx = metadata.grid.offsetvectors[0][0]
        resy = abs(float(metadata.grid.offsetvectors[1][1]))
        width = metadata.grid.highlimits[0]
        height = metadata.grid.highlimits[1]
        gs_cat = Layer.objects.gs_catalog
        cvg_store = gs_cat.get_store(projected_tif.name)
        cvg_layer = gs_cat.get_resource(projected_tif.name, store=cvg_store)

        # FIXME: A patch was submitted OWSlib 20110808
        # Can delete the following once patch appears
        # In the future get bboxNative and nativeSRS from get_metadata
        descCov = metadata._service.getDescribeCoverage(projected_tif.typename)
        envelope = (descCov.find(ns('CoverageOffering/') + ns('domainSet/') +
                                 ns('spatialDomain/') +
                                 '{http://www.opengis.net/gml}Envelope'))
        nativeSrs = envelope.attrib['srsName']
        gmlpositions = envelope.findall('{http://www.opengis.net/gml}pos')
        lc = gmlpositions[0].text
        uc = gmlpositions[1].text
        bboxNative = (float(lc.split()[0]), float(lc.split()[1]),
                      float(uc.split()[0]), float(uc.split()[1]))
        # ---- END PATCH

        # Make a temp dir to store the saved files.
        # mkdtemp is race-free, unlike os.mkdir('/tmp/<timestamp>').
        tempdir = tempfile.mkdtemp()

        def save_coverage(cvg):
            """Write a WCS coverage payload to a temp file; return its path.

            Uses a context manager so the handle is closed even if the
            read or write fails.
            """
            with tempfile.NamedTemporaryFile(delete=False, dir=tempdir) as f:
                f.write(cvg.read())
            return f.name

        # Check that the layer can be downloaded in its native projection
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                              format='GeoTIFF',
                              crs=nativeSrs,
                              bbox=bboxNative,
                              resx=resx,
                              resy=resy)
        save_coverage(cvg)
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # _exactly_ the same size and bbox of the original

        # Test that the layer can be downloaded in ARCGRID format
        cvg_layer.supported_formats = cvg_layer.supported_formats + ['ARCGRID']
        gs_cat.save(cvg_layer)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                              format='ARCGRID',
                              crs=nativeSrs,
                              bbox=bboxNative,
                              resx=resx,
                              resy=resy)
        save_coverage(cvg)
        # Check that the downloaded file is a valid ARCGRID file and that it
        # the required projection information
        # (FIXME: There is no prj file here. GS bug)

        # Check that the layer can downloaded in WGS84
        cvg_layer.request_srs_list += ['EPSG:4326']
        cvg_layer.response_srs_list += ['EPSG:4326']
        gs_cat.save(cvg_layer)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                              format='GeoTIFF',
                              crs='EPSG:4326',
                              bbox=bboxWGS84,
                              # resx/resy should NOT be hard-coded here;
                              # see comments in riab issue #103
                              width=width,
                              height=height)
        save_coverage(cvg)
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # the correct size and bbox based on the resx and resy or width
        # and height specified

        # Check that we can download the layer in another projection
        cvg_layer.request_srs_list += ['EPSG:32356']
        cvg_layer.response_srs_list += ['EPSG:32356']
        cvg_layer.request_srs_list += ['EPSG:900913']
        cvg_layer.response_srs_list += ['EPSG:900913']
        gs_cat.save(cvg_layer)
        # How do we get the bboxes for the newly assigned
        # request/response SRS??

        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                              format='GeoTIFF',
                              crs='EPSG:32356',  # Should not be hardcoded
                                                 # for a test, or should use
                                                 # 900913 (need bbox)
                              bbox=bboxNative,
                              width=width,
                              height=height)
        save_coverage(cvg)
# Exemple #50
# 0
    def test_padang_building_examples(self):
        """Padang building impact calculation works through the API.

        Uploads an MMI shakemap and the Padang building footprints, runs the
        Padang Earthquake Building Damage Function via the calculate API and
        verifies interpolated MMI values and calculated damage against
        reference results from ``padang_check_results``.
        """

        # Test for a range of hazard layers
        for mmi_filename in ['Shakemap_Padang_2009.asc']:
                               #'Lembang_Earthquake_Scenario.asc']:

            # Upload input data
            hazardfile = os.path.join(TESTDATA, mmi_filename)
            hazard_layer = save_to_geonode(hazardfile, user=self.user)
            hazard_name = '%s:%s' % (hazard_layer.workspace,
                                     hazard_layer.name)

            exposurefile = os.path.join(TESTDATA, 'Padang_WGS84.shp')
            exposure_layer = save_to_geonode(exposurefile, user=self.user)
            exposure_name = '%s:%s' % (exposure_layer.workspace,
                                       exposure_layer.name)

            # Call calculation routine

            # FIXME (Ole): The system freaks out if there are spaces in
            #              bbox string. Please let us catch that and deal
            #              nicely with it - also do this in download()
            bbox = '96.956, -5.51, 104.63933, 2.289497'

            with warnings.catch_warnings():
                warnings.simplefilter('ignore')

                c = Client()
                rv = c.post('/impact/api/calculate/', data=dict(
                            hazard_server=INTERNAL_SERVER_URL,
                            hazard=hazard_name,
                            exposure_server=INTERNAL_SERVER_URL,
                            exposure=exposure_name,
                            bbox=bbox,
                            impact_function='Padang Earthquake ' \
                                            'Building Damage Function',
                            keywords='test,buildings,padang',
                            ))

                self.assertEqual(rv.status_code, 200)
                self.assertEqual(rv['Content-Type'], 'application/json')
                data = json.loads(rv.content)
                # Membership test directly on the dict (no .keys() needed)
                assert 'hazard_layer' in data
                assert 'exposure_layer' in data
                assert 'run_duration' in data
                assert 'run_date' in data
                assert 'layer' in data

                # Download result and check
                layer_name = data['layer'].split('/')[-1]

                result_layer = download(INTERNAL_SERVER_URL,
                                        layer_name,
                                        bbox)
                assert os.path.exists(result_layer.filename)

                # Read hazard data for reference
                hazard_raster = read_layer(hazardfile)
                A = hazard_raster.get_data()
                mmi_min, mmi_max = hazard_raster.get_extrema()

                # Read calculated result
                impact_vector = read_layer(result_layer.filename)
                coordinates = impact_vector.get_geometry()
                attributes = impact_vector.get_data()

                # Verify calculated result
                count = 0
                verified_count = 0
                for point, attribute in zip(coordinates, attributes):
                    lon, lat = point[:]
                    calculated_mmi = attribute['MMI']

                    if calculated_mmi == 0.0:
                        # FIXME (Ole): Some points have MMI==0 here.
                        # Weird but not a show stopper
                        continue

                    # Check that interpolated points are within range
                    msg = ('Interpolated mmi %f was outside extrema: '
                           '[%f, %f] at location '
                           '[%f, %f]. ' % (calculated_mmi,
                                           mmi_min, mmi_max,
                                           lon, lat))
                    assert mmi_min <= calculated_mmi <= mmi_max, msg

                    building_class = attribute['TestBLDGCl']

                    # Check calculated damage against the reference table.
                    # NOTE(review): a reference damage of 0.0 is skipped by
                    # this truthiness test - confirm that is intended.
                    calculated_dam = attribute['DAMAGE']
                    verified_dam = padang_check_results(calculated_mmi,
                                                        building_class)
                    if verified_dam:
                        msg = ('Calculated damage was not as expected '
                               'for hazard layer %s. I got %f '
                               'but expected %f' % (hazardfile,
                                                    calculated_dam,
                                                    verified_dam))
                        assert numpy.allclose(calculated_dam, verified_dam,
                                              rtol=1.0e-4), msg
                        verified_count += 1
                    count += 1

                msg = ('No points were verified in output. Please create '
                       'table with reference data')
                assert verified_count > 0, msg
                msg = 'Number of buildings was not 3896.'
                assert count == 3896, msg
# Exemple #51
# 0
    def test_exceptions_in_calculate_endpoint(self):
        """Wrong bbox input is handled nicely by /impact/api/calculate/
        """

        # Upload the hazard and exposure layers used for every request
        hazard_path = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        uploaded_hazard = save_to_geonode(hazard_path, user=self.user)
        hazard_name = '%s:%s' % (uploaded_hazard.workspace,
                                 uploaded_hazard.name)

        exposure_path = os.path.join(TESTDATA, 'lembang_schools.shp')
        uploaded_exposure = save_to_geonode(exposure_path, user=self.user)
        exposure_name = '%s:%s' % (uploaded_exposure.workspace,
                                   uploaded_exposure.name)

        payload = dict(hazard_server=INTERNAL_SERVER_URL,
                       hazard=hazard_name,
                       exposure_server=INTERNAL_SERVER_URL,
                       exposure=exposure_name,
                       bbox='105.592,-7.809,110.159,-5.647',
                       impact_function='Earthquake Building Damage Function',
                       keywords='test,schools,lembang')

        # First do it correctly (twice)
        client = Client()
        client.post('/impact/api/calculate/', data=payload)
        client.post('/impact/api/calculate/', data=payload)

        # Then check that spaces are dealt with correctly
        payload['bbox'] = '105.592, -7.809, 110.159, -5.647'
        client.post('/impact/api/calculate/', data=payload)

        # Then with a range of wrong bbox inputs
        malformed_bboxes = [
            [1, 2, 3, 4],                       # list instead of string
            None,                               # bbox missing entirely
            '',                                 # empty string
            '105.592,-7.809,x,-5.647',          # non-numeric entry
            [1, '2', 3, 4],                     # list with mixed types
            '105.592,-7.809,-5.647',            # too few values
            '105.592,-7.809,-5.647,34,123',     # too many values
            '110,-7.809,105,-5.647',            # min lon > max lon
            '105.592,-5,110.159,-7',            # min lat > max lat
            '-185.592,-7.809,110.159,-5.647',   # lon below -180
            '105.592,-97.809,110.159,-5.647',   # lat below -90
            '105.592,-7.809,189.159,-5.647',    # lon above 180
            '105.592,-7.809,110.159,-105.647',  # lat below -90
        ]

        for bad_bbox in malformed_bboxes:
            # Use erroneous bounding box
            payload['bbox'] = bad_bbox

            # FIXME (Ole): Suppress error output from client.post
            response = client.post('/impact/api/calculate/', data=payload)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(response['Content-Type'], 'application/json')
            body = json.loads(response.content)

            msg = ('Bad bounding box %s should have raised '
                   'an error' % bad_bbox)
            assert 'errors' in body, msg