Example #1
File: test_api.py Project: uniomni/riab
    def test_calculate_school_damage(self):
        """Earthquake school damage calculation works via the HTTP REST API
        """

        # Upload required data first
        for filename in ['lembang_mmi_hazmap.asc',
                         'lembang_schools.shp']:
            thefile = os.path.join(TESTDATA, filename)
            uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
            check_layer(uploaded, full=True)

        # Run calculation through API
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                   hazard_server=INTERNAL_SERVER_URL,
                   hazard='geonode:lembang_mmi_hazmap',
                   exposure_server=INTERNAL_SERVER_URL,
                   exposure='geonode:lembang_schools',
                   bbox='105.592,-7.809,110.159,-5.647',
                   impact_function='Earthquake Building Damage Function',
                   keywords='test,schools,lembang',
        ))
        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data
Example #2
    def test_plugin_compatibility(self):
        """Default plugins perform as expected
        """

        # Upload a raster and a vector data set
        hazard_filename = os.path.join(TESTDATA,
                                       'shakemap_padang_20090930.asc')
        hazard_layer = save_to_geonode(hazard_filename)
        check_layer(hazard_layer, full=True)

        exposure_filename = os.path.join(TESTDATA,
                                         'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Test
        plugin_list = get_plugins()
        assert len(plugin_list) > 0

        geoserver = {'url': settings.GEOSERVER_BASE_URL + 'ows',
                     'name': 'Local Geoserver',
                     'version': '1.0.0',
                     'id': 0}
        metadata = get_layer_descriptors(geoserver['url'])

        msg = 'There were no layers in test geoserver'
        assert len(metadata) > 0, msg

        # Characterisation test to preserve the behaviour of
        # get_layer_descriptors. FIXME: I think we should change this to be
        # a dictionary of metadata entries (ticket #126).
        reference = [['geonode:lembang_schools',
                      {'layer_type': 'feature',
                       'category': 'exposure',
                       'subcategory': 'building',
                       'title': 'lembang_schools'}],
                     ['geonode:shakemap_padang_20090930',
                      {'layer_type': 'raster',
                       'category': 'hazard',
                       'subcategory': 'earthquake',
                       'title': 'shakemap_padang_20090930'}]]

        for entry in reference:
            name, mdblock = entry

            i = [x[0] for x in metadata].index(name)

            assert name == metadata[i][0]
            for key in entry[1]:
                assert entry[1][key] == metadata[i][1][key]

        # Check plugins are returned
        annotated_plugins = [{'name': name,
                              'doc': f.__doc__,
                              'layers': compatible_layers(f, metadata)}
                             for name, f in plugin_list.items()]

        msg = 'No compatible layers returned'
        assert len(annotated_plugins) > 0, msg
Example #3
    def test_shapefile(self):
        """Shapefile can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'lembang_schools.shp')
        layer = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(layer, full=True)

        assert isinstance(layer.geographic_bounding_box, basestring)
Example #4
    def test_shapefile(self):
        """Shapefile can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'lembang_schools.shp')
        layer = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(layer, full=True)

        assert isinstance(layer.geographic_bounding_box, basestring)
Example #5
    def test_calculate_fatality(self):
        """Earthquake fatalities calculation via the HTTP Rest API is correct
        """

        # Upload required data first
        for filename in ['Earthquake_Ground_Shaking.asc',
                         'Population_2010_clip.tif']:
            thefile = os.path.join(TESTDATA, filename)
            uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
            check_layer(uploaded, full=True)

        # Run calculation through API
        c = Client()
        rv = c.post('/impact/api/calculate/',
                    dict(hazard_server=INTERNAL_SERVER_URL,
                         hazard='geonode:earthquake_ground_shaking',
                         exposure='geonode:population_2010_clip',
                         exposure_server=INTERNAL_SERVER_URL,
                         bbox='99.36,-2.199,102.237,0.00',
                         impact_function='Earthquake Fatality Function',
                         keywords='test,earthquake,fatality'))

        msg = 'Expected status code 200, got %i' % rv.status_code
        self.assertEqual(rv.status_code, 200, msg)

        msg = ('Expected Content-Type "application/json", '
               'got %s' % rv['Content-Type'])
        self.assertEqual(rv['Content-Type'], 'application/json', msg)

        data = json.loads(rv.content)

        if data['stacktrace'] is not None:
            msg = data['stacktrace']
            raise Exception(msg)

        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data
        assert 'bbox' in data
        assert 'impact_function' in data

        layer_uri = data['layer']

        #FIXME: This is not a good way to access the layer name
        typename = layer_uri.split('/')[4]
        name = typename.split(':')[1]

        # Check the autogenerated styles were correctly uploaded
        layer = Layer.objects.get(name=name)

        msg = ('A new style should have been created for layer [%s] '
               'got [%s] style instead.' % (name, layer.default_style.name))
        assert layer.default_style.name == name, msg
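
The FIXME above notes that indexing the split URI is a fragile way to recover the layer name. A minimal alternative sketch, assuming the layer URI contains a single 'workspace:name' path segment (an assumption about the URI layout; layer_name_from_uri is not part of the original code):

from urlparse import urlparse  # Python 2, matching the tests above

def layer_name_from_uri(layer_uri):
    # Hypothetical helper: pick the path segment that looks like
    # 'workspace:name' and return the part after the colon.
    segments = [s for s in urlparse(layer_uri).path.split('/') if ':' in s]
    return segments[0].split(':')[1]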
Example #6
    def test_another_asc(self):
        """Real world ASCII file can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        layer = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(layer, full=True)

        # Verify metadata
        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL, layer_name)
        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layer_type' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert 'geotransform' in metadata
        assert len(metadata['bounding_box']) == 4

        # A little metadata characterisation test
        # FIXME (Ole): Get this right when new resolution keyword
        # has been fully sorted out. There are 3 other tests failing at
        # the moment
        ref = {'layer_type': 'raster',
               'keywords': {'category': 'hazard',
                            'subcategory': 'earthquake',
                            'resolution': '0.0112'},
               'geotransform': (105.29857, 0.0112, 0.0,
                                -5.565233000000001, 0.0, -0.0112),
               'resolution': 0.0112,
               'title': 'lembang_mmi_hazmap'}

        for key in ['layer_type', 'keywords', 'geotransform', 'title']:

            if key == 'keywords':
                kwds = metadata[key]
                for k in kwds:
                    assert kwds[k] == ref[key][k]
            else:
                msg = ('Expected metadata for key %s to be %s. '
                       'Instead got %s' % (key, ref[key], metadata[key]))
                if key in ['geotransform', 'resolution']:
                    assert numpy.allclose(metadata[key], ref[key]), msg
                else:
                    assert metadata[key] == ref[key], msg
Example #7
    def test_plugin_selection(self):
        """Verify the plugins can recognize compatible layers.
        """
        # Upload a raster and a vector data set
        hazard_filename = os.path.join(TESTDATA,
                                       'Earthquake_Ground_Shaking.asc')
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user,
                                       overwrite=True)
        check_layer(hazard_layer, full=True)

        msg = 'No keywords found in layer %s' % hazard_layer.name
        assert len(hazard_layer.keywords) > 0, msg

        exposure_filename = os.path.join(TESTDATA,
                                         'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposure_filename)
        check_layer(exposure_layer, full=True)
        msg = 'No keywords found in layer %s' % exposure_layer.name
        assert len(exposure_layer.keywords) > 0, msg

        c = Client()
        rv = c.post('/impact/api/functions/', data={})

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)

        assert 'functions' in data

        functions = data['functions']

        # FIXME (Ariel): This test should implement an alternative function to
        # parse the requirements, but for now it will just take the buildings
        # damage one.
        for function in functions:
            if function['name'] == 'Earthquake Building Damage Function':
                layers = function['layers']

                msg_tmpl = 'Expected layer %s in list of compatible layers: %s'

                hazard_msg = msg_tmpl % (hazard_layer.typename, layers)
                assert hazard_layer.typename in layers, hazard_msg

                exposure_msg = msg_tmpl % (exposure_layer.typename, layers)
                assert exposure_layer.typename in layers, exposure_msg
Example #8
    def test_plugin_selection(self):
        """Verify the plugins can recognize compatible layers.
        """
        # Upload a raster and a vector data set
        hazard_filename = os.path.join(TESTDATA,
                                       'Earthquake_Ground_Shaking.asc')
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user,
                                       overwrite=True)
        check_layer(hazard_layer, full=True)

        msg = 'No keywords found in layer %s' % hazard_layer.name
        assert len(hazard_layer.keywords) > 0, msg

        exposure_filename = os.path.join(TESTDATA, 'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposure_filename)
        check_layer(exposure_layer, full=True)
        msg = 'No keywords found in layer %s' % exposure_layer.name
        assert len(exposure_layer.keywords) > 0, msg

        c = Client()
        rv = c.post('/impact/api/functions/', data={})

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)

        assert 'functions' in data

        functions = data['functions']

        # FIXME (Ariel): This test should implement an alternative function to
        # parse the requirements, but for now it will just take the buildings
        # damage one.
        for function in functions:
            if function['name'] == 'Earthquake Building Damage Function':
                layers = function['layers']

                msg_tmpl = 'Expected layer %s in list of compatible layers: %s'

                hazard_msg = msg_tmpl % (hazard_layer.typename, layers)
                assert hazard_layer.typename in layers, hazard_msg

                exposure_msg = msg_tmpl % (exposure_layer.typename, layers)
                assert exposure_layer.typename in layers, exposure_msg
Example #9
    def test_cleanup(self):
        """Cleanup functions in the utils module work
        """
        from geonode.maps.utils import cleanup

        thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)

        name = uploaded.name
        uuid = uploaded.uuid
        pk = uploaded.pk

        # Try calling the cleanup function when the Django record exists:
        try:
            cleanup(name, uuid)
        except GeoNodeException:
            pass
Example #10
    def test_cleanup(self):
        """Cleanup functions in the utils module work
        """
        from geonode.maps.utils import cleanup

        thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)

        name = uploaded.name
        uuid = uploaded.uuid
        pk = uploaded.pk

        # Try calling the cleanup function when the Django record exists:
        try:
            cleanup(name, uuid)
        except GeoNodeException:
            pass
Example #11
    def test_another_asc(self):
        """Real world ASCII file can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'lembang_mmi_hazmap.asc')
        layer = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(layer, full=True)

        # Verify metadata
        layer_name = '%s:%s' % (layer.workspace, layer.name)
        metadata = get_metadata(INTERNAL_SERVER_URL,
                                layer_name)
        assert 'id' in metadata
        assert 'title' in metadata
        assert 'layer_type' in metadata
        assert 'keywords' in metadata
        assert 'bounding_box' in metadata
        assert 'geotransform' in metadata
        assert len(metadata['bounding_box']) == 4

        # A little metadata characterisation test
        ref = {'layer_type': 'raster',
               'keywords': {'category': 'hazard',
                            'subcategory': 'earthquake'},
               'geotransform': (105.29857, 0.0112, 0.0,
                                -5.565233000000001, 0.0, -0.0112),
               'title': 'lembang_mmi_hazmap'}

        for key in ['layer_type', 'keywords', 'geotransform',
                    'title']:

            msg = ('Expected metadata for key %s to be %s. '
                   'Instead got %s' % (key, ref[key], metadata[key]))
            if key == 'geotransform':
                assert numpy.allclose(metadata[key], ref[key]), msg
            else:
                assert metadata[key] == ref[key], msg

            if key == 'keywords':
                kwds = metadata[key]
                for k in kwds:
                    assert kwds[k] == ref[key][k]
Example #12
    def test_calculate_school_damage(self):
        """Earthquake school damage calculation works via the HTTP REST API
        """

        # Upload required data first
        for filename in ['lembang_mmi_hazmap.asc',
                         'lembang_schools.shp']:
            thefile = os.path.join(TESTDATA, filename)
            uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
            check_layer(uploaded, full=True)

        # Run calculation through API
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                   hazard_server=INTERNAL_SERVER_URL,
                   hazard='geonode:lembang_mmi_hazmap',
                   exposure_server=INTERNAL_SERVER_URL,
                   exposure='geonode:lembang_schools',
                   bbox='105.592,-7.809,110.159,-5.647',
                   impact_function='Earthquake Building Damage Function',
                   keywords='test,schools,lembang',
        ))

        msg = 'Expected status code 200, got %i' % rv.status_code
        self.assertEqual(rv.status_code, 200, msg)

        msg = ('Expected Content-Type "application/json", '
               'got %s' % rv['Content-Type'])
        self.assertEqual(rv['Content-Type'], 'application/json', msg)

        data = json.loads(rv.content)

        if data['stacktrace'] is not None:
            msg = data['stacktrace']
            raise Exception(msg)

        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data
Example #13
    def test_repeated_upload(self):
        """The same file can be uploaded more than once
        """
        thefile = os.path.join(TESTDATA, 'test_grid.asc')
        uploaded1 = save_to_geonode(thefile, overwrite=True, user=self.user)
        check_layer(uploaded1, full=True)
        uploaded2 = save_to_geonode(thefile, overwrite=True, user=self.user)
        check_layer(uploaded2, full=True)
        uploaded3 = save_to_geonode(thefile, overwrite=False, user=self.user)
        check_layer(uploaded3, full=True)

        msg = ('Expected %s but got %s' % (uploaded1.name, uploaded2.name))
        assert uploaded1.name == uploaded2.name, msg

        msg = ('Expected a different name when uploading %s using '
               'overwrite=False but got %s' % (thefile, uploaded3.name))
        assert uploaded1.name != uploaded3.name, msg
Example #14
    def test_repeated_upload(self):
        """The same file can be uploaded more than once
        """
        thefile = os.path.join(TESTDATA, 'test_grid.asc')
        uploaded1 = save_to_geonode(thefile, overwrite=True,
                                    user=self.user)
        check_layer(uploaded1, full=True)
        uploaded2 = save_to_geonode(thefile, overwrite=True,
                                    user=self.user)
        check_layer(uploaded2, full=True)
        uploaded3 = save_to_geonode(thefile, overwrite=False,
                                    user=self.user)
        check_layer(uploaded3, full=True)

        msg = ('Expected %s but got %s' % (uploaded1.name, uploaded2.name))
        assert uploaded1.name == uploaded2.name, msg

        msg = ('Expected a different name when uploading %s using '
               'overwrite=False but got %s' % (thefile, uploaded3.name))
        assert uploaded1.name != uploaded3.name, msg
Example #15
    def test_asc(self):
        """ASCII file can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'test_grid.asc')
        uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)
Example #16
    def test_tiff(self):
        """GeoTIFF file can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'Population_2010_clip.tif')
        uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)
Example #17
    def test_plugin_compatibility(self):
        """Default plugins perform as expected
        """

        # Upload a raster and a vector data set
        hazard_filename = os.path.join(TESTDATA,
                                       'shakemap_padang_20090930.asc')
        hazard_layer = save_to_geonode(hazard_filename)
        check_layer(hazard_layer, full=True)

        exposure_filename = os.path.join(TESTDATA, 'lembang_schools.shp')
        exposure_layer = save_to_geonode(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Test
        plugin_list = get_plugins()
        assert len(plugin_list) > 0

        geoserver = {
            'url': settings.GEOSERVER_BASE_URL + 'ows',
            'name': 'Local Geoserver',
            'version': '1.0.0',
            'id': 0
        }
        metadata = get_layer_descriptors(geoserver['url'])

        msg = 'There were no layers in test geoserver'
        assert len(metadata) > 0, msg

        # Characterisation test to preserve the behaviour of
        # get_layer_descriptors. FIXME: I think we should change this to be
        # a dictionary of metadata entries (ticket #126).
        reference = [['geonode:lembang_schools',
                      {'layer_type': 'vector',
                       'category': 'exposure',
                       'subcategory': 'building',
                       'title': 'lembang_schools'}],
                     ['geonode:shakemap_padang_20090930',
                      {'layer_type': 'raster',
                       'category': 'hazard',
                       'subcategory': 'earthquake',
                       'title': 'shakemap_padang_20090930'}]]

        for entry in reference:
            name, mdblock = entry

            i = [x[0] for x in metadata].index(name)

            msg = 'Got name %s, expected %s' % (name, metadata[i][0])
            assert name == metadata[i][0], msg
            for key in entry[1]:
                refval = entry[1][key]
                val = metadata[i][1][key]
                msg = ('Got value "%s" for key "%s" '
                       'Expected "%s"' % (val, key, refval))
                assert refval == val, msg

        # Check plugins are returned
        annotated_plugins = [{
            'name': name,
            'doc': f.__doc__,
            'layers': compatible_layers(f, metadata)
        } for name, f in plugin_list.items()]

        msg = 'No compatible layers returned'
        assert len(annotated_plugins) > 0, msg
Example #18
    def test_raster_wcs_reprojection(self):
        """UTM Raster can be reprojected by Geoserver and downloaded correctly
        """
        # FIXME (Ole): Still need to do this with assertions

        filename = 'tsunami_max_inundation_depth_BB_utm.asc'
        projected_tif_file = os.path.join(TESTDATA, filename)

        #projected_tif = file_upload(projected_tif_file, overwrite=True)
        projected_tif = save_to_geonode(projected_tif_file,
                                        user=self.user,
                                        overwrite=True)
        check_layer(projected_tif)

        wcs_url = settings.GEOSERVER_BASE_URL + 'wcs'
        wcs = WebCoverageService(wcs_url, version='1.0.0')
        #logger.info(wcs.contents)
        metadata = wcs.contents[projected_tif.typename]
        #logger.info(metadata.grid)
        bboxWGS84 = metadata.boundingBoxWGS84
        #logger.info(bboxWGS84)
        resx = metadata.grid.offsetvectors[0][0]
        resy = abs(float(metadata.grid.offsetvectors[1][1]))
        #logger.info("resx=%s resy=%s" % (str(resx), str(resy)))
        formats = metadata.supportedFormats
        #logger.info(formats)
        supportedCRS = metadata.supportedCRS
        #logger.info(supportedCRS)
        width = metadata.grid.highlimits[0]
        height = metadata.grid.highlimits[1]
        #logger.info("width=%s height=%s" % (width, height))
        gs_cat = Layer.objects.gs_catalog
        cvg_store = gs_cat.get_store(projected_tif.name)
        cvg_layer = gs_cat.get_resource(projected_tif.name, store=cvg_store)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)

        # FIXME: A patch was submitted to OWSLib on 20110808
        # The following can be deleted once that patch appears
        # In the future, get bboxNative and nativeSRS from get_metadata
        descCov = metadata._service.getDescribeCoverage(projected_tif.typename)
        envelope = (descCov.find(ns('CoverageOffering/') + ns('domainSet/') +
                                 ns('spatialDomain/') +
                                 '{http://www.opengis.net/gml}Envelope'))
        nativeSrs = envelope.attrib['srsName']
        #logger.info(nativeSrs)
        gmlpositions = envelope.findall('{http://www.opengis.net/gml}pos')
        lc = gmlpositions[0].text
        uc = gmlpositions[1].text
        bboxNative = (float(lc.split()[0]), float(lc.split()[1]),
                      float(uc.split()[0]), float(uc.split()[1]))
        #logger.info(bboxNative)
        # ---- END PATCH

        # Make a temp dir to store the saved files
        tempdir = '/tmp/%s' % str(time.time())
        os.mkdir(tempdir)

        # Check that the layer can be downloaded in its native projection
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs=nativeSrs,
                bbox=bboxNative,
                resx=resx,
                resy=resy)

        t = tempfile.NamedTemporaryFile(delete=False,
                                        dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("GeoTIFF in %s = %s" % (nativeSrs, t.name))
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # _exactly_ the same size and bbox of the original

        # Test that the layer can be downloaded in ARCGRID format
        cvg_layer.supported_formats = cvg_layer.supported_formats + ['ARCGRID']
        gs_cat.save(cvg_layer)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='ARCGRID',
                crs=nativeSrs,
                bbox=bboxNative,
                resx=resx,
                resy=resy)

        t = tempfile.NamedTemporaryFile(delete=False,
                                    dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("ARCGRID in %s = %s" % (nativeSrs, t.name))
        # Check that the downloaded file is a valid ARCGRID file and that
        # it has the required projection information
        # (FIXME: There is no prj file here. GS bug)

        # Check that the layer can be downloaded in WGS84
        cvg_layer.request_srs_list += ['EPSG:4326']
        cvg_layer.response_srs_list += ['EPSG:4326']
        gs_cat.save(cvg_layer)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs='EPSG:4326',
                bbox=bboxWGS84,
                #resx=0.000202220898116, # Should NOT be hard-coded!
                                         # How do we convert
                #resy=0.000202220898116) # See comments in riab issue #103
                width=width,
                height=height)

        t = tempfile.NamedTemporaryFile(delete=False,
                                    dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("GeoTIFF in %s = %s" % ("EPSG:4326", t.name))
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # the correct size and bbox based on the resx and resy or width
        # and height specified

        # Check that we can download the layer in another projection
        cvg_layer.request_srs_list += ['EPSG:32356']
        cvg_layer.response_srs_list += ['EPSG:32356']
        cvg_layer.request_srs_list += ['EPSG:900913']
        cvg_layer.response_srs_list += ['EPSG:900913']
        gs_cat.save(cvg_layer)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)
        # How do we get the bboxes for the newly assigned
        # request/response SRS??

        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                format='GeoTIFF',
                crs='EPSG:32356',  # Should not be hardcoded for a test,
                                   # or should use 900913 (need bbox)
                bbox=bboxNative,
                #resx=0.000202220898116, # Should NOT be hard-coded!
                                         # How do we convert
                #resy=0.000202220898116) # See comments in riab issue #103
                width=width,
                height=height)

        t = tempfile.NamedTemporaryFile(delete=False,
                                        dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
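
The TODO comments above ask for verification that each downloaded file really is a valid GeoTIFF with the expected dimensions. A minimal sketch of such a check, assuming GDAL's Python bindings are available in the test environment (they are not imported by the original test):

from osgeo import gdal

def verify_geotiff(path, expected_width, expected_height):
    # Hypothetical check: open the downloaded file with GDAL and compare
    # its raster dimensions against the size requested from the WCS.
    ds = gdal.Open(path)
    assert ds is not None, 'Could not open %s as a raster' % path
    assert ds.RasterXSize == int(expected_width)
    assert ds.RasterYSize == int(expected_height)

For instance, the WGS84 download above requests width and height explicitly, so verify_geotiff(t.name, width, height) could be used to confirm the result.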
Example #19
    def test_tiff(self):
        """GeoTIFF file can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'Population_2010_clip.tif')
        uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)
Example #20
    def test_linked_datasets(self):
        """Linked datesets can be pulled in e.g. to include gender break down
        """

        # Upload exposure data for this test. This will automatically
        # pull in female_pct_yogya.asc through its "associates" keyword
        name = 'population_yogya'
        exposure_filename = '%s/%s.asc' % (TESTDATA, name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        # Check metadata
        assert_bounding_box_matches(exposure_layer, exposure_filename)
        exp_bbox_string = get_bounding_box_string(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Upload hazard data
        filename = 'eq_yogya_2006.asc'
        hazard_filename = '%s/%s' % (TESTDATA, filename)
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user, overwrite=True)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        # Check metadata
        assert_bounding_box_matches(hazard_layer, hazard_filename)
        haz_bbox_string = get_bounding_box_string(hazard_filename)
        check_layer(hazard_layer, full=True)

        # Run calculation
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                hazard_server=INTERNAL_SERVER_URL,
                hazard=hazard_name,
                exposure_server=INTERNAL_SERVER_URL,
                exposure=exposure_name,
                bbox=haz_bbox_string,
                impact_function='EarthquakeFatalityFunction',
                keywords='test,fatalities,population,usgs'))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        if 'errors' in data:
            errors = data['errors']
            if errors is not None:
                msg = ('The server returned the error message: %s'
                       % str(errors))
                raise Exception(msg)

        assert 'success' in data
        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data

        assert data['success']

        # Download result and check
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                get_bounding_box_string(hazard_filename))
        assert os.path.exists(result_layer.filename)

        # Check calculated values
        keywords = result_layer.get_keywords()

        assert 'caption' in keywords
Example #21
    def test_earthquake_exposure_plugin(self):
        """Population exposure to individual MMI levels can be computed
        """

        # Upload exposure data for this test
        # FIXME (Ole): While this dataset is OK for testing,
        # note that it has been resampled without scaling,
        # so numbers are about 25 times too large.
        # Consider replacing the test population dataset for good measure,
        # just in case anyone accidentally starts using this dataset
        # for real.

        name = 'Population_2010'
        exposure_filename = '%s/%s.asc' % (TESTDATA, name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)
        exposure_name = '%s:%s' % (exposure_layer.workspace,
                                   exposure_layer.name)

        # Check metadata
        assert_bounding_box_matches(exposure_layer, exposure_filename)
        exp_bbox_string = get_bounding_box_string(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Upload hazard data
        filename = 'Lembang_Earthquake_Scenario.asc'
        hazard_filename = '%s/%s' % (TESTDATA, filename)
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user, overwrite=True)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        # Check metadata
        assert_bounding_box_matches(hazard_layer, hazard_filename)
        haz_bbox_string = get_bounding_box_string(hazard_filename)
        check_layer(hazard_layer, full=True)

        # Run calculation
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                hazard_server=INTERNAL_SERVER_URL,
                hazard=hazard_name,
                exposure_server=INTERNAL_SERVER_URL,
                exposure=exposure_name,
                bbox=haz_bbox_string,
                impact_function='EarthquakePopulationExposureFunction',
                keywords='test,population,exposure,usgs'))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        if 'errors' in data:
            errors = data['errors']
            if errors is not None:
                msg = ('The server returned the error message: %s'
                       % str(errors))
                raise Exception(msg)

        assert 'success' in data
        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data

        assert data['success']

        # Download result and check
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                get_bounding_box_string(hazard_filename))
        assert os.path.exists(result_layer.filename)

        # Check calculated values
        keywords = result_layer.get_keywords()

        assert 'mmi-classes' in keywords
        assert 'affected-population' in keywords

        mmi_classes = [int(x) for x in keywords['mmi-classes'].split('_')]
        count = [float(x) for x in keywords['affected-population'].split('_')]

        # Brute force count for each population level
        population = download(INTERNAL_SERVER_URL,
                              exposure_name,
                              get_bounding_box_string(hazard_filename))
        intensity = download(INTERNAL_SERVER_URL,
                             hazard_name,
                             get_bounding_box_string(hazard_filename))

        # Extract data
        H = intensity.get_data(nan=0)
        P = population.get_data(nan=0)

        brutecount = {}
        for mmi in mmi_classes:
            brutecount[mmi] = 0

        for i in range(P.shape[0]):
            for j in range(P.shape[1]):
                mmi = H[i, j]
                if not numpy.isnan(mmi):
                    mmi_class = int(round(mmi))

                    pop = P[i, j]
                    if not numpy.isnan(pop):
                        brutecount[mmi_class] += pop

        for i, mmi in enumerate(mmi_classes):
            assert numpy.allclose(count[i], brutecount[mmi], rtol=1.0e-6)
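
The brute-force nested loop above can also be expressed with array operations. A sketch of an equivalent vectorised count (vectorised_exposure_count is hypothetical; it assumes NaNs have already been replaced via get_data(nan=0), as in the test):

import numpy

def vectorised_exposure_count(H, P, mmi_classes):
    # Hypothetical vectorised equivalent of the brute-force count above.
    # Note: numpy.round rounds halves to even, unlike Python 2's round().
    rounded = numpy.round(H).astype(int)
    return [float(numpy.sum(P[rounded == mmi])) for mmi in mmi_classes]

Its result could be compared against count with numpy.allclose, as in the assertion above.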
Example #22
    def test_raster_wcs_reprojection(self):
        """UTM Raster can be reprojected by Geoserver and downloaded correctly
        """
        # FIXME (Ole): Jeff needs to do this with assertions (ticket #40)

        filename = 'tsunami_max_inundation_depth_BB_utm.asc'
        projected_tif_file = os.path.join(TESTDATA, filename)

        #projected_tif = file_upload(projected_tif_file, overwrite=True)
        projected_tif = save_to_geonode(projected_tif_file,
                                        user=self.user,
                                        overwrite=True)
        check_layer(projected_tif)

        wcs_url = settings.GEOSERVER_BASE_URL + 'wcs'
        wcs = WebCoverageService(wcs_url, version='1.0.0')
        #logger.info(wcs.contents)
        metadata = wcs.contents[projected_tif.typename]
        #logger.info(metadata.grid)
        bboxWGS84 = metadata.boundingBoxWGS84
        #logger.info(bboxWGS84)
        resx = metadata.grid.offsetvectors[0][0]
        resy = abs(float(metadata.grid.offsetvectors[1][1]))
        #logger.info("resx=%s resy=%s" % (str(resx), str(resy)))
        formats = metadata.supportedFormats
        #logger.info(formats)
        supportedCRS = metadata.supportedCRS
        #logger.info(supportedCRS)
        width = metadata.grid.highlimits[0]
        height = metadata.grid.highlimits[1]
        #logger.info("width=%s height=%s" % (width, height))
        gs_cat = Layer.objects.gs_catalog
        cvg_store = gs_cat.get_store(projected_tif.name)
        cvg_layer = gs_cat.get_resource(projected_tif.name, store=cvg_store)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)

        # FIXME: A patch was submitted to OWSLib on 20110808
        # The following can be deleted once that patch appears
        # In the future, get bboxNative and nativeSRS from get_metadata
        descCov = metadata._service.getDescribeCoverage(projected_tif.typename)
        envelope = (descCov.find(
            ns('CoverageOffering/') + ns('domainSet/') + ns('spatialDomain/') +
            '{http://www.opengis.net/gml}Envelope'))
        nativeSrs = envelope.attrib['srsName']
        #logger.info(nativeSrs)
        gmlpositions = envelope.findall('{http://www.opengis.net/gml}pos')
        lc = gmlpositions[0].text
        uc = gmlpositions[1].text
        bboxNative = (float(lc.split()[0]), float(lc.split()[1]),
                      float(uc.split()[0]), float(uc.split()[1]))
        #logger.info(bboxNative)
        # ---- END PATCH

        # Make a temp dir to store the saved files
        tempdir = '/tmp/%s' % str(time.time())
        os.mkdir(tempdir)

        # Check that the layer can be downloaded in its native projection
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                              format='GeoTIFF',
                              crs=nativeSrs,
                              bbox=bboxNative,
                              resx=resx,
                              resy=resy)

        t = tempfile.NamedTemporaryFile(delete=False, dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("GeoTIFF in %s = %s" % (nativeSrs, t.name))
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # _exactly_ the same size and bbox of the original

        # Test that the layer can be downloaded in ARCGRID format
        cvg_layer.supported_formats = cvg_layer.supported_formats + ['ARCGRID']
        gs_cat.save(cvg_layer)
        cvg = wcs.getCoverage(identifier=projected_tif.typename,
                              format='ARCGRID',
                              crs=nativeSrs,
                              bbox=bboxNative,
                              resx=resx,
                              resy=resy)

        t = tempfile.NamedTemporaryFile(delete=False, dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("ARCGRID in %s = %s" % (nativeSrs, t.name))
        # Check that the downloaded file is a valid ARCGRID file and that
        # it has the required projection information
        # (FIXME: There is no prj file here. GS bug)

        # Check that the layer can be downloaded in WGS84
        cvg_layer.request_srs_list += ['EPSG:4326']
        cvg_layer.response_srs_list += ['EPSG:4326']
        gs_cat.save(cvg_layer)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)
        cvg = wcs.getCoverage(
            identifier=projected_tif.typename,
            format='GeoTIFF',
            crs='EPSG:4326',
            bbox=bboxWGS84,
            #resx=0.000202220898116, # Should NOT be hard-coded!
            # How do we convert
            #resy=0.000202220898116) # See comments in riab issue #103
            width=width,
            height=height)

        t = tempfile.NamedTemporaryFile(delete=False, dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
        #logger.info("GeoTIFF in %s = %s" % ("EPSG:4326", t.name))
        # TODO: Verify that the file is a valid GeoTiff and that it is
        # the correct size and bbox based on the resx and resy or width
        # and height specified

        # Check that we can download the layer in another projection
        cvg_layer.request_srs_list += ['EPSG:32356']
        cvg_layer.response_srs_list += ['EPSG:32356']
        cvg_layer.request_srs_list += ['EPSG:900913']
        cvg_layer.response_srs_list += ['EPSG:900913']
        gs_cat.save(cvg_layer)
        #logger.info(cvg_layer.request_srs_list)
        #logger.info(cvg_layer.response_srs_list)
        # How do we get the bboxes for the newly assigned
        # request/response SRS??

        cvg = wcs.getCoverage(
            identifier=projected_tif.typename,
            format='GeoTIFF',
            crs='EPSG:32356',  # Should not be hardcoded for a test,
            # or should use 900913 (need bbox)
            bbox=bboxNative,
            #resx=0.000202220898116, # Should NOT be hard-coded!
            # How do we convert
            #resy=0.000202220898116) # See comments in riab issue #103
            width=width,
            height=height)

        t = tempfile.NamedTemporaryFile(delete=False, dir=tempdir)

        out = open(t.name, 'wb')
        out.write(cvg.read())
        out.close()
Example #23
    def test_the_earthquake_fatality_estimation_allen(self):
        """Fatality computation computed correctly with GeoServer Data
        """

        # Simulate bounding box from application
        viewport_bbox_string = '104.3,-8.2,110.04,-5.17'

        # Upload exposure data for this test
        name = 'Population_2010'
        exposure_filename = '%s/%s.asc' % (TESTDATA, name)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)

        workspace = exposure_layer.workspace
        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        assert workspace == 'geonode', msg

        layer_name = exposure_layer.name
        msg = 'Expected layer name to be "%s". Got %s' % (name, layer_name)
        assert layer_name == name.lower(), msg

        exposure_name = '%s:%s' % (workspace, layer_name)

        # Check metadata
        assert_bounding_box_matches(exposure_layer, exposure_filename)
        exp_bbox_string = get_bounding_box_string(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Now that we know the exposure layer is good, let's upload some
        # hazard layers and do the calculations
        filename = 'Lembang_Earthquake_Scenario.asc'

        # Save
        hazard_filename = '%s/%s' % (TESTDATA, filename)
        hazard_layer = save_to_geonode(hazard_filename,
                                       user=self.user, overwrite=True)
        hazard_name = '%s:%s' % (hazard_layer.workspace,
                                 hazard_layer.name)

        # Check metadata
        assert_bounding_box_matches(hazard_layer, hazard_filename)
        haz_bbox_string = get_bounding_box_string(hazard_filename)
        check_layer(hazard_layer, full=True)

        # Run calculation
        c = Client()
        rv = c.post('/impact/api/calculate/', data=dict(
                hazard_server=INTERNAL_SERVER_URL,
                hazard=hazard_name,
                exposure_server=INTERNAL_SERVER_URL,
                exposure=exposure_name,
                #bbox=viewport_bbox_string,
                bbox=exp_bbox_string,  # This one reproduced the
                                       # crash for lembang
                impact_function='EarthquakeFatalityFunction',
                keywords='test,shakemap,usgs'))

        self.assertEqual(rv.status_code, 200)
        self.assertEqual(rv['Content-Type'], 'application/json')
        data = json.loads(rv.content)
        if 'errors' in data:
            errors = data['errors']
            if errors is not None:
                msg = ('The server returned the error message: %s'
                       % str(errors))
                raise Exception(msg)

        assert 'success' in data
        assert 'hazard_layer' in data
        assert 'exposure_layer' in data
        assert 'run_duration' in data
        assert 'run_date' in data
        assert 'layer' in data

        assert data['success']

        # Download result and check
        layer_name = data['layer'].split('/')[-1]

        result_layer = download(INTERNAL_SERVER_URL,
                                layer_name,
                                get_bounding_box_string(hazard_filename))
        assert os.path.exists(result_layer.filename)
Example #24
    def test_jakarta_flood_study(self):
        """HKV Jakarta flood study calculated correctly using the API
        """

        # FIXME (Ole): Redo with population as shapefile later

        # Expected values from HKV
        expected_values = [2485442, 1537920]

        # Name files for hazard level, exposure and expected fatalities
        population = 'Population_Jakarta_geographic'
        plugin_name = 'FloodImpactFunction'

        # Upload exposure data for this test
        exposure_filename = '%s/%s.asc' % (TESTDATA, population)
        exposure_layer = save_to_geonode(exposure_filename,
                                         user=self.user, overwrite=True)

        workspace = exposure_layer.workspace
        msg = 'Expected workspace to be "geonode". Got %s' % workspace
        assert workspace == 'geonode', msg

        layer_name = exposure_layer.name
        msg = 'Expected layer name to be "%s". Got %s' % (population,
                                                          layer_name)
        assert layer_name.lower() == population.lower(), msg

        exposure_name = '%s:%s' % (workspace, layer_name)

        # Check metadata
        assert_bounding_box_matches(exposure_layer, exposure_filename)
        exp_bbox_string = get_bounding_box_string(exposure_filename)
        check_layer(exposure_layer, full=True)

        # Now that we know the exposure layer is good, let's upload some
        # hazard layers and do the calculations

        i = 0
        for filename in ['Flood_Current_Depth_Jakarta_geographic.asc',
                         'Flood_Design_Depth_Jakarta_geographic.asc']:

            hazard_filename = os.path.join(TESTDATA, filename)
            exposure_filename = os.path.join(TESTDATA, population)

            # Save
            hazard_filename = '%s/%s' % (TESTDATA, filename)
            hazard_layer = save_to_geonode(hazard_filename,
                                           user=self.user, overwrite=True)
            hazard_name = '%s:%s' % (hazard_layer.workspace,
                                     hazard_layer.name)

            # Check metadata
            assert_bounding_box_matches(hazard_layer, hazard_filename)
            haz_bbox_string = get_bounding_box_string(hazard_filename)
            check_layer(hazard_layer, full=True)

            # Run calculation
            c = Client()
            rv = c.post('/impact/api/calculate/', data=dict(
                    hazard_server=INTERNAL_SERVER_URL,
                    hazard=hazard_name,
                    exposure_server=INTERNAL_SERVER_URL,
                    exposure=exposure_name,
                    bbox=exp_bbox_string,
                    impact_function=plugin_name,
                    keywords='test,flood,HKV'))

            self.assertEqual(rv.status_code, 200)
            self.assertEqual(rv['Content-Type'], 'application/json')
            data = json.loads(rv.content)
            if 'errors' in data:
                errors = data['errors']
                if errors is not None:
                    raise Exception(errors)

            assert 'hazard_layer' in data
            assert 'exposure_layer' in data
            assert 'run_duration' in data
            assert 'run_date' in data
            assert 'layer' in data

            # Do calculation manually and check result
            hazard_raster = read_layer(hazard_filename)
            H = hazard_raster.get_data(nan=0)

            exposure_raster = read_layer(exposure_filename + '.asc')
            P = exposure_raster.get_data(nan=0)

            # Calculate impact manually
            pixel_area = 2500
            I = numpy.where(H > 0.1, P, 0) / 100000.0 * pixel_area

            # Verify correctness against results from HKV
            res = sum(I.flat)
            ref = expected_values[i]
            #print filename, 'Result=%f' % res, ' Expected=%f' % ref
            #print 'Pct relative error=%f' % (abs(res-ref)*100./ref)

            msg = 'Got result %f but expected %f' % (res, ref)
            assert numpy.allclose(res, ref, rtol=1.0e-2), msg

            # Verify correctness of result
            # Download result and check
            layer_name = data['layer'].split('/')[-1]

            result_layer = download(INTERNAL_SERVER_URL,
                                    layer_name,
                                    get_bounding_box_string(hazard_filename))
            assert os.path.exists(result_layer.filename)

            calculated_raster = read_layer(result_layer.filename)
            C = calculated_raster.get_data(nan=0)

            # FIXME (Ole): Bring this back
            # Check caption
            #caption = calculated_raster.get_caption()
            #print
            #print caption
            #expct = 'people'
            #msg = ('Caption %s did not contain expected '
            #       'keyword %s' % (caption, expct))
            #assert expct in caption, msg

            # Compare shape and extrema
            msg = ('Shape of calculated raster differs from reference raster: '
                   'C=%s, I=%s' % (C.shape, I.shape))
            assert numpy.allclose(C.shape, I.shape,
                                  rtol=1e-12, atol=1e-12), msg

            msg = ('Minimum of calculated raster differs from reference '
                   'raster: '
                   'C=%s, I=%s' % (numpy.nanmin(C), numpy.nanmin(I)))
            assert numpy.allclose(numpy.nanmin(C), numpy.nanmin(I),
                                  rtol=1e-6, atol=1e-12), msg
            msg = ('Maximum of calculated raster differs from reference '
                   'raster: '
                   'C=%s, I=%s' % (numpy.nanmax(C), numpy.nanmax(I)))
            assert numpy.allclose(numpy.nanmax(C), numpy.nanmax(I),
                                  rtol=1e-6, atol=1e-12), msg

            # Compare every single value numerically (a bit loose -
            # probably due to single precision conversions when
            # data flows through geonode)
            #
            # FIXME: Not working - but since this test is about
            # issue #162 we'll leave it for now. TODO with NAN
            # Manually verified that the two expected values are correct,
            # though.
            #msg = 'Array values of written raster array were not as expected'
            #print C
            #print I
            #print numpy.amax(numpy.abs(C-I))
            #assert numpy.allclose(C, I, rtol=1e-2, atol=1e-5), msg

            # Check that extrema are in range
            xmin, xmax = calculated_raster.get_extrema()

            msg = ('Calculated raster values should lie within '
                   '[%s, %s] and be non-negative' % (xmin, xmax))
            assert numpy.alltrue(C[~numpy.isnan(C)] >= xmin), msg
            assert numpy.alltrue(C[~numpy.isnan(C)] <= xmax), msg
            assert numpy.alltrue(C[~numpy.isnan(C)] >= 0), msg

            i += 1
Example #25
    def test_asc(self):
        """ASCII file can be uploaded
        """
        thefile = os.path.join(TESTDATA, 'test_grid.asc')
        uploaded = save_to_geonode(thefile, user=self.user, overwrite=True)
        check_layer(uploaded, full=True)
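
A pattern repeated throughout these examples is: join the test data path, upload, then verify the layer. A small helper along these lines (a sketch only; upload_and_check is not part of the original code and assumes the same TESTDATA, save_to_geonode and check_layer used above):

import os

def upload_and_check(filename, user=None, overwrite=True):
    # Hypothetical helper wrapping the repeated upload/verify pattern
    # from the tests above.
    thefile = os.path.join(TESTDATA, filename)
    layer = save_to_geonode(thefile, user=user, overwrite=overwrite)
    check_layer(layer, full=True)
    return layer

Inside a test method it would be called as, e.g., layer = upload_and_check('lembang_schools.shp', user=self.user).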