Example #1
def getGeoServerBoundingBox(geoserver_layername):
    cat = Catalog("{0}/rest/".format(geoserver_connection), geoserver_username,
                  geoserver_password)
    ws = cat.get_workspace(workspace)
    resource = cat.get_resource(geoserver_layername, workspace=ws)
    bbox = resource.latlon_bbox[:4]
    solr_geom = 'ENVELOPE({0},{1},{2},{3})'.format(bbox[0], bbox[1], bbox[3],
                                                   bbox[2])
    return solr_geom
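
In gsconfig, latlon_bbox is ordered (minx, maxx, miny, maxy), while Solr's ENVELOPE syntax expects (minX, maxX, maxY, minY); that is why the indices 0, 1, 3, 2 are used above. A minimal usage sketch, assuming the module-level connection settings are configured; the layer name and Solr field names are placeholders:

# Usage sketch; "roads" and the Solr field names are placeholders.
solr_geom = getGeoServerBoundingBox("roads")
solr_doc = {
    "id": "geonode:roads",
    "bbox": solr_geom,  # e.g. 'ENVELOPE(-180.0,180.0,90.0,-90.0)'
}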
Example #2
def layer_exists(layer_name, store, workspace):
    _user, _password = ogc_server_settings.credentials
    url = ogc_server_settings.rest
    gs_catalog = Catalog(url, _user, _password)
    try:
        layer = gs_catalog.get_resource(layer_name, store=store,
                                        workspace=workspace)
        return layer is not None
    except FailedRequestError:
        return False
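
A hedged usage sketch of the helper above, used as a guard before publishing; the layer, store, and workspace names are illustrative:

# Usage sketch; names are placeholders.
if not layer_exists("roads", store="uploaded", workspace="geonode"):
    pass  # publish the feature type here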
Example #3
def _create_geoserver_geonode_layer(new_table, sld_type, title, the_user,
                                    permissions):
    # Create the Layer in GeoServer from the table
    try:
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      settings.OGC_SERVER['default']['USER'],
                      settings.OGC_SERVER['default']['PASSWORD'])
        ds = cat.get_store("uploaded")  # name of store in WFP-Geonode
        cat.publish_featuretype(new_table, ds, "EPSG:4326", srs="EPSG:4326")

        # change the layer's title to the title set by the user
        resource = cat.get_resource(new_table, workspace="geonode")
        resource.title = title
        cat.save(resource)

    except Exception as e:
        msg = "Error creating GeoServer layer for %s: %s" % (new_table, str(e))
        print(msg)
        return None, msg

    # Create the Layer in GeoNode from the GeoServer Layer
    try:

        link_to_sld = "{location}styles/{sld_type}".format(
            **{
                'location': settings.OGC_SERVER['default']['LOCATION'],
                'sld_type': sld_type
            })

        r = requests.get(link_to_sld)
        sld = r.text
        sld = sld.replace(
            "name_of_layer", new_table
        )  # "name_of_layer" is set in the predefined sld in geoserver (polygon_style, line_style, point_style)
        cat.create_style(new_table, sld, overwrite=True)
        style = cat.get_style(new_table)
        layer = cat.get_layer(new_table)
        layer.default_style = style
        cat.save(layer)
        gs_slurp(owner=the_user, filter=new_table)

        from geonode.base.models import ResourceBase
        layer = ResourceBase.objects.get(title=title)
        geoserver_post_save(layer, ResourceBase)

        # assign permissions for this layer
        permissions_dict = json.loads(permissions)  # needs to be dictionary
        if permissions_dict is not None and len(permissions_dict.keys()) > 0:
            layer.set_permissions(permissions_dict)

    except Exception as e:
        msg = "Error creating GeoNode layer for %s: %s" % (new_table, str(e))
        return None, msg
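
Note that the function above only returns a (None, msg) tuple on failure and falls through (returning None implicitly) on success, so a caller should not unconditionally unpack the result. A hedged usage sketch with illustrative arguments:

# Usage sketch; table name, style, title, user and permissions are placeholders.
import json

result = _create_geoserver_geonode_layer(
    new_table="uploaded_roads",
    sld_type="line_style",
    title="Uploaded Roads",
    the_user=request.user,  # hypothetical Django request context
    permissions=json.dumps({"users": {}, "groups": {}}),
)
if result is not None:
    _, error_msg = result
    print(error_msg)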
Example #4
def layer_exists(layer_name, store, workspace):
    _user, _password = ogc_server_settings.credentials
    url = ogc_server_settings.rest
    gs_catalog = Catalog(url, _user, _password)
    try:
        layer = gs_catalog.get_resource(layer_name,
                                        store=store,
                                        workspace=workspace)
        return layer is not None
    except FailedRequestError:
        return False
Example #5
def geoserver_post_save(instance, sender, **kwargs):
    """Save keywords to GeoServer

       The way keywords are implemented requires the layer
       to be saved to the database before accessing them.
    """
    url = ogc_server_settings.rest
    
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (FailedRequestError, EnvironmentError) as e:
        msg = ('Could not connect to geoserver at "%s" '
               'to save information for layer "%s"' % (
                ogc_server_settings.LOCATION, instance.name.encode('utf-8'))
              )
        logger.warn(msg, e)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return

    gs_resource.keywords = instance.keyword_list()
    #gs_resource should only be called if ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings,"BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)

    bbox = gs_resource.latlon_bbox
    dx = float(bbox[1]) - float(bbox[0])
    dy = float(bbox[3]) - float(bbox[2])

    dataAspect = 1 if dy == 0 else dx / dy

    height = 550
    width = int(height * dataAspect)

    # Set download links for WMS, WCS or WFS and KML

    links = wms_links(ogc_server_settings.public_url + 'wms?',
                    instance.typename.encode('utf-8'), instance.bbox_string,
                    instance.srid, height, width)

    for ext, name, mime, wms_url in links:
        Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=wms_url,
                        defaults=dict(
                            extension=ext,
                            name=name,
                            mime=mime,
                            link_type='image',
                           )
                        )

    if instance.storeType == "dataStore":
        links = wfs_links(ogc_server_settings.public_url + 'wfs?', instance.typename.encode('utf-8'))
        for ext, name, mime, wfs_url in links:
            Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                            url=wfs_url,
                            defaults=dict(
                                extension=ext,
                                name=name,
                                mime=mime,
                                url=wfs_url,
                                link_type='data',
                            )
                        )


    elif instance.storeType == 'coverageStore':
        #FIXME(Ariel): This works for public layers, does it work for restricted too?
        # would those end up with no geotiff links, like, forever?
        permissions = {}
        permissions['anonymous'] = instance.get_gen_level(ANONYMOUS_USERS)
        permissions['authenticated'] = instance.get_gen_level(AUTHENTICATED_USERS)
        instance.set_gen_level(ANONYMOUS_USERS,'layer_readonly')

        links = wcs_links(ogc_server_settings.public_url + 'wcs?', instance.typename.encode('utf-8'),
            bbox=instance.bbox[:-1], crs=instance.bbox[-1], height=height, width=width)
        for ext, name, mime, wcs_url in links:
            Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                                url=wcs_url,
                                defaults=dict(
                                    extension=ext,
                                    name=name,
                                    mime=mime,
                                    link_type='data',
                                )
                            )
            
        instance.set_gen_level(ANONYMOUS_USERS,permissions['anonymous'])
        instance.set_gen_level(AUTHENTICATED_USERS,permissions['authenticated'])

    kml_reflector_link_download = ogc_server_settings.public_url + "wms/kml?" + urllib.urlencode({
        'layers': instance.typename.encode('utf-8'),
        'mode': "download"
    })

    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=kml_reflector_link_download,
                        defaults=dict(
                            extension='kml',
                            name=_("KML"),
                            mime='text/xml',
                            link_type='data',
                        )
                    )

    kml_reflector_link_view = ogc_server_settings.public_url + "wms/kml?" + urllib.urlencode({
        'layers': instance.typename.encode('utf-8'),
        'mode': "refresh"
    })

    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=kml_reflector_link_view,
                        defaults=dict(
                            extension='kml',
                            name=_("View in Google Earth"),
                            mime='text/xml',
                            link_type='data',
                        )
                    )

    tile_url = ('%sgwc/service/gmaps?' % ogc_server_settings.public_url +
                'layers=%s' % instance.typename.encode('utf-8') +
                '&zoom={z}&x={x}&y={y}' +
                '&format=image/png8'
                )

    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=tile_url,
                        defaults=dict(
                            extension='tiles',
                            name=_("Tiles"),
                            mime='image/png',
                            link_type='image',
                            )
                        )


    html_link_url = '%s%s' % (settings.SITEURL[:-1], instance.get_absolute_url())

    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=html_link_url,
                        defaults=dict(
                            extension='html',
                            name=instance.typename,
                            mime='text/html',
                            link_type='html',
                            )
                        )

    # remove links that belong to an old address

    for link in instance.link_set.all():
        if not urlparse(ogc_server_settings.public_url).hostname == urlparse(link.url).hostname:
            link.delete()

    #Save layer attributes
    set_attributes(instance)

    #Save layer styles
    set_styles(instance, gs_catalog)
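
A hedged sketch of how a post-save handler like the one above is typically connected; the Layer import path is an assumption based on GeoNode's layout:

# Sketch only; wiring assumed from standard Django signal usage.
from django.db.models.signals import post_save
from geonode.layers.models import Layer

post_save.connect(geoserver_post_save, sender=Layer)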
Example #6
    def add_to_geoserver(self):
        # Inspired (copied :) ) from https://groups.google.com/forum/#!msg/geonode-users/R-u57r8aECw/AuEpydZayfIJ # TODO : check license

        # We connect to the catalog
        gsUrl = settings.OGC_SERVER['default']['LOCATION'] + "rest"
        gsUser = settings.OGC_SERVER['default']['USER']
        gsPassword = settings.OGC_SERVER['default']['PASSWORD']
        cat = Catalog(gsUrl, gsUser, gsPassword)
        if cat is None:
            raise Exception('unable to instantiate geoserver catalog')

        # We get the workspace
        ws = cat.get_workspace(settings.DEFAULT_WORKSPACE)
        if ws is None:
            raise Exception('workspace %s not found in geoserver' %
                            settings.DEFAULT_WORKSPACE)

        # We get or create the datastore
        store = cat.get_store(self.datastore_name, ws)
        if store is None:
            store = cat.create_datastore(self.datastore_name, ws)
            store.connection_parameters.update(host="postgres",
                                               port="5432",
                                               database="postgres",
                                               user="******",
                                               passwd="postgres",
                                               schema='offline_osm',
                                               dbtype="postgis")
            cat.save(store)
        if store is None:
            raise Exception('datastore %s not found in geoserver' %
                            self.datastore_name)

        # We get or create each layer then register it into geonode
        for layer in Command.layers:
            layername = layer["name"]
            self.stdout.write('Adding {} to geoserver...'.format(layername))

            layer_exists = cat.get_layer(layername) is not None

            if not layer_exists or not self.options['no_overwrite']:
                self.stdout.write(
                    ' layer does not exist or no_overwrite unset, we add...')

                ft = cat.publish_featuretype(layername,
                                             store,
                                             'EPSG:4326',
                                             srs='EPSG:4326')
                if ft is None:
                    raise Exception('unable to publish layer %s' % layername)
                ft.title = 'OpenStreetMap Offline - ' + layername.split(
                    '_')[-1]
                ft.abstract = 'This is an automated extract of the OpenStreetMap database. It is available offline. It is intended to be used as a background layer, but the data can also serve analysis purposes.'
                cat.save(ft)

                self.stdout.write(
                    ' adding the style for {}...'.format(layername))
                # We look up the style file and apply it if present
                style_path = os.path.join(os.path.dirname(__file__), '..',
                                          '..', 'styles', layer["style_name"])
                if not os.path.exists(style_path):
                    self.stdout.write(
                        ' The file {} does not exist. No style will be applied for {}.'
                        .format(style_path, layername))
                else:
                    cat.create_style(layer["style_name"],
                                     open(style_path, 'r').read(),
                                     overwrite=True,
                                     workspace=settings.DEFAULT_WORKSPACE,
                                     raw=True)

                    style = cat.get_style(layer["style_name"], ws)
                    if style is None:
                        raise Exception('style not found (%s)' %
                                        (layer["style_name"]))

                    publishing = cat.get_layer(layername)
                    if publishing is None:
                        raise Exception('layer not found (%s)' % layername)

                    publishing.default_style = style
                    cat.save(publishing)

                self.stdout.write(
                    ' registering {} into geonode...'.format(layername))
                resource = cat.get_resource(layername, store, ws)
                if resource is None:
                    raise Exception('resource not found (%s)' % layername)

                layer, created = Layer.objects.get_or_create(name=layername)
                layer.workspace = ws.name
                layer.store = store.name
                layer.storeType = store.resource_type
                layer.typename = "%s:%s" % (ws.name.encode('utf-8'),
                                            resource.name.encode('utf-8'))
                layer.title = resource.title
                layer.abstract = resource.abstract
                layer.temporal_extent_start = self.import_timestamp
                layer.temporal_extent_end = self.import_timestamp
                try:
                    layer.save()
                except TypeError as e:
                    # We ignore a specific error in Geonode resourcebase_post_save when settings.SITEURL has no host (relative URL)
                    self.stdout.write(
                        'TODO : report Geonode exception in resourcebase_post_save when settings.SITEURL has no host (relative URL) : '
                        + str(e))
                    pass

                if created:
                    layer.set_default_permissions()
            else:
                self.stdout.write(' layer already exists, we skip.')

        # We get or create the layergroup
        self.stdout.write('Adding layergroup to geoserver...')
        layername = 'offline_osm'
        layergroup_exists = (cat.get_layergroup(layername) is not None)
        if not layergroup_exists or not self.options['no_overwrite']:
            self.stdout.write(
                ' layergroup does not exist or no_overwrite unset, we add...')

            layergroup = cat.get_layergroup(
                layername, workspace=settings.DEFAULT_WORKSPACE)
            if layergroup is None:
                layergroup = cat.create_layergroup(
                    layername,
                    layers=layernames,
                    workspace=settings.DEFAULT_WORKSPACE)
                if layergroup is None:
                    raise Exception('unable to publish layer %s' % layername)
            layergroup.title = 'OpenStreetMap Offline'
            layergroup.abstract = 'This is an automated extract of the OpenStreetMap database. It is available offline. It is intended to be used as a background layer, but the data can also serve analysis purposes.'
            cat.save(layergroup)
        else:
            self.stdout.write(' layergroup already exists, we skip.')

        # TODO : can we add layergroups to Geonode ?
        self.stdout.write(
            "laygroup won't be added to geonode (not supported yet)")
Example #7
class CatalogTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD'])

    def testAbout(self):
        about_html = self.cat.about()
        self.assertTrue('<html xmlns="http://www.w3.org/1999/xhtml"' in about_html)

    def testGSVersion(self):
        version = self.cat.gsversion()
        pat = re.compile(r'\d\.\d(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)


    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile", topp).name)
        self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile").name)
        self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)


    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))

        self.assertEqual("states", self.cat.get_resource("states", states).name)
        self.assertEqual("states", self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")

        fields = [
            states.title,
            states.abstract,
            states.native_bbox,
            states.latlon_bbox,
            states.projection,
            states.projection_policy
        ]

        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)

        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testResourcesUpdate(self):
        res_dest = self.cat.get_resources()
        count = 0

        for rd in res_dest:
            # only wms layers
            if rd.resource_type != "wmsLayer": continue
            # looking for same name
            ro = self.cat.get_resource(rd.name)

            if ro is not None:
                rd.title  = ro.title
                rd.abstract  = ro.abstract
                rd.keywords  = ro.keywords
                rd.projection  = ro.projection
                rd.native_bbox  = ro.native_bbox
                rd.latlon_bbox  = ro.latlon_bbox
                rd.projection_policy  = ro.projection_policy
                rd.enabled  = ro.enabled
                rd.advertised  = ro.advertised
                rd.metadata_links = ro.metadata_links or None

                self.cat.save(rd)
                self.cat.reload()

                # print "Updated layer: " + rd.name
                count += 1

        # print "Total updated layers: " + str(count)

    def testLayers(self):
        expected = set(["Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies", "tasmania_state_boundaries",
            "tasmania_cities", "states", "poly_landmarks", "tiger_roads", "poi",
            "giant_polygon"
        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assert_(len(expected ^ actual) == 0, message)

        states = self.cat.get_layer("states")

        self.assert_("states", states.name)
        self.assert_(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles), set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assert_(len(expected ^ actual) == 0, message)

        tas = self.cat.get_layergroup("tasmania")

        self.assert_("tasmania", tas.name)
        self.assert_(isinstance(tas, LayerGroup))
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        # Try to create a new Layer Group into the "topp" workspace
        self.assert_(self.cat.get_workspace("topp") is not None)
        tas2 = self.cat.create_layergroup("tasmania_reloaded", tas.layers, workspace = "topp")
        self.cat.save(tas2)
        self.assert_(self.cat.get_layergroup("tasmania_reloaded") is None)
        self.assert_(self.cat.get_layergroup("tasmania_reloaded", "topp") is not None)
        tas2 = self.cat.get_layergroup("tasmania_reloaded", "topp")
        self.assert_("tasmania_reloaded", tas2.name)
        self.assert_(isinstance(tas2, LayerGroup))
        self.assertEqual(tas2.workspace, "topp", tas2.workspace)
        self.assertEqual(tas2.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas2.layers)
        self.assertEqual(tas2.styles, [None, None, None, None], tas2.styles)

    def testStyles(self):
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld", self.cat.get_style("population").filename)
        self.assertEqual("population", self.cat.get_style("population").sld_name)
        self.assert_(self.cat.get_style('non-existing-style') is None)

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        try:
            self.cat.get_store(workspace="best workspace ever",
                name="best store ever")
            self.fail('expected exception')
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever', fre.message)
        try:
            self.cat.get_resource(workspace="best workspace ever",
                store="best store ever",
                name="best resource ever")
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever', fre.message)
Example #8
class GeoserverLayers(object):

    def __init__(self, gs_apirest, username, password):
        try:
            self.__catalog = Catalog(gs_apirest, username=username, password=password)

        except Exception as err:
            print("Geoserver API Rest Error: {0}".format(err))

    def __createSldStyle(self, sldpath, stylename, ovwrt=False):
        try:
            styles_list = [st.name for st in self.__catalog.get_styles()]
            if ovwrt or stylename not in styles_list:
                with open(sldpath) as f:
                    self.__catalog.create_style(stylename, f.read(), overwrite=ovwrt)
            else:
                print("This style already exists...")

        except Exception as err:
            print("Geoserver API Rest Error creating new style: {0}".format(err))

    def __rmvResources(self, rsrc):
        """
        Removing resources
        """
        self.__catalog.delete(rsrc)
        self.__catalog.reload()

    def rmvStyle(self, stylename):
        """
        Remove style
        """
        try:

            style = self.__catalog.get_style(stylename)
            self.__rmvResources(style)

        except Exception as err:
            print("Geoserver API Rest Error removing style: {0}".format(err))

    def rmvDataStore(self, ds_store):
        """
        Remove DataStore (with its layer)
        """
        try:

            lay_rm = self.__catalog.get_layer(ds_store)
            self.__rmvResources(lay_rm)

            str_rm = self.__catalog.get_store(ds_store)
            self.__rmvResources(str_rm)

        except Exception as err:
            print("Geoserver API Rest Error removing data store: {0}".format(err))

    def createGeoserverWMSLayer(self, data, ws_name, ds_name, stylename, sldpath, debug=False):
        """
        Create Geoserver WMS layer

        Status codes:
             2 : "Geoserver layer successfully created"
            -1 : "Workspace does not exist"
            -2 : "Datastore already exists"
            -3 : "Error creating Geoserver layer"
            -4 : "File missing"
        """
        try:

            if not self.__catalog.get_workspace(ws_name):
                print("Workspace does not exist")
                return -1

            ds_list = [i.name for i in self.__catalog.get_stores()]
            if ds_name in ds_list:
                print("Datastore already exists")
                return -2

            ds = self.__catalog.create_datastore(ds_name, ws_name)
            ft = self.__catalog.create_featurestore(ds_name, workspace=ws_name,data=data)

            print("Geoserver layer successfully created...")

            self.__createSldStyle(sldpath, stylename)
            lyr = self.__catalog.get_layer(ds_name)
            lyr.enabled = True
            lyr.default_style = stylename
            self.__catalog.save(lyr)

            if debug:
                rsrc = self.__catalog.get_resource(ds_name, workspace=ws_name)
                print("Data Store XML: {}".format(rsrc.href))

            return 2

        except Exception as err:
            print("Geoserver API Rest Error creating new layer: {0}".format(err))
            return -3
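
A hedged usage sketch for the wrapper above; the URL, credentials, names, and file paths are all placeholders:

# Usage sketch; all values below are placeholders.
gs = GeoserverLayers("http://localhost:8080/geoserver/rest",
                     username="admin", password="geoserver")
status = gs.createGeoserverWMSLayer(
    data="/tmp/roads.shp",          # data handed to create_featurestore
    ws_name="geonode",
    ds_name="roads",
    stylename="roads_style",
    sldpath="/tmp/roads_style.sld",
    debug=True,
)
print(status)  # 2 on success, negative status codes as documented above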
Example #9
class ModifyingTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)
        
        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
                        [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
                        rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)


        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            self.cat.delete(ds)

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    def testCoverageStoreCreate(self):
        ds = self.cat.create_coveragestore2("coverage_gsconfig")
        ds.data_url = "file:data/mytiff.tiff"
        self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs) 

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)
     
    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.items():
            self.assertEqual(v, shapefile_plus_sidecars[k])
 
        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError, 
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf':    'test/data/Pk50095.tfw',
            'prj':    'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)


    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw':    'test/data/Pk50095.tfw',
            'prj':    'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        # TODO: Uploading WorldImage file no longer works???
        # ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        # self.assertRaises(
        #         ConflictingDataError, 
        #         lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # )

        self.assertRaises(
            UploadError, 
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")
     
        old_default_style = lyr.default_style
        lyr.default_style = next(s for s in lyr.styles if s.name == "pophatch")
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])


    def testStyles(self):
        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self): 
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)
Example #10
def _register_cascaded_layers(service, owner=None):
    """
    Register layers for a cascading WMS
    """
    if service.type == 'WMS' or service.type == "OWS":
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      _user, _password)
        # Can we always assume that it is geonode?
        # Should cascading layers have a separate workspace?
        cascade_ws = cat.get_workspace(service.name)
        if cascade_ws is None:
            cascade_ws = cat.create_workspace(service.name, 'cascade')
        try:
            store = cat.get_store(service.name, cascade_ws)
        except Exception:
            store = cat.create_wmsstore(service.name, cascade_ws)
            cat.save(store)
        wms = WebMapService(service.base_url)
        layers = list(wms.contents)

        count = 0
        for layer in layers:
            lyr = cat.get_resource(layer, store, cascade_ws)
            if lyr is None:
                if service.type in ["WMS", "OWS"]:
                    resource = cat.create_wmslayer(cascade_ws, store, layer)
                elif service.type == "WFS":
                    resource = cat.create_wfslayer(cascade_ws, store, layer)

                if resource:
                    bbox = resource.latlon_bbox
                    cascaded_layer, created = Layer.objects.get_or_create(
                        typename="%s:%s" % (cascade_ws.name, resource.name),
                        service=service,
                        defaults={
                            "name": resource.name,
                            "workspace": cascade_ws.name,
                            "store": store.name,
                            "storeType": store.resource_type,
                            "title": resource.title or 'No title provided',
                            "abstract": resource.abstract
                            or 'No abstract provided',
                            "owner": None,
                            "uuid": str(uuid.uuid4()),
                            "bbox_x0": bbox[0],
                            "bbox_x1": bbox[1],
                            "bbox_y0": bbox[2],
                            "bbox_y1": bbox[3],
                        })

                    if created:
                        cascaded_layer.save()
                        if cascaded_layer is not None and cascaded_layer.bbox is None:
                            cascaded_layer._populate_from_gs(
                                gs_resource=resource)
                        cascaded_layer.set_default_permissions()

                        service_layer, created = ServiceLayer.objects.get_or_create(
                            service=service, typename=cascaded_layer.name)
                        service_layer.layer = cascaded_layer
                        service_layer.title = cascaded_layer.title
                        service_layer.description = cascaded_layer.abstract
                        service_layer.styles = cascaded_layer.styles
                        service_layer.save()

                        count += 1
                    else:
                        logger.error(
                            "Resource %s from store %s could not be saved as layer"
                            % (layer, store.name))
        message = "%d Layers Registered" % count
        return_dict = {'status': 'ok', 'msg': message}
        return HttpResponse(json.dumps(return_dict),
                            mimetype='application/json',
                            status=200)
    elif service.type == 'WCS':
        return HttpResponse('Not Implemented (Yet)', status=501)
    else:
        return HttpResponse('Invalid Service Type', status=400)
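
A hedged usage sketch; the Service import path and lookup value are assumptions based on GeoNode's services app:

# Sketch only; the Service model path and name are assumptions.
from geonode.services.models import Service

service = Service.objects.get(name="remote-wms-example")
response = _register_cascaded_layers(service)
print(response.status_code)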
Example #11
class wrap_geoserver:
    """ Geoserver (gsconfig) wrapper """
    def __init__(self,
                 geoserver_name,
                 username=username,
                 password=password,
                 easy=False):
        if geoserver_name in list(REST.keys()):
            self.path = REST[geoserver_name]
        else:
            self.path = geoserver_name
        self.wms = self.path.replace("rest/", "wms")

        self.name = geoserver_name
        self.catalog = Catalog(self.path, username, password)

        if not easy:
            self.layers = []
            self.layer_names = []

            for layer in self.catalog.get_layers():
                self.layers.append(layer)
                self.layer_names.append(layer.name)

            self.stores = [store for store in self.catalog.get_stores()]
            self.store_names = [store.name for store in self.stores]

            styles = []
            self.workspaces = []
            self.workspace_names = []

            for workspace in self.catalog.get_workspaces():
                styles = styles + self.catalog.get_styles(workspace)
                self.workspace_names.append(workspace._name)
                self.workspaces.append(workspace)

            self.styles = styles + [
                style for style in self.catalog.get_styles()
            ]
            self.style_names = [style.name for style in self.styles]

    def unpack(self, workspace_name, store_type="datastore"):
        layers_and_styles = {}
        features = []
        workspace = self.get_workspace(workspace_name)

        if store_type == "datastore":
            store_url = workspace.datastore_url
        elif store_type == "coveragestore":
            store_url = workspace.coveragestore_url
        else:
            print("No correct store given")

        for datastore in tqdm(get(store_url, "name")):
            url = "{}workspaces/{}/datastores/{}".format(
                self.path, workspace.name, datastore)
            features = features + get(url, between_quotes=True)

        for feature in features:
            layer_name = os.path.basename(feature).split(".")[0]
            self.get_layer(self.get_slug(workspace.name, layer_name))
            layers_and_styles[layer_name] = self.layer.default_style

        setattr(self, workspace_name + "_data", layers_and_styles)
        return layers_and_styles

    def get_layer(self, layer, easy=False):
        self.layer = self.catalog.get_layer(layer)

        if not easy:
            self.resource = self.layer.resource
            self.layer_name = self.layer.resource.name
            self.sld_name = self.layer.default_style.name
            self.sld_body = self.layer.default_style.sld_body
            self.layer_latlon_bbox = self.layer.resource.latlon_bbox
            self.layer_title = self.layer.resource.title
            self.layer_abstract = self.layer.resource.abstract

    def get_store(self, layer):
        self.store = self.layer.resource._store

    def get_resource(self):
        self.resource = self.catalog.get_resource(self.layer.name, self.store)

    def get_workspace(self, workspace_name):
        self.workspace = self.catalog.get_workspace(workspace_name)
        self.workspace_name = self.workspace._name
        return self.workspace

    def write_abstract(self, data, load_resource=True):
        if load_resource:
            self.get_resource()
        self.resource.abstract = data
        self.catalog.save(self.resource)

    def write_title(self, title):
        self.resource.title = title
        self.catalog.save(self.resource)

    def get_connection_parameters(self):
        self.get_resource()
        return self.resource.store.connection_parameters

    def create_workspace(self, workspace_name):
        workspace_exists = workspace_name in self.workspace_names

        if not workspace_exists:
            self.workspace = self.catalog.create_workspace(workspace_name)

        else:
            print("workspace already exists, using existing workspace")

        self.workspace = self.catalog.get_workspace(workspace_name)
        self.workspace_name = workspace_name

    def create_postgis_datastore(self, store_name, workspace_name, pg_data):

        try:
            self.store = self.catalog.get_store(store_name,
                                                self.workspace_name)
            print("store within workspace exists, using existing store")

        except Exception as e:
            print(e)

            ds = self.catalog.create_datastore(store_name, workspace_name)
            ds.connection_parameters.update(
                host=pg_data["host"],
                port=pg_data["port"],
                database=pg_data["database"],
                user=pg_data["username"],
                passwd=pg_data["password"],
                dbtype="postgis",
                schema="public",
            )

            self.save(ds)
            self.store = self.catalog.get_store(store_name,
                                                self.workspace_name)
            self.store_name = store_name

    def publish_layer(self,
                      layer_name,
                      workspace_name,
                      overwrite=False,
                      epsg="3857",
                      reload=False):

        layer_exists = layer_name in self.layer_names
        # if layer_name in self.workspace_layers[workspace_name]:
        slug = self.get_slug(workspace_name, layer_name)
        if overwrite and layer_exists:
            print("Layer exists, deleting layer")
            try:

                self.layer = self.catalog.get_layer(slug)
                self.delete(self.layer)
                self.reload()

                layer_exists = False

            except Exception as e:
                print(e)
                print("Layer does not exist in workspace")
                layer_exists = False

        if not layer_exists:

            feature_type = self.catalog.publish_featuretype(
                layer_name,
                self.store,
                "EPSG:{}".format(str(epsg)),
                srs="EPSG:{}".format(str(epsg)),
            )
            self.save(feature_type)
            self.feature_type = feature_type

        else:
            print("layer already exists, using existing layer")

        if reload:
            self.get_layer(slug)

        self.layer_name = layer_name

    def publish_layergroup(self,
                           name,
                           layers,
                           styles=(),
                           bounds=None,
                           workspace=None):
        layer_group = self.catalog.create_layergroup(name, layers, styles,
                                                     bounds, workspace)
        self.save(layer_group)

    def save(self, save_object):
        return self.catalog.save(save_object)

    def close(self):
        self.catalog = None

    def delete(self, delete_object):
        self.catalog.delete(delete_object)

    def reload(self):
        self.catalog.reload()

    def upload_shapefile(self, layer_name, shapefile_path):
        path = shapefile_path.split(".shp")[0]
        shapefile = shapefile_and_friends(path)
        ft = self.catalog.create_featurestore(layer_name, shapefile,
                                              self.workspace)
        self.save(ft)

    def upload_sld(self, sld_name, workspace_name, sld, overwrite=True):
        style_exists = sld_name in self.style_names

        if overwrite and style_exists:
            print("Overwriting style")
            style = self.catalog.get_style(sld_name, workspace_name)
            self.delete(style)
            self.reload()
            style_exists = False

        if not style_exists:
            try:
                self.catalog.create_style(sld_name, sld, False, workspace_name,
                                          "sld11")
            except Exception as e:
                print(e)

                style = self.catalog.get_style(sld_name, workspace_name)
                self.delete(style)
                self.reload()
                self.catalog.create_style(sld_name, sld, False, workspace_name,
                                          "sld10")
            self.style_name = sld_name

        else:
            print("Style already exists, using current style")
            self.style_name = sld_name

    def set_sld_for_layer(self,
                          workspace_name=None,
                          style_name=None,
                          use_custom=False):
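        # With use_custom=False, fall back to the workspace and style most
        # recently set on this instance; otherwise build the slug from the
        # explicit arguments (a bare style name is used when no workspace is
        # given).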
        if not use_custom:
            workspace_name = self.workspace_name
            style_name = self.style_name
            self.style_slug = self.get_slug(workspace_name, style_name)

        else:
            if workspace_name is None:
                self.style_slug = style_name
            else:
                self.style_slug = self.get_slug(workspace_name, style_name)

        self.style = self.catalog.get_style(self.style_slug)

        print("Setting {} for {}".format(self.style.name, self.layer.name))
        self.layer.default_style = self.style
        self.save(self.layer)

    def get_slug(self, workspace, name):
        return "{}:{}".format(workspace, name)

    def get_slug_data(self, slug):
        workspace_name = slug.split(":")[0]
        layer_name = slug.split(":")[1]
        return workspace_name, layer_name

    def get_sld(self, layer_slug=None):
        if layer_slug is None:
            self.style = self.catalog.get_style(self.layer_slug)
        else:
            self.style = self.catalog.get_style(layer_slug)

        self.sld_body = self.style.sld_body
        return self.sld_body

    def get_layer_workspace(self, layer_name):
        return self.catalog.get_layer(layer_name).resource.workspace.name
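Taken together, the methods above form a thin convenience wrapper around gsconfig's Catalog. A minimal usage sketch follows; it assumes an already-constructed wrapper instance gs whose catalog attribute is an authenticated Catalog, and all names and credentials are illustrative:

# Hypothetical usage of the wrapper methods defined above.
pg_data = {"host": "localhost", "port": "5432", "database": "gis",
           "username": "geoserver", "password": "secret"}

gs.create_workspace("demo")
gs.create_postgis_datastore("demo_store", "demo", pg_data)
gs.publish_layer("roads", "demo", overwrite=True, epsg="4326")

with open("roads.sld") as f:
    gs.upload_sld("roads_style", "demo", f.read())
gs.set_sld_for_layer()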
Beispiel #12
0
def geoserver_post_save(instance, sender, **kwargs):
    """Save keywords to GeoServer

       The way keywords are implemented require the layer
       to be saved to the database before accessing them.
    """
    url = "%srest" % settings.GEOSERVER_BASE_URL

    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (FailedRequestError, EnvironmentError) as e:
        msg = ('Could not connect to geoserver at "%s" '
               'to save information for layer "%s"' % (
                   settings.GEOSERVER_BASE_URL, instance.name))
        logger.warn(msg, exc_info=True)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a syncronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return

    gs_resource.keywords = instance.keyword_list()
    gs_catalog.save(gs_resource)

    bbox = gs_resource.latlon_bbox
    dx = float(bbox[1]) - float(bbox[0])
    dy = float(bbox[3]) - float(bbox[2])

    dataAspect = 1 if dy == 0 else dx / dy

    height = 550
    width = int(height * dataAspect)

    # Set download links for WMS, WCS or WFS and KML

    links = wms_links(settings.GEOSERVER_BASE_URL + 'wms?',
                    instance.typename, instance.bbox_string,
                    instance.srid, height, width)

    for ext, name, mime, wms_url in links:
        instance.link_set.get_or_create(url=wms_url,
                          defaults=dict(
                            extension=ext,
                            name=name,
                            mime=mime,
                            link_type='image',
                           )
        )


    if instance.storeType == "dataStore":
        links = wfs_links(settings.GEOSERVER_BASE_URL + 'wfs?', instance.typename)
        for ext, name, mime, wfs_url in links:
            instance.link_set.get_or_create(url=wfs_url,
                           defaults=dict(
                            extension=ext,
                            name=name,
                            mime=mime,
                            link_type='data',
                            )
            )


    elif instance.storeType == 'coverageStore':
        #FIXME(Ariel): This works for public layers, does it work for restricted too?
        # would those end up with no geotiff links, like, forever?
        links = wcs_links(settings.GEOSERVER_BASE_URL + 'wcs?', instance.typename)
        for ext, name, mime, wcs_url in links:
            instance.link_set.get_or_create(url=wcs_url,
                              defaults=dict(
                                extension=ext,
                                name=name,
                                mime=mime,
                                link_type='data',
                                )
                               )


    kml_reflector_link_download = settings.GEOSERVER_BASE_URL + "wms/kml?" + urllib.urlencode({
        'layers': instance.typename,
        'mode': "download"
    })

    instance.link_set.get_or_create(url=kml_reflector_link_download,
                       defaults=dict(
                        extension='kml',
                        name=_("KML"),
                        mime='text/xml',
                        link_type='data',
                        )
                       )

    kml_reflector_link_view = settings.GEOSERVER_BASE_URL + "wms/kml?" + urllib.urlencode({
        'layers': instance.typename,
        'mode': "refresh"
    })

    instance.link_set.get_or_create(url=kml_reflector_link_view,
                       defaults=dict(
                        extension='kml',
                        name=_("View in Google Earth"),
                        mime='text/xml',
                        link_type='data',
                        )
                       )

    tile_url = ('%sgwc/service/gmaps?' % settings.GEOSERVER_BASE_URL +
                'layers=%s' % instance.typename +
                '&zoom={z}&x={x}&y={y}' +
                '&format=image/png8'
                )

    instance.link_set.get_or_create(url=tile_url,
                       defaults=dict(
                        extension='tiles',
                        name=_("Tiles"),
                        mime='image/png',
                        link_type='image',
                        )
                       )

    #Save layer attributes
    set_attributes(instance)

    #Save layer styles
    set_styles(instance, gs_catalog)
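The thumbnail dimensions used for the WMS links above are derived from the layer's lat/lon bounding box, which gsconfig returns as (minx, maxx, miny, maxy, crs). A standalone sketch of that calculation (function name and sample values are illustrative):

def image_size_for_bbox(latlon_bbox, height=550):
    # latlon_bbox is ordered (minx, maxx, miny, maxy, crs)
    dx = float(latlon_bbox[1]) - float(latlon_bbox[0])
    dy = float(latlon_bbox[3]) - float(latlon_bbox[2])
    aspect = 1 if dy == 0 else dx / dy   # guard against zero-height boxes
    return int(height * aspect), height  # (width, height) in pixels

# A box spanning 20 x 10 degrees yields a 1100 x 550 thumbnail.
print(image_size_for_bbox((0.0, 20.0, 0.0, 10.0, 'EPSG:4326')))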
Beispiel #13
0
def createDataStore(name, filename, format="shapefile"):
    cat = Catalog("{0}/rest/".format(geoserver_connection), geoserver_username,
                  geoserver_password)
    ws = cat.get_workspace(workspace)
    msg = ""
    if format == "shapefile":
        shapefile = shapefile_and_friends(filename)
        try:
            cat.create_featurestore(name, shapefile, workspace)
        except ConflictingDataError as inst:
            msg = str(inst)
        except:
            raise
        resource = cat.get_resource(name, workspace=ws)
        resource.projection = 'EPSG:4326'
        cat.save(resource)
        resource.projection_policy = 'REPROJECT_TO_DECLARED'
        cat.save(resource)
        resource.refresh()
        bbox = resource.latlon_bbox[:4]
        solr_geom = 'ENVELOPE({0},{1},{2},{3})'.format(bbox[0], bbox[1],
                                                       bbox[3], bbox[2])
        return {
            "solr_geom": solr_geom,
            "msg": msg,
            "resource_type": resource.resource_type
        }
    elif format == "image":
        if cat.get_store(name):
            newcs = cat.get_store(name, workspace=ws)
            msg = "Geoserver datastore already existed. Update existing datastore."
        else:
            newcs = cat.create_coveragestore2(name, ws)
        newcs.type = "GeoTIFF"
        newcs.url = filename
        cat.save(newcs)
        # add coverage
        url = "{0}/rest/workspaces/{1}/coveragestores/{2}/coverages.json"
        url = url.format(geoserver_connection, ws.name, name)
        headers = {"Content-Type": "application/json"}
        coverageName = os.path.splitext(os.path.basename(filename))[0]
        postdata = {
            "coverage": {
                "nativeCoverageName": coverageName,
                "name": coverageName,
                'projectionPolicy': 'REPROJECT_TO_DECLARED',
                'srs': 'EPSG:4326'
            }
        }
        requests.post(url,
                      json.dumps(postdata),
                      headers=headers,
                      auth=(geoserver_username, geoserver_password))
        # Reproject
        resource = cat.get_resource(name, workspace=ws)
        url = "{0}/rest/workspaces/{1}/coveragestores/{2}/coverages/{2}?{3}"
        parameters = "recalculate=nativebbox,latlonbbox"
        url = url.format(geoserver_connection, ws.name, name, parameters)
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        requests.post(url,
                      headers=headers,
                      auth=(geoserver_username, geoserver_password))
        # resource.refresh()
        bbox = resource.latlon_bbox[:4]
        solr_geom = 'ENVELOPE({0},{1},{2},{3})'.format(bbox[0], bbox[1],
                                                       bbox[3], bbox[2])
        return {
            "solr_geom": solr_geom,
            "msg": msg,
            "resource_type": resource.resource_type
        }
    return True
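A hypothetical call pattern for createDataStore, assuming the module-level geoserver_connection, geoserver_username, geoserver_password and workspace settings used above are configured; the returned solr_geom string follows the ENVELOPE(minx, maxx, maxy, miny) ordering expected by Solr:

# Illustrative only -- layer names and paths are made up.
result = createDataStore("roads_2020", "/data/shapefiles/roads_2020.shp",
                         format="shapefile")
if result["msg"]:
    print(result["msg"])        # non-empty when the store already existed
print(result["solr_geom"])      # e.g. ENVELOPE(-118.7,-117.6,34.3,33.7)
print(result["resource_type"])  # 'featureType' for vector stores

createDataStore("elevation", "file:///data/rasters/elevation.tif",
                format="image")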
Beispiel #14
0
class UploaderTests(TestCase):
    """Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        """Convenience method for creating a datastore.
        """
        settings = connection.settings_dict
        ds_name = settings['NAME']
        params = {
            'database': ds_name,
            'passwd': settings['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': settings['HOST'],
            'user': settings['USER'],
            'port': settings['PORT'],
            'enabled': 'True'
        }

        store = catalog.create_datastore(ds_name, workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(ds_name)

    def create_user(self, username, password, **kwargs):
        """Convenience method for creating users.
        """
        user, created = User.objects.get_or_create(username=username, **kwargs)

        if created:
            user.set_password(password)
            user.save()

        return user

    def setUp(self):

        self.assertTrue(
            os.path.exists(_TEST_FILES_DIR),
            'Test could not run due to missing test data at {0!r}'
            .format(_TEST_FILES_DIR)
        )

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.admin_user = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_user = self.create_user('non_admin', 'non_admin')
        self.catalog = Catalog(
            ogc_server_settings.internal_rest,
            *ogc_server_settings.credentials
        )
        if self.catalog.get_workspace('geonode') is None:
            self.catalog.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.catalog)

    def tearDown(self):
        """Clean up geoserver.
        """
        self.catalog.delete(self.datastore, recurse=True)

    def import_file(self, path, configs=None):
        """Imports the file.
        """
        if configs is None:
            configs = []
        self.assertTrue(os.path.exists(path))

        # run ogr2ogr
        ogr = OGRImport(path)
        layers = ogr.handle(configuration_options=configs)

        return layers

    def generic_import(self, filename, configs=None):
        if configs is None:
            configs = [{'index': 0}]

        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_results = []
        for result in results:
            if result[1].get('raster'):
                layer_path = result[0]
                layer_name = os.path.splitext(os.path.basename(layer_path))[0]
                layer = Layer.objects.get(name=layer_name)
                self.assertTrue(layer_path.endswith('.tif'))
                self.assertTrue(os.path.exists(layer_path))
                gdal_layer = gdal.OpenEx(layer_path)
                self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
                layer_results.append(layer)
            else:
                layer = Layer.objects.get(name=result[0])
                self.assertEqual(layer.srid, 'EPSG:4326')
                self.assertEqual(layer.store, self.datastore.name)
                self.assertEqual(layer.storeType, 'dataStore')

                if not path.endswith('zip'):
                    self.assertGreaterEqual(
                        layer.attributes.count(),
                        DataSource(path)[0].num_fields
                    )

                layer_results.append(layer)

        return layer_results[0]

    def generic_api_upload(self, filenames, configs=None):
        """Tests the import api.
        """
        client = AdminClient()
        client.login_as_non_admin()

        # Don't accidentally iterate over given 'foo' as ['f', 'o', 'o'].
        self.assertNotIsInstance(filenames, str)

        # Upload Files
        outfiles = []
        for filename in filenames:
            path = test_file(filename)
            with open(path) as stream:
                data = stream.read()
            upload = SimpleUploadedFile(filename, data)
            outfiles.append(upload)
        response = client.post(
            reverse('uploads-new-json'),
            {'file': outfiles,
             'json': json.dumps(configs)},
            follow=True)
        content = json.loads(response.content)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(content['id'], 1)

        # Configure Uploaded Files
        upload_id = content['id']
        upload_layers = UploadLayer.objects.filter(upload_id=upload_id)

        for upload_layer in upload_layers:
            for config in configs:
                if config['upload_file_name'] == upload_layer.name:
                    payload = config['config']
                    url = '/importer-api/data-layers/{0}/configure/'.format(upload_layer.id)
                    response = client.post(
                        url, data=json.dumps(payload),
                        content_type='application/json'
                    )
                    self.assertEqual(response.status_code, 200)
                    url = '/importer-api/data-layers/{0}/'.format(upload_layer.id)
                    response = client.get(url, content_type='application/json')
                    self.assertEqual(response.status_code, 200)

        return content

    def generic_raster_import(self, filename, configs=None):
        if configs is None:
            configs = [{'index': 0}]

        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_path = results[0][0]
        layer_name = os.path.splitext(os.path.basename(layer_path))[0]
        layer = Layer.objects.get(name=layer_name)
        self.assertTrue(layer_path.endswith('.tif'))
        self.assertTrue(os.path.exists(layer_path))
        gdal_layer = gdal.OpenEx(layer_path)
        self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
        return layer

    def test_multi_upload(self):
        """Tests Uploading Multiple Files
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_year_field.zip',
                'boxes_with_date.zip',
                'point_with_date.geojson'
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_year_field.shp',
                    'config': [{'index': 0}]
                },
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [{'index': 0}]
                },
                {
                    'upload_file_name': 'point_with_date.geojson',
                    'config': [{'index': 0}]
                }
            ]
        )
        self.assertEqual(9, upload['count'])

    def test_upload_with_slds(self):
        """Tests Uploading sld
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_date.zip',
                'boxes.sld',
                'boxes1.sld'
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [
                        {
                            'index': 0,
                            'default_style': 'boxes.sld',
                            'styles': ['boxes.sld', 'boxes1.sld']
                        }
                    ]
                }
            ]
        )
        self.assertEqual(6, upload['count'])
        upload_id = upload['id']
        upload_obj = UploadedData.objects.get(pk=upload_id)
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk

        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(6, upfiles_count)

        # Warning: this assumes that Layer pks equal UploadLayer pks
        layer = Layer.objects.get(pk=layer_id)
        gslayer = self.catalog.get_layer(layer.name)
        default_style = gslayer.default_style
        # TODO: can we use public API or omit this?
        self.catalog._cache.clear()
        self.assertEqual('boxes.sld', default_style.filename)

    def test_upload_with_metadata(self):
        """Tests Uploading metadata
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_date.zip',
                'samplemetadata.xml',
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [
                        {
                            'index': 0,
                            'metadata': 'samplemetadata.xml'
                        }
                    ]
                }
            ]
        )
        self.assertEqual(5, upload['count'])
        upload_id = upload['id']
        upload_obj = UploadedData.objects.get(pk=upload_id)
        # TODO: why did we get upload_obj?
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk

        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(5, upfiles_count)

        layer = Layer.objects.get(pk=layer_id)
        self.assertEqual(layer.language, 'eng')
        self.assertEqual(layer.title, 'Old_Americas_LSIB_Polygons_Detailed_2013Mar')

    def test_raster(self):
        """Exercise raster import.
        """
        layer = self.generic_raster_import(
            'test_grid.tif',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        self.assertTrue(layer.name.startswith('test_grid'))

    def test_box_with_year_field(self):
        """Tests the import of test_box_with_year_field.
        """

        layer = self.generic_import(
            'boxes_with_year_field.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """Tests the import of test_boxes_with_date.
        """

        layer = self.generic_import(
            'boxes_with_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )

        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_gpkg(self):
        """Tests the import of test_boxes_with_date.gpkg.
        """

        layer = self.generic_import(
            'boxes_with_date.gpkg',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )

        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_plus_raster_gpkg_by_index(self):
        """Tests the import of multilayer vector + raster geopackage using index
        """

        layer = self.generic_import(
            'boxes_plus_raster.gpkg',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                },
                {'index': 1},
                {'index': 2},
                {'index': 3},
                {'index': 4},
                {'index': 5},
                {'index': 6},
                {'index': 7},
            ]
        )

        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """Tests a CSV with WKT polygon.
        """

        layer = self.generic_import(
            'boxes_with_date.csv',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_csv_missing_features(self):
        """Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding.
        """

        self.generic_import(
            'missing-features.csv',
            configs=[
                {'index': 0}
            ]
        )

    def test_boxes_with_iso_date(self):
        """Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import(
            'boxes_with_date_iso_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """Import the same layer twice to ensure file names increment properly.
        """
        path = test_file('boxes_with_date_iso_date.zip')
        ogr = OGRImport(path)
        layers1 = ogr.handle({'index': 0, 'name': 'test'})
        layers2 = ogr.handle({'index': 0, 'name': 'test'})

        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_launder(self):
        """Ensure the launder function works as expected.
        """
        self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3')
        self.assertEqual(launder('Testing#'), 'testing_')
        self.assertEqual(launder('   '), '_')

    def test_boxes_with_date_iso_date_zip(self):
        """Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import(
            'boxes_with_date_iso_date.zip',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """Tests the import of test_boxes_with_dates_bc.
        """

        layer = self.generic_import(
            'boxes_with_dates_bc.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )

        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """Tests the import of point_with_date.geojson
        """

        layer = self.generic_import(
            'point_with_date.geojson',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )

        # OGR will name geojson layers 'ogrgeojson'; we rename to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """Tests the import of test_boxes_with_end_date.

        This layer has a date and an end date field that are typed correctly.
        """

        layer = self.generic_import(
            'boxes_with_end_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date', 'enddate'],
                    'start_date': 'date',
                    'end_date': 'enddate',
                    'configureTime': True
                }
            ]
        )

        date_attr = get_layer_attr(layer, 'date_as_date')
        end_date_attr = get_layer_attr(layer, 'enddate_as_date')

        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
            end_attribute=end_date_attr.attribute
        )
        self.generic_time_check(
            layer,
            attribute=date_attr.attribute,
            end_attribute=end_date_attr.attribute
        )

    def test_us_states_kml(self):
        """Tests the import of us_states_kml.

        This layer has a date and an end date field that are typed correctly.
        """
        # TODO: Support time in kmls.
        layer = self.generic_import(
            'us_states.kml',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        self.assertEqual(layer.name.lower(), "us_states")

    def test_mojstrovka_gpx(self):
        """Tests the import of mojstrovka.gpx.

        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import(
            'mojstrovka.gpx',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['time'],
                    'configureTime': True
                }
            ]
        )
        date_attr = get_layer_attr(layer, 'time_as_date')
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """Convenience method to run generic tests on time layers.
        """
        # TODO: can we use public API or omit this?
        self.catalog._cache.clear()
        resource = self.catalog.get_resource(
            layer.name, store=layer.store, workspace=self.workspace
        )

        time_info = resource.metadata['time']
        self.assertEqual('LIST', time_info.presentation)
        self.assertEqual(True, time_info.enabled)
        self.assertEqual(attribute, time_info.attribute)
        self.assertEqual(end_attribute, time_info.end_attribute)

    def test_us_shootings_csv(self):
        """Tests the import of US_Shootings.csv.
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')

        path = test_file('US_Shootings.csv')
        layer = self.generic_import(
            path,
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['Date']
                }
            ]
        )
        self.assertTrue(layer.name.startswith('us_shootings'))

        date_field = 'date'
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_field
        )
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """Tests the import of US_Civil_Rights_Sitins0.csv
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        layer = self.generic_import(
            'US_Civil_Rights_Sitins0.csv',
            configs=[
                {
                    'index': 0, 'convert_to_date': ['Date']
                }
            ]
        )
        self.assertEqual(layer.name.lower(), 'us_civil_rights_sitins0')

    def get_layer_names(self, path):
        """Gets layer names from a data source.
        """
        data_source = DataSource(path)
        return [layer.name for layer in data_source]

    def test_gdal_import(self):
        path = test_file('point_with_date.geojson')
        self.generic_import(
            path,
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )

    def test_wfs(self):
        """Tests the import from a WFS Endpoint
        """
        wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs'
        ogr = OGRImport(wfs)
        configs = [
            {'layer_name': 'og:bugsites'},
            {'layer_name': 'topp:states'}
        ]
        layers = ogr.handle(configuration_options=configs)
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_arcgisjson(self):
        """Tests the import from a WFS Endpoint
        """
        endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json'
        ogr = OGRImport(endpoint)
        configs = [{'index':0}]
        layers = ogr.handle(configuration_options=configs)
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_file_add_view(self):
        """Tests the file_add_view.
        """
        client = AdminClient()

        # test login required for this view
        request = client.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path, reverse('uploads-list'))
        self.assertEqual(len(response.context['object_list']), 1)
        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))

        with open(test_file('empty_file.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """Tests the file_add_view.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new-json'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """Tests the describe fields functionality.
        """
        path = test_file('US_Shootings.csv')
        with GDALInspector(path) as inspector:
            layers = inspector.describe_fields()

        self.assertEqual(layers[0]['layer_name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed',
                                                                    'Wounded', 'Location', 'City',
                                                                    'Longitude', 'Latitude'])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """Tests the describe fields functionality.
        """
        filenames = {
            'US_Shootings.csv': 'CSV',
            'point_with_date.geojson': 'GeoJSON',
            'mojstrovka.gpx': 'GPX',
            'us_states.kml': 'KML',
            'boxes_with_year_field.shp': 'ESRI Shapefile',
            'boxes_with_date_iso_date.zip': 'ESRI Shapefile'
        }
        from .models import NoDataSourceFound
        try:
            for filename, file_type in sorted(filenames.items()):
                path = test_file(filename)
                with GDALInspector(path) as inspector:
                    self.assertEqual(inspector.file_type(), file_type)
        except NoDataSourceFound:
            logging.exception('No data source found in: {0}'.format(path))
            raise

    def test_configure_view(self):
        """Tests the configuration view.
        """
        path = test_file('point_with_date.geojson')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        client = AdminClient()
        client.login_as_non_admin()

        with open(path) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        upload = response.context['object_list'][0]

        payload = [
            {
                'index': 0,
                'convert_to_date': ['date'],
                'start_date': 'date',
                'configureTime': True,
                'editable': True,
                'permissions': {
                    'users': {
                        'test': new_user_perms,
                        'AnonymousUser': [
                            'change_layer_data',
                            'download_resourcebase',
                            'view_resourcebase'
                        ]
                    }
                }
            }
        ]
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json'
        )

        self.assertEqual(response.status_code, 200)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertEqual(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(
            Layer.objects.all()[0].owner.username,
            self.non_admin_user.username
        )

        perms = first_layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_user.username)

        # check user permissions
        expected_perms = [
            u'publish_resourcebase',
            u'change_resourcebase_permissions',
            u'delete_resourcebase',
            u'change_resourcebase',
            u'change_resourcebase_metadata',
            u'download_resourcebase',
            u'view_resourcebase',
            u'change_layer_style',
            u'change_layer_data'
        ]
        for perm in expected_perms:
            self.assertIn(perm, perms['users'][user])

        self.assertTrue(perms['users'][new_user])
        self.assertIn(
            'change_resourcebase_permissions',
            perms['users'][new_user]
        )
        self.assertIn(
            'change_layer_data',
            perms['users'][User.objects.get(username='******')]
        )

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, first_layer)

    def test_configure_view_convert_date(self):
        """Tests the configure view with a dataset that needs to be converted to a date.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('US_Shootings.csv')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        upload = response.context['object_list'][0]

        payload = [
            {
                'index': 0,
                'convert_to_date': ['Date'],
                'start_date': 'Date',
                'configureTime': True,
                'editable': True
            }
        ]

        response = client.get(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 405)

        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 400)

        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, 200)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertEqual(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(first_layer.attributes.filter(attribute='date'), 'xsd:dateTime')

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)

        # ensure a user who does not own the upload cannot configure an import from it.
        client.logout()
        client.login_as_admin()
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        client = AdminClient()

        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        client.login_as_non_admin()

        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.admin_user.username)
        non_admin = User.objects.get(username=self.non_admin_user.username)

        path = test_file('US_Shootings.csv')
        with open(path, 'rb') as stream:
            uploaded_file = SimpleUploadedFile('test_data', stream.read())
            admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
            admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

            non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

        client.login_as_admin()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = client.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        client = AdminClient()
        response = client.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        client.login_as_non_admin()

        response = client.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """Ensure users can delete their data.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(UploadedData.objects.all().count(), 1)
        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """Ensure that administrators can delete data that isn't theirs.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(UploadedData.objects.all().count(), 1)

        client.logout()
        client.login_as_admin()

        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))

        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """Tests providing a name in the configuration options.
        """
        client = AdminClient()
        client.login_as_non_admin()
        name = 'point-with-a-date'
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'name': name,
            'editable': True
        }
        response = client.post(
            '/importer-api/data-layers/1/configure/',
            data=json.dumps(payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, 200)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """Tests the import api.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True
        }

        self.assertIsInstance(
            UploadLayer.objects.first().configuration_options,
            dict
        )

        response = client.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = client.post(
            '/importer-api/data-layers/1/configure/',
            data=json.dumps([payload]),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn('task', response.content)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertEqual(first_layer.attributes[1].attribute_type, 'xsd:dateTime')

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, first_layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """Test the file extension validator.
        """

        for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions:
            filename = 'test.{0}'.format(extension)
            upload = SimpleUploadedFile(filename, '')
            self.assertIsNone(validate_file_extension(upload))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """Test the file extension validator.
        """

        with self.assertRaises(ValidationError):
            validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """Regression test for numeric field overflows in shapefiles.

        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import(
            'Walmart.zip',
            configs=[
                {
                    'configureTime':False,
                    'convert_to_date': ['W1_OPENDAT'],
                    'editable': True,
                    'index':0,
                    'name': 'Walmart',
                    'start_date': 'W1_OPENDAT'
                }
            ]
        )

    def test_multipolygon_shapefile(self):
        """Tests shapefile with multipart polygons.
        """

        self.generic_import('PhoenixFirstDues.zip', configs=[{'index': 0}])

    def test_istanbul(self):
        """Tests shapefile with multipart polygons and non-WGS84 SR.
        """
        result = self.generic_import(
            'Istanbul.zip',
            configs=[
                {'index': 0}
            ]
        )
        feature_type = self.catalog.get_resource(result.name)
        self.assertEqual(feature_type.projection, 'EPSG:32635')

    def test_gwc_handler(self):
        """Tests the GeoWebCache handler
        """
        layer = self.generic_import(
            'boxes_with_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.catalog.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()

        payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
        self.assertIn('regexParameterFilter', payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import(
            'boxes_with_date_iso_date.shp',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        gs_layer = self.catalog.get_layer(layer.name)
        self.catalog._cache.clear()
        gs_layer.fetch()

        payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
        self.assertNotIn('regexParameterFilter', payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

    def test_utf8(self):
        """Tests utf8 characters in attributes
        """
        path = test_file('china_provinces.shp')
        layer = self.generic_import(path)
        ogr = OGRImport(path)
        datastore, _ = ogr.open_target_datastore(ogr.target_store)
        sql = (
            "select NAME_CH from {0} where NAME_PY = 'An Zhou'"
            .format(layer.name)
        )
        result = datastore.ExecuteSQL(sql)
        feature = result.GetFeature(0)
        self.assertEqual(feature.GetField('name_ch'), '安州')

    def test_non_converted_date(self):
        """Test converting a field as date.
        """
        results = self.generic_import(
            'TM_WORLD_BORDERS_2005.zip',
            configs=[
                {
                    'index': 0,
                    'start_date': 'Year',
                    'configureTime': True
                }
            ]
        )
        layer = self.catalog.get_layer(results.typename)
        self.assertIn('time', layer.resource.metadata)
        self.assertEqual('year', layer.resource.metadata['time'].attribute)

    def test_fid_field(self):
        """
        Regression test for preserving an FID field when target layer supports
        it but source does not.
        """
        self.generic_import(
            'noaa_paleoclimate.zip',
            configs=[
                {
                    'index': 0
                }
            ]
        )

    def test_csv_with_wkb_geometry(self):
        """Exercise import of CSV files with multiple geometries.
        """
        filenames = [
            'police_csv.csv',
            'police_csv_nOGC.csv',
            'police_csv_noLatLon.csv',
            'police_csv_WKR.csv',
            'police_csv_nFID.csv',
            'police_csv_nOGFID.csv',
            'police_csv_noWKB.csv'
        ]

        for filename in filenames:
            self.generic_import(
                filename,
                {
                    'configureTime': True,
                    'convert_to_date': ['date_time'],
                    'editable': True,
                    'index': 0,
                    'name': filename.lower(),
                    'permissions': {
                        'users':{
                            'AnonymousUser': [
                                'change_layer_data',
                                'download_resourcebase',
                                'view_resourcebase'
                            ]
                        }
                    },
                    'start_date':'date_time',
                }
            )
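Most of the tests above exercise the same configure endpoint; a sketch of the per-layer configuration options they post (field names taken from the tests, values illustrative):

payload = [
    {
        'index': 0,                   # position of the layer within the upload
        'convert_to_date': ['date'],  # source fields to convert to date types
        'start_date': 'date',         # attribute driving the time dimension
        'configureTime': True,        # ask GeoServer to enable time metadata
        'editable': True,
        'permissions': {              # optional GeoNode permission mapping
            'users': {
                'AnonymousUser': ['view_resourcebase', 'download_resourcebase']
            }
        }
    }
]
# client.post('/importer-api/data-layers/{0}/configure/'.format(upload.id),
#             data=json.dumps(payload), content_type='application/json')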
Beispiel #15
0
class UploaderTests(TestCase):
    """Basic checks to make sure pages load, etc.
    """
    def create_datastore(self, connection, catalog):
        """Convenience method for creating a datastore.
        """
        settings = connection.settings_dict
        ds_name = settings['NAME']
        params = {
            'database': ds_name,
            'passwd': settings['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': settings['HOST'],
            'user': settings['USER'],
            'port': settings['PORT'],
            'enabled': 'True'
        }

        store = catalog.create_datastore(ds_name, workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(ds_name)

    def create_user(self, username, password, **kwargs):
        """Convenience method for creating users.
        """
        user, created = User.objects.get_or_create(username=username, **kwargs)

        if created:
            user.set_password(password)
            user.save()

        return user

    def setUp(self):

        self.assertTrue(
            os.path.exists(_TEST_FILES_DIR),
            'Test could not run due to missing test data at {0!r}'.format(
                _TEST_FILES_DIR))

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.admin_user = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_user = self.create_user('non_admin', 'non_admin')
        self.catalog = Catalog(ogc_server_settings.internal_rest,
                               *ogc_server_settings.credentials)
        if self.catalog.get_workspace('geonode') is None:
            self.catalog.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.catalog)

    def tearDown(self):
        """Clean up geoserver.
        """
        self.catalog.delete(self.datastore, recurse=True)

    def import_file(self, path, configs=None):
        """Imports the file.
        """
        if configs is None:
            configs = []
        self.assertTrue(os.path.exists(path))

        # run ogr2ogr
        ogr = OGRImport(path)
        layers = ogr.handle(configuration_options=configs)

        return layers

    def generic_import(self, filename, configs=None):
        if configs is None:
            configs = [{'index': 0}]

        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_results = []
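        # Each result is a (layer name or file path, configuration options)
        # pair; raster imports report the on-disk path of the GeoTIFF.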
        for result in results:
            if result[1].get('raster'):
                layer_path = result[0]
                layer_name = os.path.splitext(os.path.basename(layer_path))[0]
                layer = Layer.objects.get(name=layer_name)
                self.assertTrue(layer_path.endswith('.tif'))
                self.assertTrue(os.path.exists(layer_path))
                gdal_layer = gdal.OpenEx(layer_path)
                self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
                layer_results.append(layer)
            else:
                layer = Layer.objects.get(name=result[0])
                self.assertEqual(layer.srid, 'EPSG:4326')
                self.assertEqual(layer.store, self.datastore.name)
                self.assertEqual(layer.storeType, 'dataStore')

                if not path.endswith('zip'):
                    self.assertGreaterEqual(layer.attributes.count(),
                                            DataSource(path)[0].num_fields)

                layer_results.append(layer)

        return layer_results[0]

    def generic_api_upload(self, filenames, configs=None):
        """Tests the import api.
        """
        client = AdminClient()
        client.login_as_non_admin()

        # Don't accidentally iterate over given 'foo' as ['f', 'o', 'o'].
        self.assertNotIsInstance(filenames, str)

        # Upload Files
        outfiles = []
        for filename in filenames:
            path = test_file(filename)
            with open(path) as stream:
                data = stream.read()
            upload = SimpleUploadedFile(filename, data)
            outfiles.append(upload)
        response = client.post(reverse('uploads-new-json'), {
            'file': outfiles,
            'json': json.dumps(configs)
        },
                               follow=True)
        content = json.loads(response.content)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(content['id'], 1)

        # Configure Uploaded Files
        upload_id = content['id']
        upload_layers = UploadLayer.objects.filter(upload_id=upload_id)

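        # Match each uploaded layer to its configuration options by file name,
        # then POST that configuration to the importer API.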
        for upload_layer in upload_layers:
            for config in configs:
                if config['upload_file_name'] == os.path.basename(
                        upload_layer.name):
                    payload = config['config']
                    url = '/importer-api/data-layers/{0}/configure/'.format(
                        upload_layer.id)
                    response = client.post(url,
                                           data=json.dumps(payload),
                                           content_type='application/json')
                    self.assertEqual(response.status_code, 200)
                    url = '/importer-api/data-layers/{0}/'.format(
                        upload_layer.id)
                    response = client.get(url, content_type='application/json')
                    self.assertEqual(response.status_code, 200)

        return content

    def generic_raster_import(self, filename, configs=None):
        if configs is None:
            configs = [{'index': 0}]

        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_path = results[0][0]
        layer_name = os.path.splitext(os.path.basename(layer_path))[0]
        layer = Layer.objects.get(name=layer_name)
        self.assertTrue(layer_path.endswith('.tif'))
        self.assertTrue(os.path.exists(layer_path))
        gdal_layer = gdal.OpenEx(layer_path)
        self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
        return layer

    def test_multi_upload(self):
        """Tests Uploading Multiple Files
        """
        # Number of layers in each file
        upload_layer_counts = [1, 1, 1]
        upload = self.generic_api_upload(filenames=[
            'boxes_with_year_field.zip', 'boxes_with_date.zip',
            'point_with_date.geojson'
        ],
                                         configs=[{
                                             'upload_file_name':
                                             'boxes_with_year_field.shp',
                                             'config': [{
                                                 'index': 0
                                             }]
                                         }, {
                                             'upload_file_name':
                                             'boxes_with_date.shp',
                                             'config': [{
                                                 'index': 0
                                             }]
                                         }, {
                                             'upload_file_name':
                                             'point_with_date.geojson',
                                             'config': [{
                                                 'index': 0
                                             }]
                                         }])

        self.assertEqual(Layer.objects.count(), sum(upload_layer_counts))
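        # 'count' appears to include every file handled, i.e. the individual
        # members of the two shapefile archives plus the GeoJSON file.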
        self.assertEqual(9, upload['count'])

    def test_upload_with_slds(self):
        """Tests Uploading sld
        """
        upload = self.generic_api_upload(
            filenames=['boxes_with_date.zip', 'boxes.sld', 'boxes1.sld'],
            configs=[{
                'upload_file_name':
                'boxes_with_date.shp',
                'config': [{
                    'index': 0,
                    'default_style': 'boxes.sld',
                    'styles': ['boxes.sld', 'boxes1.sld']
                }]
            }])
        self.assertEqual(6, upload['count'])
        upload_id = upload['id']
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk

        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(6, upfiles_count)

        # Warning: this assumes that Layer pks equal UploadLayer pks
        layer = Layer.objects.get(pk=layer_id)
        gslayer = self.catalog.get_layer(layer.name)
        default_style = gslayer.default_style
        # TODO: can we use public API or omit this?
        self.catalog._cache.clear()
        self.assertEqual('boxes.sld', default_style.filename)

    def test_upload_with_metadata(self):
        """Tests Uploading metadata
        """
        upload = self.generic_api_upload(filenames=[
            'boxes_with_date.zip',
            'samplemetadata.xml',
        ],
                                         configs=[{
                                             'upload_file_name':
                                             'boxes_with_date.shp',
                                             'config': [{
                                                 'index':
                                                 0,
                                                 'metadata':
                                                 'samplemetadata.xml'
                                             }]
                                         }])
        self.assertEqual(5, upload['count'])
        upload_id = upload['id']
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk

        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(5, upfiles_count)

        layer = Layer.objects.get(pk=layer_id)
        self.assertEqual(layer.language, 'eng')
        self.assertEqual(layer.title,
                         'Old_Americas_LSIB_Polygons_Detailed_2013Mar')

    def test_geotiff_raster(self):
        """Exercise GeoTIFF raster import.
        """
        layer = self.generic_raster_import('test_grid.tif',
                                           configs=[{
                                               'index': 0
                                           }])
        self.assertTrue(layer.name.startswith('test_grid'))

    def test_nitf_raster(self):
        """Tests NITF raster import
        """
        layer = self.generic_raster_import('test_nitf.nitf')
        self.assertTrue(layer.name.startswith('test_nitf'))

    def test_box_with_year_field(self):
        """Tests the import of test_box_with_year_field.
        """

        layer = self.generic_import('boxes_with_year_field.shp',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """Tests the import of test_boxes_with_date.
        """

        layer = self.generic_import('boxes_with_date.shp',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date'],
                                        'start_date': 'date',
                                        'configureTime': True
                                    }])

        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_gpkg(self):
        """Tests the import of test_boxes_with_date.gpkg.
        """

        layer = self.generic_import('boxes_with_date.gpkg',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date'],
                                        'start_date': 'date',
                                        'configureTime': True
                                    }])

        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_plus_raster_gpkg_by_index(self):
        """ Tests the import of multilayer vector + tile geopackage using index, treating tile layers
            as rasters.
            Tile layers are now treated by default as a distinct layer type.
            This test forces them to still be treated as rasters and should be
            removed once tests for vector/tile geopackage files are in place.
        """

        layer = self.generic_import('boxes_plus_raster.gpkg',
                                    configs=[
                                        {
                                            'index': 0,
                                            'convert_to_date': ['date'],
                                            'start_date': 'date',
                                            'configureTime': True
                                        },
                                        {
                                            'index': 1
                                        },
                                        {
                                            'index': 2
                                        },
                                        {
                                            'index': 3
                                        },
                                        {
                                            'index': 4
                                        },
                                        {
                                            'index': 5
                                        },
                                        {
                                            'index': 6,
                                            'layer_type': 'raster'
                                        },
                                        {
                                            'index': 7,
                                            'layer_type': 'raster'
                                        },
                                    ])

        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """Tests a CSV with WKT polygon.
        """

        layer = self.generic_import('boxes_with_date.csv',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_csv_missing_features(self):
        """Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding.
        """

        self.generic_import('missing-features.csv', configs=[{'index': 0}])

    def test_boxes_with_iso_date(self):
        """Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """Import the same layer twice to ensure file names increment properly.
        """
        path = test_file('boxes_with_date_iso_date.zip')
        ogr = OGRImport(path)
        layers1 = ogr.handle({'index': 0, 'name': 'test'})
        layers2 = ogr.handle({'index': 0, 'name': 'test'})

        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_launder(self):
        """Ensure the launder function works as expected.
        """
        self.assertEqual(launder('tm_world_borders_simpl_0.3'),
                         'tm_world_borders_simpl_0_3')
        self.assertEqual(launder('Testing#'), 'testing_')
        self.assertEqual(launder('   '), '_')

    def test_boxes_with_date_iso_date_zip(self):
        """Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.zip',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """Tests the import of test_boxes_with_dates_bc.
        """

        layer = self.generic_import('boxes_with_dates_bc.shp',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date']
                                    }])

        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """Tests the import of point_with_date.geojson
        """

        layer = self.generic_import('point_with_date.geojson',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date']
                                    }])

        # OGR will name geojson layers 'ogrgeojson'; we rename them to the path basename.
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """Tests the import of test_boxes_with_end_date.

        This layer has a date and an end date field that are typed correctly.
        """

        layer = self.generic_import('boxes_with_end_date.shp',
                                    configs=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date', 'enddate'],
                                        'start_date':
                                        'date',
                                        'end_date':
                                        'enddate',
                                        'configureTime':
                                        True
                                    }])

        date_attr = get_layer_attr(layer, 'date_as_date')
        end_date_attr = get_layer_attr(layer, 'enddate_as_date')

        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.catalog.get_layer(layer.name).resource,
                       attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)
        self.generic_time_check(layer,
                                attribute=date_attr.attribute,
                                end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """Tests the import of us_states_kml.

        This layer has a date and an end date field that are typed correctly.
        """
        # TODO: Support time in kmls.
        layer = self.generic_import('us_states.kml', configs=[{'index': 0}])
        self.assertEqual(layer.name.lower(), "us_states")

    def test_mojstrovka_gpx(self):
        """Tests the import of mojstrovka.gpx.

        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import('mojstrovka.gpx',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['time'],
                                        'configureTime': True
                                    }])
        date_attr = get_layer_attr(layer, 'time_as_date')
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')

        configure_time(self.catalog.get_layer(layer.name).resource,
                       attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """Convenience method to run generic tests on time layers.
        """
        # TODO: can we use public API or omit this?
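        # gsconfig caches REST responses; clearing the private cache forces a
        # fresh read of the layer's time metadata.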
        self.catalog._cache.clear()
        resource = self.catalog.get_resource(layer.name,
                                             store=layer.store,
                                             workspace=self.workspace)

        time_info = resource.metadata['time']
        self.assertEqual('LIST', time_info.presentation)
        self.assertEqual(True, time_info.enabled)
        self.assertEqual(attribute, time_info.attribute)
        self.assertEqual(end_attribute, time_info.end_attribute)

    def test_us_shootings_csv(self):
        """Tests the import of US_Shootings.csv.
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')

        path = test_file('US_Shootings.csv')
        layer = self.generic_import(path,
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['Date']
                                    }])
        self.assertTrue(layer.name.startswith('us_shootings'))

        date_field = 'date'
        configure_time(self.catalog.get_layer(layer.name).resource,
                       attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """Tests the import of US_Civil_Rights_Sitins0.csv
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        layer = self.generic_import('US_Civil_Rights_Sitins0.csv',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['Date']
                                    }])
        self.assertEqual(layer.name.lower(), 'us_civil_rights_sitins0')

    def get_layer_names(self, path):
        """Gets layer names from a data source.
        """
        data_source = DataSource(path)
        return [layer.name for layer in data_source]

    def test_gdal_import(self):
        path = test_file('point_with_date.geojson')
        self.generic_import(path,
                            configs=[{
                                'index': 0,
                                'convert_to_date': ['date']
                            }])

    def test_wfs(self):
        """Tests the import from a WFS Endpoint
        """
        wfs = 'WFS:http://demo.geo-solutions.it/geoserver/tiger/wfs'
        ogr = OGRImport(wfs)
        configs = [
            {
                'layer_name': 'tiger:giant_polygon'
            },
            {
                'layer_name': 'tiger:poi'
            },
        ]
        layers = ogr.handle(configuration_options=configs)
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_arcgisjson(self):
        """Tests the import from a WFS Endpoint
        """
        endpoint = 'http://sampleserver6.arcgisonline.com/arcgis/rest/services/Water_Network/FeatureServer/16/query'\
            '?where=objectid=326&outfields=*&f=json'
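        # Requires network access to the public ArcGIS Online sample server.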
        ogr = OGRImport(endpoint)
        configs = [{'index': 0}]
        layers = ogr.handle(configuration_options=configs)
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_file_add_view(self):
        """Tests the file_add_view.
        """
        client = AdminClient()

        # test login required for this view
        request = client.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(reverse('uploads-new'), {'file': stream},
                                   follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path,
                         reverse('uploads-list'))
        self.assertEqual(len(response.context['object_list']), 1)
        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))

        with open(test_file('empty_file.geojson')) as stream:
            response = client.post(reverse('uploads-new'), {'file': stream},
                                   follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """Tests the file_add_view.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(reverse('uploads-new-json'),
                                   {'file': stream},
                                   follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """Tests the describe fields functionality.
        """
        path = test_file('US_Shootings.csv')
        with GDALInspector(path) as inspector:
            layers = inspector.describe_fields()

        self.assertEqual(layers[0]['layer_name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']], [
            'Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City',
            'Longitude', 'Latitude'
        ])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """Tests the describe fields functionality.
        """
        filenames = {
            'US_Shootings.csv': {'CSV'},
            'point_with_date.geojson': {'GeoJSON'},
            'mojstrovka.gpx': {'GPX'},
            'us_states.kml': {'LIBKML', 'KML'},
            'boxes_with_year_field.shp': {'ESRI Shapefile'},
            'boxes_with_date_iso_date.zip': {'ESRI Shapefile'}
        }
        from osgeo_importer.models import NoDataSourceFound
        try:
            for filename, file_type in sorted(filenames.items()):
                path = test_file(filename)
                with GDALInspector(path) as inspector:
                    self.assertIn(inspector.file_type(), file_type)
        except NoDataSourceFound:
            logging.exception('No data source found in: {0}'.format(path))
            raise

    def test_configure_view(self):
        """Tests the configuration view.
        """
        path = test_file('point_with_date.geojson')
        new_user = User.objects.create(username='test')
        new_user_perms = ['change_resourcebase_permissions']
        client = AdminClient()
        client.login_as_non_admin()

        with open(path) as stream:
            response = client.post(reverse('uploads-new'), {'file': stream},
                                   follow=True)

        upload = response.context['object_list'][0]

        payload = [{
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True,
            'permissions': {
                'users': {
                    'test':
                    new_user_perms,
                    'AnonymousUser': [
                        'change_layer_data', 'download_resourcebase',
                        'view_resourcebase'
                    ]
                }
            }
        }]
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json')

        self.assertEqual(response.status_code, 200)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertEqual(first_layer.attributes[1].attribute_type,
                         'xsd:dateTime')
        self.assertEqual(Layer.objects.all()[0].owner.username,
                         self.non_admin_user.username)

        perms = first_layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_user.username)

        # check user permissions
        expected_perms = [
            u'publish_resourcebase', u'change_resourcebase_permissions',
            u'delete_resourcebase', u'change_resourcebase',
            u'change_resourcebase_metadata', u'download_resourcebase',
            u'view_resourcebase', u'change_layer_style', u'change_layer_data'
        ]
        for perm in expected_perms:
            self.assertIn(perm, perms['users'][user])

        self.assertTrue(perms['users'][new_user])
        self.assertIn('change_resourcebase_permissions',
                      perms['users'][new_user])
        self.assertIn(
            'change_layer_data',
            perms['users'][User.objects.get(username='AnonymousUser')])

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, first_layer)

    def test_configure_view_convert_date(self):
        """Tests the configure view with a dataset that needs to be converted to a date.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('US_Shootings.csv')) as stream:
            response = client.post(reverse('uploads-new'), {'file': stream},
                                   follow=True)

        upload = response.context['object_list'][0]

        payload = [{
            'index': 0,
            'convert_to_date': ['Date'],
            'start_date': 'Date',
            'configureTime': True,
            'editable': True
        }]

        response = client.get(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 405)

        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 400)

        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json')
        self.assertEqual(response.status_code, 200)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertEqual(first_layer.attributes[1].attribute_type,
                         'xsd:dateTime')
        self.assertTrue(
            first_layer.attributes.filter(attribute='date').exists())

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)

        # ensure a user who does not own the upload cannot configure an import from it.
        client.logout()
        client.login_as_admin()
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        client = AdminClient()

        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        client.login_as_non_admin()

        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.admin_user.username)
        non_admin = User.objects.get(username=self.non_admin_user.username)

        path = test_file('US_Shootings.csv')
        with open(path, 'rb') as stream:
            uploaded_file = SimpleUploadedFile('test_data', stream.read())
            admin_upload = UploadedData.objects.create(state='Admin test',
                                                       user=admin)
            admin_upload.uploadfile_set.add(
                UploadFile.objects.create(file=uploaded_file))

            non_admin_upload = UploadedData.objects.create(
                state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(
                UploadFile.objects.create(file=uploaded_file))

        client.login_as_admin()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = client.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        client = AdminClient()
        response = client.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        client.login_as_non_admin()

        response = client.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """Ensure users can delete their data.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(reverse('uploads-new'), {'file': stream},
                                   follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """Ensure that administrators can delete data that isn't theirs.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(reverse('uploads-new'), {'file': stream},
                                   follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        client.logout()
        client.login_as_admin()

        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))

        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_naming_an_import(self):
        """Tests providing a name in the configuration options.
        """
        client = AdminClient()
        client.login_as_non_admin()
        name = 'point-with-a-date'
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(reverse('uploads-new'), {'file': stream},
                                   follow=True)

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'name': name,
            'editable': True
        }
        response = client.post('/importer-api/data-layers/1/configure/',
                               data=json.dumps(payload),
                               content_type='application/json')
        self.assertEqual(response.status_code, 200)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """Tests the import api.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(reverse('uploads-new'), {'file': stream},
                                   follow=True)

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True
        }

        self.assertIsInstance(
            UploadLayer.objects.first().configuration_options, dict)

        response = client.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = client.post('/importer-api/data-layers/1/configure/',
                               data=json.dumps([payload]),
                               content_type='application/json')
        self.assertEqual(response.status_code, 200)
        self.assertIn('task', response.content)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        self.assertEqual(first_layer.attributes[1].attribute_type,
                         'xsd:dateTime')

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, first_layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """Test the file extension validator.
        """

        for extension in load_handler(OSGEO_IMPORTER,
                                      'test.txt').valid_extensions:
            filename = 'test.{0}'.format(extension)
            upload = SimpleUploadedFile(filename, '')
            self.assertIsNone(validate_file_extension(upload))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """Test the file extension validator.
        """

        with self.assertRaises(ValidationError):
            validate_inspector_can_read(
                SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(
            SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """Regression test for numeric field overflows in shapefiles.

        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import('Walmart.zip',
                            configs=[{
                                'configureTime': False,
                                'convert_to_date': ['W1_OPENDAT'],
                                'editable': True,
                                'index': 0,
                                'name': 'Walmart',
                                'start_date': 'W1_OPENDAT'
                            }])

    def test_multipolygon_shapefile(self):
        """Tests shapefile with multipart polygons.
        """

        self.generic_import('PhoenixFirstDues.zip', configs=[{'index': 0}])

    def test_istanbul(self):
        """Tests shapefile with multipart polygons and non-WGS84 SR.
        """
        result = self.generic_import('Istanbul.zip', configs=[{'index': 0}])
        feature_type = self.catalog.get_resource(result.name)
        self.assertEqual(feature_type.projection, 'EPSG:32635')

    def test_houston_tx_annexations(self):
        """Tests Shapefile with originally unsupported EPSG Code.
        """
        result = self.generic_import('HoustonTXAnnexations.zip',
                                     configs=[{
                                         'index': 0
                                     }])
        feature_type = self.catalog.get_resource(result.name)
        self.assertEqual(feature_type.projection, 'EPSG:2278')

    def test_gwc_handler(self):
        """Tests the GeoWebCache handler
        """
        layer = self.generic_import('boxes_with_date.shp',
                                    configs=[{
                                        'index': 0,
                                        'convert_to_date': ['date'],
                                        'start_date': 'date',
                                        'configureTime': True
                                    }])

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.catalog.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()

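        # catalog.http behaves like an httplib2 client: request() returns a
        # (response, body) tuple and the response exposes 'status' like a dict.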
        payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
        self.assertIn('regexParameterFilter', payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configs=[{
                                        'index': 0
                                    }])
        gs_layer = self.catalog.get_layer(layer.name)
        self.catalog._cache.clear()
        gs_layer.fetch()

        payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
        self.assertNotIn('regexParameterFilter', payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

    def test_utf8(self):
        """Tests utf8 characters in attributes
        """
        path = test_file('china_provinces.shp')
        layer = self.generic_import(path)
        ogr = OGRImport(path)
        datastore, _ = ogr.open_target_datastore(ogr.target_store)
        sql = ("select NAME_CH from {0} where NAME_PY = 'An Zhou'".format(
            layer.name))
        result = datastore.ExecuteSQL(sql)
        feature = result.GetFeature(0)
        self.assertEqual(feature.GetField('name_ch'), '安州')

    def test_non_converted_date(self):
        """Test converting a field as date.
        """
        results = self.generic_import('TM_WORLD_BORDERS_2005.zip',
                                      configs=[{
                                          'index': 0,
                                          'start_date': 'Year',
                                          'configureTime': True
                                      }])
        layer = self.catalog.get_layer(results.typename)
        self.assertIn('time', layer.resource.metadata)
        self.assertEqual('year', layer.resource.metadata['time'].attribute)

    def test_fid_field(self):
        """
        Regression test for preserving an FID field when target layer supports
        it but source does not.
        """
        self.generic_import('noaa_paleoclimate.zip', configs=[{'index': 0}])

    def test_csv_with_wkb_geometry(self):
        """Exercise import of CSV files with multiple geometries.
        """
        filenames = [
            'police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv',
            'police_csv_WKR.csv', 'police_csv_nFID.csv',
            'police_csv_nOGFID.csv', 'police_csv_noWKB.csv'
        ]

        for filename in filenames:
            self.generic_import(
                filename, {
                    'configureTime': True,
                    'convert_to_date': ['date_time'],
                    'editable': True,
                    'index': 0,
                    'name': filename.lower(),
                    'permissions': {
                        'users': {
                            'AnonymousUser': [
                                'change_layer_data', 'download_resourcebase',
                                'view_resourcebase'
                            ]
                        }
                    },
                    'start_date': 'date_time',
                })
Beispiel #16
0
class UploaderTests(MapStoryTestMixin):
    """
    Basic checks to make sure pages load, etc.
    """
    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {
            'database': settings['NAME'],
            'passwd': settings['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': settings['HOST'],
            'user': settings['USER'],
            'port': settings['PORT'],
            'enabled': "True"
        }

        store = catalog.create_datastore(settings['NAME'],
                                         workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):

        if not os.path.exists(
                os.path.join(os.path.split(__file__)[0], 'test_ogr')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user('admin',
                                                        'admin',
                                                        is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user(
            'non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest,
                           *ogc_server_settings.credentials)
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)

    def generic_import(self, file, configuration_options=[{'index': 0}]):

        f = file
        filename = os.path.join(os.path.dirname(__file__), 'test_ogr', f)

        res = self.import_file(filename,
                               configuration_options=configuration_options)
        layer = Layer.objects.get(name=res[0][0])
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')

        if not filename.endswith('zip'):
            self.assertTrue(
                layer.attributes.count() >= DataSource(filename)[0].num_fields)

        # make sure we have at least one dateTime attribute
        attribute_types = [n.attribute_type for n in layer.attributes.all()]
        self.assertTrue('xsd:dateTime' in attribute_types
                        or 'xsd:date' in attribute_types)

        return layer

    def test_box_with_year_field(self):
        """
        Tests the import of test_box_with_year_field.
        """

        layer = self.generic_import('boxes_with_year_field.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """
        Tests the import of test_boxes_with_date.
        """

        layer = self.generic_import('boxes_with_date.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date'],
                                        'start_date':
                                        'date',
                                        'configureTime':
                                        True
                                    }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """
        Tests a CSV with WKT polygon.
        """

        layer = self.generic_import('boxes_with_date.csv',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_iso_date(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_iso_date_zip(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.zip',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """
        Tests the import of test_boxes_with_dates_bc.
        """

        layer = self.generic_import('boxes_with_dates_bc.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """
        Tests the import of point_with_date.geojson.
        """

        layer = self.generic_import('point_with_date.geojson',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])

        # OGR will name geojson layers 'ogrgeojson'; we rename them to the path basename.
        self.assertTrue(layer.name.startswith('point_with_date'))

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """
        Tests the import of test_boxes_with_end_date.
        This layer has a date and an end date field that are typed correctly.
        """

        layer = self.generic_import('boxes_with_end_date.shp')

        date_attr = filter(lambda attr: attr.attribute == 'date',
                           layer.attributes)[0]
        end_date_attr = filter(lambda attr: attr.attribute == 'enddate',
                               layer.attributes)[0]

        self.assertEqual(date_attr.attribute_type, 'xsd:date')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:date')

        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)

        self.generic_time_check(layer,
                                attribute=date_attr.attribute,
                                end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """
        Tests the import of us_states.kml.
        """
        # TODO: Support time in kmls.

        layer = self.generic_import('us_states.kml')

    def test_mojstrovka_gpx(self):
        """
        Tests the import of mojstrovka.gpx.
        This layer has a time field that is typed correctly.
        """
        layer = self.generic_import('mojstrovka.gpx')
        date_attr = filter(lambda attr: attr.attribute == 'time',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')
        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """
        Convenience method to run generic tests on time layers.
        """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name,
                                         store=layer.store,
                                         workspace=self.workspace)

        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)
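
    # `configure_time` used throughout these tests is imported from the importer
    # package and is not defined in this file. A minimal sketch of what it
    # presumably does, based on the DimensionInfo round trip shown in a later
    # example; the helper name and the explicit `catalog` argument are
    # assumptions, not the real signature:
    @staticmethod
    def _configure_time_sketch(catalog, resource, attribute=None, end_attribute=None):
        from geoserver.support import DimensionInfo
        time_info = DimensionInfo("time", "true", "LIST", None, "ISO8601", None,
                                  attribute=attribute)
        if end_attribute:
            time_info.end_attribute = end_attribute
        # reassigning the metadata dict is what makes gsconfig serialize the change
        resource.metadata = {'time': time_info}
        catalog.save(resource)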

    def test_us_shootings_csv(self):
        """
        Tests the import of US_shootings.csv.
        """

        filename = 'US_shootings.csv'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        layer = self.generic_import(filename,
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['Date']
                                    }])
        self.assertEqual(layer.name, 'us_shootings')

        date_field = 'date_as_date'
        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """
        Tests the import of US_Civil_Rights_Sitins0.csv.
        """

        filename = 'US_Civil_Rights_Sitins0.csv'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        layer = self.generic_import(f,
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['Date']
                                    }])

    def get_layer_names(self, in_file):
        """
        Gets layer names from a data source.
        """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        filename = 'point_with_date.geojson'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        self.generic_import(filename,
                            configuration_options=[{
                                'index': 0,
                                'convert_to_date': ['date']
                            }])

    def import_file(self, in_file, configuration_options=[]):
        """
        Imports the file.
        """
        self.assertTrue(os.path.exists(in_file))

        # run ogr2ogr
        gi = GDALImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)

        return layers

    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """
        Exposes a PostGIS feature type in geoserver.
        """
        headers = {"Content-type": "application/xml"}
        data = "<featureType><name>{name}</name></featureType>".format(
            name=name)
        url = datastore.href.replace(".xml",
                                     '/featuretypes.xml'.format(name=name))
        headers, response = catalog.http.request(url, "POST ", data, headers)
        return response
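
    # Hypothetical usage of the helper above: expose an existing PostGIS table
    # from the test datastore as a feature type.
    #
    #     self.createFeatureType(self.cat, self.datastore, 'my_table')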

    def test_create_vrt(self):
        """
        Tests the create_vrt function.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'US_shootings.csv')

        vrt = create_vrt(f)
        vrt.seek(0)
        output = vrt.read()

        self.assertTrue('name="US_shootings"' in output)
        self.assertTrue(
            '<SrcDataSource>{0}</SrcDataSource>'.format(f) in output)
        self.assertTrue(
            '<GeometryField encoding="PointFromColumns" x="Longitude" y="Latitude" />'
            in output)
        self.assertEqual(os.path.splitext(vrt.name)[1], '.vrt')
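
    # For reference, the VRT written for a CSV such as US_shootings.csv is an
    # OGR virtual datasource roughly of this shape (reconstructed from the
    # assertions above; only the asserted fragments are guaranteed verbatim):
    #
    #   <OGRVRTDataSource>
    #     <OGRVRTLayer name="US_shootings">
    #       <SrcDataSource>/path/to/US_shootings.csv</SrcDataSource>
    #       <GeometryField encoding="PointFromColumns" x="Longitude" y="Latitude" />
    #     </OGRVRTLayer>
    #   </OGRVRTDataSource>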

    def test_file_add_view(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'point_with_date.geojson')
        c = AdminClient()

        # test login required for this view
        request = c.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path,
                         reverse('uploads-list'))
        self.assertTrue(len(response.context['object_list']) == 1)

        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))

        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'empty_file.geojson')

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_describe_fields(self):
        """
        Tests the describe fields functionality.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'us_shootings.csv')
        fields = None

        with GDALInspector(f) as f:
            layers = f.describe_fields()

        self.assertEqual(layers[0]['name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']], [
            'Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City',
            'Longitude', 'Latitude'
        ])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """
        Tests GDAL/OGR file type detection.
        """
        files = (
            (os.path.join(os.path.dirname(__file__), 'test_ogr',
                          'us_shootings.csv'), 'CSV'),
            (os.path.join(os.path.dirname(__file__), 'test_ogr',
                          'point_with_date.geojson'), 'GeoJSON'),
            (os.path.join(os.path.dirname(__file__), 'test_ogr',
                          'mojstrovka.gpx'), 'GPX'),
            (os.path.join(os.path.dirname(__file__), 'test_ogr',
                          'us_states.kml'), 'KML'),
            (os.path.join(os.path.dirname(__file__), 'test_ogr',
                          'boxes_with_year_field.shp'), 'ESRI Shapefile'),
            (os.path.join(os.path.dirname(__file__), 'test_ogr',
                          'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'),
        )

        for path, file_type in files:
            with GDALInspector(path) as f:
                self.assertEqual(f.file_type(), file_type)

    def test_configure_view(self):
        """
        Tests the configuration view.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        upload = response.context['object_list'][0]

        payload = [{
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True
        }]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(
            upload.id),
                          data=json.dumps(payload),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)
        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(Layer.objects.all()[0].owner.username,
                         self.non_admin_username)

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)

    def test_configure_view_convert_date(self):
        """
        Tests the configure view with a dataset that needs to be converted to a date.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'US_shootings.csv')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        upload = response.context['object_list'][0]

        payload = [{
            'index': 0,
            'convert_to_date': ['Date'],
            'start_date': 'Date',
            'configureTime': True,
            'editable': True
        }]

        response = c.get('/importer-api/data-layers/{0}/configure/'.format(
            upload.id))
        self.assertEqual(response.status_code, 405)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(
            upload.id))
        self.assertEqual(response.status_code, 400)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(
            upload.id),
                          data=json.dumps(payload),
                          content_type='application/json')
        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(
            layer.attributes.filter(attribute='date_as_date').exists())

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        # ensure a user who does not own the upload cannot configure an import from it.
        c.logout()
        c.login_as_admin()
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(
            upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        c = AdminClient()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.username)
        non_admin = User.objects.get(username=self.non_admin_username)
        from .models import UploadFile

        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'US_shootings.csv')

        with open(f, 'rb') as f:
            uploaded_file = SimpleUploadedFile('test_data', f.read())
            admin_upload = UploadedData.objects.create(state='Admin test',
                                                       user=admin)
            admin_upload.uploadfile_set.add(
                UploadFile.objects.create(file=uploaded_file))

            non_admin_upload = UploadedData.objects.create(
                state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(
                UploadFile.objects.create(file=uploaded_file))

        c.login_as_admin()
        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = c.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        c = AdminClient()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """
        Ensure users can delete their data.
        """
        c = AdminClient()

        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """
        Ensure that administrators can delete data that isn't theirs.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        c.logout()
        c.login_as_admin()

        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))

        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """
        Tests providing a name in the configuration options.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()
        name = 'point-with-a-date'
        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'name': name,
            'editable': True
        }

        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps(payload),
                          content_type='application/json')

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """
        Tests the import api.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr',
                         'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True
        }

        self.assertTrue(
            isinstance(UploadLayer.objects.first().configuration_options,
                       dict))

        response = c.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps([payload]),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)
        self.assertTrue('task' in response.content)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """
        Test the file extension validator.
        """

        for extension in IMPORTER_VALID_EXTENSIONS:
            self.assertIsNone(
                validate_file_extension(
                    SimpleUploadedFile('test.{0}'.format(extension), '')))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))
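
    # `validate_file_extension` comes from the importer's validators module. A
    # minimal sketch of the behaviour exercised above, assuming it only compares
    # the extension against IMPORTER_VALID_EXTENSIONS (the helper name below is
    # hypothetical; ValidationError, IMPORTER_VALID_EXTENSIONS and os are the
    # names already imported for these tests):
    @staticmethod
    def _validate_file_extension_sketch(uploaded_file):
        extension = os.path.splitext(uploaded_file.name)[1].lstrip('.').lower()
        if extension not in IMPORTER_VALID_EXTENSIONS:
            raise ValidationError('Unsupported file extension: {0}'.format(extension))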

    def test_no_geom(self):
        """
        Test the inspector validator on files without a usable geometry.
        """

        with self.assertRaises(ValidationError):
            validate_inspector_can_read(
                SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(
            SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))
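
    # `validate_inspector_can_read` is also provided by the importer's validators.
    # Judging from the two cases above, it opens the upload with a GDAL/OGR
    # inspector and raises ValidationError when no usable geometry column is
    # found; a CSV with a WKT column is accepted even though the geometry type
    # is not known up front.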

    def test_numeric_overflow(self):
        """
        Regression test for numeric field overflows in shapefiles.
        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import('Walmart.zip',
                            configuration_options=[{
                                'index': 0,
                                'convert_to_date': []
                            }])

    def test_outside_bounds_regression(self):
        """
        Regression test: layers with features outside the projection bounds should still import.
        """
        self.generic_import('Spring_2015.zip',
                            configuration_options=[{
                                'index': 0
                            }])
        resource = self.cat.get_layer('spring_2015').resource
        self.assertEqual(resource.latlon_bbox,
                         ('-180.0', '180.0', '-90.0', '90.0', 'EPSG:4326'))

    def test_gwc_handler(self):
        """
        Tests the GeoWebCache handler
        """
        layer = self.generic_import('boxes_with_date.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date'],
                                        'start_date':
                                        'date',
                                        'configureTime':
                                        True
                                    }])

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.cat.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertTrue('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configuration_options=[{
                                        'index': 0
                                    }])
        gs_layer = self.cat.get_layer(layer.name)
        self.cat._cache.clear()
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertFalse('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)
Beispiel #17
0
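# NOTE: this snippet is a fragment of a larger publishing script. `cat` (a
# gsconfig Catalog), `workspace`, `projectName`, `styleDir`, `styles`, `files`
# and the `cfg` dict are defined in code that is not shown here; the trailing
# `break` indicates the style-upload block sits inside an enclosing loop
# (presumably an os.walk over the style directory).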
    logging.info("Uploading " + str(len(files)) + " styles")
    for f in files:
        with open(styleDir + f, 'r') as style:
            styleName = f.rstrip('.xml')
            cat.create_style(styleName, style.read(), workspace=workspace, overwrite=True)
            styles.append(workspace + ':' + styleName)
    break

# assign styles to created layers and enable timeDimension
layers = cat.get_layers()
for layer in layers:
    # if layer.dom is not None:
    if layer.name.startswith(workspace):
        # GetStyleName
        layerName = layer.resource.name
        # Set default style (time-independent)
        layer.default_style = workspace + ":" + layerName
        layer.styles = [s for s in styles if s.startswith(workspace + ":" + layerName)]
        cat.save(layer)
        # get coverage to activate time Dimension
        from geoserver.support import DimensionInfo
        coverage = cat.get_resource(layerName, projectName, workspace=workspace)
        timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None)
        coverage.metadata = {'time': timeInfo}
        coverage.CheckAuxiliaryMetadata = True
        cat.save(coverage)
if 'removeOutputFiles' in cfg['general'] and cfg['general']['removeOutputFiles'] is not False:
    shutil.rmtree(cfg['general']['workdir'] + '/outputFiles/' + projectName + '/')
    logging.info("Deleted outputFiles")
logging.info('App can be started at: ' + cfg['frontend']['path'] + '/projects/' + projectName + '/index.html')
Beispiel #18
0
class ModifyingTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")
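
    # DBPARAMS, shapefile_and_friends, drop_table and the error classes used in
    # these tests (ConflictingDataError, UploadError, FailedRequestError) are
    # presumably imported from gsconfig and its test harness in the part of the
    # module not shown here.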

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            self.cat.delete(ds)

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(
            ds, "import", {
                'shp': 'test/data/states.shp',
                'shx': 'test/data/states.shx',
                'dbf': 'test/data/states.dbf',
                'prj': 'test/data/states.prj'
            })

    def testCoverageStoreCreate(self):
        ds = self.cat.create_coveragestore2("coverage_gsconfig")
        ds.data_url = "file:data/mytiff.tiff"
        self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(
            ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before,
                          str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after,
                          str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before,
                          str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after,
                          str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats,
                          str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after,
                          str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            new_layer = self.cat.create_wmslayer(wmstest,
                                                 wmsstore,
                                                 name,
                                                 nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars,
                                     sf)

        self.assert_(
            self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError, lambda: self.cat.create_featurestore(
                "states_test", shapefile_plus_sidecars, sf))

        self.assertRaises(
            UploadError, lambda: self.cat.create_coveragestore(
                "states_raster_test", shapefile_plus_sidecars, sf))

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf))

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        # TODO: Uploading WorldImage file no longer works???
        # ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        # self.assertRaises(
        #         ConflictingDataError,
        #         lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # )

        self.assertRaises(
            UploadError, lambda: self.cat.create_featurestore(
                "Pk50095_vector", tiffdata, sf))

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff))

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        old_default_style = lyr.default_style
        lyr.default_style = (s for s in lyr.styles
                             if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred",
                              open("test/ted.sld").read(),
                              overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed",
                              open("test/fred.sld").read(),
                              workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed",
                              open("test/ted.sld").read(),
                              overwrite=True,
                              workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed",
                              open("test/fred.sld").read(),
                              workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned",
                              open("test/fred.sld").read(),
                              overwrite=True,
                              workspace="topp")
        self.cat.create_style("zed",
                              open("test/ted.sld").read(),
                              overwrite=True,
                              workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testTimeDimension(self):
        sf = self.cat.get_workspace("sf")
        files = shapefile_and_friends(
            os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)

        get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer(
            'boxes_with_end_date').resource

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time",
                                 "true",
                                 "LIST",
                                 None,
                                 "ISO8601",
                                 None,
                                 attribute="date")
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)

    def testImageMosaic(self):
        # testing the mosaic creation
        name = 'cea_mosaic'
        data = open('test/data/mosaic/cea.zip', 'rb')
        self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource

        self.assert_(resource is not None)

        # delete granule from mosaic
        coverage = name
        store = name
        granule_id = name + '.1'
        self.cat.mosaic_delete_granule(coverage, store, granule_id)
Beispiel #19
0
class ModifyingTests(unittest.TestCase):
  def setUp(self):
    self.cat = Catalog("http://localhost:8080/geoserver/rest")


  def testFeatureTypeSave(self):
    # test saving round trip
    rs = self.cat.get_resource("bugsites")
    old_abstract = rs.abstract
    new_abstract = "Not the original abstract"

    # Change abstract on server
    rs.abstract = new_abstract
    self.cat.save(rs)
    rs = self.cat.get_resource("bugsites")
    self.assertEqual(new_abstract, rs.abstract)

    # Restore abstract
    rs.abstract = old_abstract
    self.cat.save(rs)
    rs = self.cat.get_resource("bugsites")
    self.assertEqual(old_abstract, rs.abstract)


  def testCoverageSave(self):
    # test saving round trip
    rs = self.cat.get_resource("Arc_Sample")
    old_abstract = rs.abstract
    new_abstract = "Not the original abstract"

    # # Change abstract on server
    rs.abstract = new_abstract
    self.cat.save(rs)
    rs = self.cat.get_resource("Arc_Sample")
    self.assertEqual(new_abstract, rs.abstract)

    # Restore abstract
    rs.abstract = old_abstract
    self.cat.save(rs)
    rs = self.cat.get_resource("Arc_Sample")
    self.assertEqual(old_abstract, rs.abstract)

  def testFeatureTypeCreate(self):
    shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
    expected = {
      'shp': 'test/data/states.shp',
      'shx': 'test/data/states.shx',
      'dbf': 'test/data/states.dbf',
      'prj': 'test/data/states.prj'
    }

    self.assertEqual(len(expected), len(shapefile_plus_sidecars))
    for k, v in expected.iteritems():
      self.assertEqual(v, shapefile_plus_sidecars[k])
 
    sf = self.cat.get_workspace("sf")
    ft = self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

    self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)

    self.assertRaises(
        ConflictingDataError, 
        lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
    )

    self.assertRaises(
        UploadError,
        lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
    )

    bogus_shp = {
      'shp': 'test/data/Pk50095.tif',
      'shx': 'test/data/Pk50095.tif',
      'dbf':  'test/data/Pk50095.tfw',
      'prj':  'test/data/Pk50095.prj'
    }

    self.assertRaises(
        UploadError,
        lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
    )

  def testCoverageCreate(self):
    tiffdata = {
      'tiff': 'test/data/Pk50095.tif',
      'tfw':  'test/data/Pk50095.tfw',
      'prj':  'test/data/Pk50095.prj'
    }

    sf = self.cat.get_workspace("sf")
    ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

    self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

    self.assertRaises(
        ConflictingDataError, 
        lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
    )

    self.assertRaises(
        UploadError, 
        lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
    )

    bogus_tiff = {
        'tiff': 'test/data/states.shp',
        'tfw': 'test/data/states.shx',
        'prj': 'test/data/states.prj'
    }

    self.assertRaises(
        UploadError,
        lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
    )

  def testLayerSave(self):
    # test saving round trip
    lyr = self.cat.get_layer("states")
    old_attribution = lyr.attribution
    new_attribution = "Not the original attribution"

    # change attribution on server
    lyr.attribution = new_attribution
    self.cat.save(lyr)
    lyr = self.cat.get_layer("states")
    self.assertEqual(new_attribution, lyr.attribution)

    # Restore attribution
    lyr.attribution = old_attribution
    self.cat.save(lyr)
    self.assertEqual(old_attribution, lyr.attribution)


  def testLayerDelete(self):
    lyr = self.cat.get_layer("states_test") 
    self.cat.delete(lyr)
    self.assert_(self.cat.get_layer("states_test") is None)
    
  def testWorkspaceDelete(self): 
    pass 

  def testFeatureTypeDelete(self):
    pass

  def testCoverageDelete(self):
    pass

  def testDataStoreDelete(self):
    pass
Beispiel #20
0
class UploaderTests(MapStoryTestMixin):
    """
    Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {'database': settings['NAME'],
                  'passwd': settings['PASSWORD'],
                  'namespace': 'http://www.geonode.org/',
                  'type': 'PostGIS',
                  'dbtype': 'postgis',
                  'host': settings['HOST'],
                  'user': settings['USER'],
                  'port': settings['PORT'],
                  'enabled': "True"}

        store = catalog.create_datastore(settings['NAME'], workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):

        if not os.path.exists(os.path.join(os.path.split(__file__)[0], 'test_ogr')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user('non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials)
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)

    def generic_import(self, file, configuration_options=[{'index': 0}]):

        f = file
        filename = os.path.join(os.path.dirname(__file__), 'test_ogr', f)

        res = self.import_file(filename, configuration_options=configuration_options)

        layer = Layer.objects.get(name=res[0][0])
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')

        if not filename.endswith('zip'):
            self.assertTrue(layer.attributes.count() >= DataSource(filename)[0].num_fields)

        # make sure we have at least one date or dateTime attribute
        self.assertTrue(any(n.attribute_type in ('xsd:date', 'xsd:dateTime')
                            for n in layer.attributes.all()))

        return layer

    def test_box_with_year_field(self):
        """
        Tests the import of test_box_with_year_field.
        """

        layer = self.generic_import('boxes_with_year_field.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """
        Tests the import of test_boxes_with_date.
        """

        layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0,
                                                                                   'convert_to_date': ['date'],
                                                                                   'start_date': 'date',
                                                                                   'configureTime': True
                                                                                   }])

        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """
        Tests a CSV with WKT polygon.
        """

        layer = self.generic_import('boxes_with_date.csv', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_schema_csv(self):
        """
        Tests a CSV from schema download.
        """
        layer = self.generic_import('schema_download.csv', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_iso_date(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """
        Tests importing the same layer twice to ensure incrementing file names is properly handled.
        """
        filename = os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_date_iso_date.zip')

        gi = GDALImport(filename)
        layers1 = gi.handle({'index': 0, 'name': 'test'})
        layers2 = gi.handle({'index': 0,  'name': 'test'})

        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_boxes_with_date_iso_date_zip(self):
        """
        Tests the import of boxes_with_date_iso_date.zip.
        """

        layer = self.generic_import('boxes_with_date_iso_date.zip', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """
        Tests the import of test_boxes_with_dates_bc.
        """

        layer = self.generic_import('boxes_with_dates_bc.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])

        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """
        Tests the import of point_with_date.geojson.
        """

        layer = self.generic_import('point_with_date.geojson', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])

        # OGR names GeoJSON layers 'ogrgeojson'; the importer renames them to the file's basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """
        Tests the import of test_boxes_with_end_date.
        This layer has a date and an end date field that are typed correctly.
        """

        layer = self.generic_import('boxes_with_end_date.shp')

        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        end_date_attr = filter(lambda attr: attr.attribute == 'enddate', layer.attributes)[0]

        self.assertEqual(date_attr.attribute_type, 'xsd:date')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:date')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)

        self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """
        Tests the import of us_states.kml.
        """
        # TODO: Support time in kmls.

        layer = self.generic_import('us_states.kml')

    def test_mojstrovka_gpx(self):
        """
        Tests the import of mojstrovka.gpx.
        This layer has a time field that is typed correctly.
        """
        layer = self.generic_import('mojstrovka.gpx')
        date_attr = filter(lambda attr: attr.attribute == 'time', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')
        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """
        Convenience method to run generic tests on time layers.
        """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace)

        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)

    def test_us_shootings_csv(self):
        """
        Tests the import of US_shootings.csv.
        """

        filename = 'US_shootings.csv'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        layer = self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])
        self.assertEqual(layer.name, 'us_shootings')

        date_field = 'date'
        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """
        Tests the import of US_Civil_Rights_Sitins0.csv.
        """

        filename = 'US_Civil_Rights_Sitins0.csv'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        layer = self.generic_import(f, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])

    def get_layer_names(self, in_file):
        """
        Gets layer names from a data source.
        """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        filename = 'point_with_date.geojson'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        self.generic_import(filename, configuration_options=[{'index': 0,  'convert_to_date': ['date']}])

    def import_file(self, in_file, configuration_options=[]):
        """
        Imports the file.
        """
        self.assertTrue(os.path.exists(in_file))

        # run ogr2ogr
        gi = GDALImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)

        return layers

    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """
        Exposes a PostGIS feature type in geoserver.
        """
        headers = {"Content-type": "application/xml"}
        data = "<featureType><name>{name}</name></featureType>".format(name=name)
        url = datastore.href.replace(".xml", "/featuretypes.xml")
        headers, response = catalog.http.request(url, "POST", data, headers)
        return response

    def test_create_vrt(self):
        """
        Tests the create_vrt function.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv')

        vrt = create_vrt(f)
        vrt.seek(0)
        output = vrt.read()

        self.assertTrue('name="US_shootings"' in output)
        self.assertTrue('<SrcDataSource>{0}</SrcDataSource>'.format(f) in output)
        self.assertTrue('<GeometryField encoding="PointFromColumns" x="Longitude" y="Latitude" />' in output)
        self.assertEqual(os.path.splitext(vrt.name)[1], '.vrt')

    def test_file_add_view(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()

        # test login required for this view
        response = c.get(reverse('uploads-new'))
        self.assertEqual(response.status_code, 302)

        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path, reverse('uploads-list'))
        self.assertTrue(len(response.context['object_list']) == 1)

        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))


        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'empty_file.geojson')

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new-json'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """
        Tests the describe fields functionality.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv')

        with GDALInspector(f) as inspector:
            layers = inspector.describe_fields()

        self.assertEqual(layers[0]['name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed',
                                                                    'Wounded', 'Location', 'City',
                                                                    'Longitude', 'Latitude'])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """
        Tests GDAL file type detection.
        """
        files = ((os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv'), 'CSV'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson'), 'GeoJSON'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'mojstrovka.gpx'), 'GPX'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_states.kml'), 'KML'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_year_field.shp'), 'ESRI Shapefile'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'),
        )

        for path, file_type in files:
            with GDALInspector(path) as f:
                self.assertEqual(f.file_type(), file_type)

    def test_append(self):
        """
        Tests the configuration view.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'name': 'append',
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True,
                    'editable': True,
                    'permissions': {'users': {'test': new_user_perms,
                                              'AnonymousUser': ["change_layer_data", "download_resourcebase",
                                                                "view_resourcebase"]}}}]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*) from append')
        self.assertEqual(1, cursor.fetchone()[0])

        payload[0]['appendTo'] = 'geonode:append'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date_2.geojson')


        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*) from append')
        self.assertEqual(2, cursor.fetchone()[0])

    def test_trunc_append(self):
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'long_attr_name.geojson')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'name': 'append',
                    'convert_to_date': ['date_as_date'],
                    'start_date': 'date_as_date',
                    'configureTime': True,
                    'editable': True,
                    'permissions': {'users': {'test': new_user_perms,
                                              'AnonymousUser': ["change_layer_data", "download_resourcebase",
                                                                "view_resourcebase"]}}}]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*) from append')
        self.assertEqual(1, cursor.fetchone()[0])

        payload[0]['appendTo'] = 'geonode:append'
        payload[0]['convert_to_date'] = ['date_as_da']
        payload[0]['start_date'] = 'date_as_da'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'long_attr_trunc.geojson')


        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*), date from append group by date')
        result = cursor.fetchone()
        # ensure that the feature was added and the attribute was appended
        self.assertEqual(2, result[0])
        self.assertIsNotNone(result[1])

    def test_schema_append(self):
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'schema_initial.zip')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'name': 'append',
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True,
                    'editable': True,
                    'permissions': {'users': {'test': new_user_perms,
                                              'AnonymousUser': ["change_layer_data", "download_resourcebase",
                                                                "view_resourcebase"]}}}]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*) from append')
        self.assertEqual(1, cursor.fetchone()[0])

        payload[0]['appendTo'] = 'geonode:append'
        payload[0]['convert_to_date'] = ['date']
        payload[0]['start_date'] = 'date'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'schema_append.zip')


        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        cursor = db.connections['datastore'].cursor()
        cursor.execute('select count(*), date from append group by date')
        result = cursor.fetchone()
        # ensure that the feature was added and the attribute was appended
        self.assertEqual(2, result[0])
        self.assertIsNotNone(result[1])

    def test_configure_view(self):
        """
        Tests the configuration view.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True,
                    'editable': True,
                    'permissions': {'users': {'test': new_user_perms,
                                              'AnonymousUser': ["change_layer_data", "download_resourcebase",
                                                                "view_resourcebase"]}}}]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username)

        perms = layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_username)

        # check user permissions
        for perm in [u'publish_resourcebase', u'change_resourcebase_permissions',
                     u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata',
                     u'download_resourcebase', u'view_resourcebase', u'change_layer_style',
                     u'change_layer_data']:
            self.assertIn(perm, perms['users'][user])

        self.assertTrue(perms['users'][new_user])
        self.assertIn('change_resourcebase_permissions', perms['users'][new_user])

        self.assertIn("change_layer_data", perms['users'][User.objects.get(username='******')])

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)

    def test_configure_view_convert_date(self):
        """
        Tests the configure view with a dataset that needs to be converted to a date.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'convert_to_date': ['Date'],
                    'start_date': 'Date',
                    'configureTime': True,
                    'editable': True}]


        response = c.get('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 405)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 400)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')
        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(layer.attributes.filter(attribute='date').exists())

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        # ensure a user who does not own the upload cannot configure an import from it.
        c.logout()
        c.login_as_admin()
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        c = AdminClient()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.username)
        non_admin = User.objects.get(username=self.non_admin_username)
        from .models import UploadFile

        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv')

        with open(f, 'rb') as f:
            uploaded_file = SimpleUploadedFile('test_data', f.read())
            admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
            admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

            non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

        c.login_as_admin()
        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = c.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        c = AdminClient()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """
        Ensure users can delete their data.
        """
        c = AdminClient()

        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """
        Ensure that administrators can delete data that isn't theirs.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        c.logout()
        c.login_as_admin()

        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))

        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)


    def naming_an_import(self):
        """
        Tests providing a name in the configuration options.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()
        name = 'point-with-a-date'
        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'name': name,
                   'editable': True}

        response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload),
                          content_type='application/json')

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """
        Tests the import api.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'editable': True}

        self.assertTrue(isinstance(UploadLayer.objects.first().configuration_options, dict))

        response = c.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)
        self.assertTrue('task' in response.content)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """
        Test the file extension validator.
        """

        for extension in IMPORTER_VALID_EXTENSIONS:
            self.assertIsNone(validate_file_extension(SimpleUploadedFile('test.{0}'.format(extension), '')))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """
        Test that validate_inspector_can_read rejects CSVs without a recognizable geometry column.
        """

        with self.assertRaises(ValidationError):
            validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """
        Regression test for numeric field overflows in shapefiles.
        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import('Walmart.zip', configuration_options=[{'index': 0, 'convert_to_date': []}])

    def test_outside_bounds_regression(self):
        """
        Regression where layers with features outside projection bounds fail.
        """
        self.generic_import('Spring_2015.zip', configuration_options=[{'index': 0 }])
        resource = self.cat.get_layer('spring_2015').resource
        self.assertEqual(resource.latlon_bbox, ('-180.0', '180.0', '-90.0', '90.0', 'EPSG:4326'))

    def test_multipolygon_shapefile(self):
        """
        Tests shapefile with multipart polygons.
        """

        self.generic_import('PhoenixFirstDues.zip', configuration_options=[{'index': 0}])

    def test_gwc_handler(self):
        """
        Tests the GeoWebCache handler
        """
        layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0,
                                                                                   'convert_to_date': ['date'],
                                                                                   'start_date': 'date',
                                                                                   'configureTime': True
                                                                                   }])

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.cat.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()
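        # With the time dimension enabled, the GWC layer document is
        # expected to contain a regexParameterFilter.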

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertTrue('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0}])
        gs_layer = self.cat.get_layer(layer.name)
        self.cat._cache.clear()
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertFalse('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)
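The tests above repeatedly call configure_time() and then verify the result through generic_time_check(). A minimal standalone sketch of that verification step, using only the gsconfig calls already shown (Catalog, get_layer, resource.metadata); the REST URL, credentials and layer name below are placeholder assumptions, not values taken from the test suite:

from geoserver.catalog import Catalog


def print_time_dimension(layer_name='boxes_with_date',
                         rest_url='http://localhost:8080/geoserver/rest',
                         user='admin', password='geoserver'):
    # Connect to the GeoServer REST API (URL and credentials are placeholders).
    cat = Catalog(rest_url, user, password)
    layer = cat.get_layer(layer_name)
    if layer is None:
        print 'No such layer: %s' % layer_name
        return None

    # The time dimension lives in the resource's metadata under the 'time' key.
    time_info = layer.resource.metadata.get('time')
    if time_info is None:
        print 'No time dimension configured for %s' % layer_name
        return None

    print 'enabled:      %s' % time_info.enabled
    print 'presentation: %s' % time_info.presentation
    print 'attribute:    %s' % time_info.attribute
    return time_info


if __name__ == '__main__':
    print_time_dimension()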
Beispiel #21
0
class CatalogTests(unittest.TestCase):
  def setUp(self):
    self.cat = Catalog("http://localhost:8080/geoserver/rest")


  def testWorkspaces(self):
    self.assertEqual(7, len(self.cat.get_workspaces()))
    # test commented out since the geoserver default workspace is not consistent
    # self.assertEqual("cite", self.cat.get_default_workspace().name)
    self.assertEqual("topp", self.cat.get_workspace("topp").name)


  def testStores(self):
    topp = self.cat.get_workspace("topp")
    sf = self.cat.get_workspace("sf")
    self.assertEqual(9, len(self.cat.get_stores()))
    self.assertEqual(2, len(self.cat.get_stores(topp)))
    self.assertEqual(2, len(self.cat.get_stores(sf)))
    self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile", topp).name)
    self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile").name)
    self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile").name)
    self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
    self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

  
  def testResources(self):
    topp = self.cat.get_workspace("topp")
    sf = self.cat.get_workspace("sf")
    states = self.cat.get_store("states_shapefile", topp)
    sfdem = self.cat.get_store("sfdem", sf)
    self.assertEqual(19, len(self.cat.get_resources()))
    self.assertEqual(1, len(self.cat.get_resources(states)))
    self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
    self.assertEqual(1, len(self.cat.get_resources(sfdem)))
    self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))

    self.assertEqual("states", self.cat.get_resource("states", states).name)
    self.assertEqual("states", self.cat.get_resource("states", workspace=topp).name)
    self.assertEqual("states", self.cat.get_resource("states").name)
    states = self.cat.get_resource("states")

    fields = [
        states.title,
        states.abstract,
        states.native_bbox,
        states.latlon_bbox,
        states.projection,
        states.projection_policy
    ]

    self.assertFalse(None in fields, str(fields))
    self.assertFalse(len(states.keywords) == 0)
    self.assertFalse(len(states.attributes) == 0)
    self.assertTrue(states.enabled)

    self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
    self.assertEqual("sfdem", self.cat.get_resource("sfdem", workspace=sf).name)
    self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)


  def testLayers(self):
    expected = set(["Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
      "bugsites", "restricted", "streams", "archsites", "roads",
      "tasmania_roads", "tasmania_water_bodies", "tasmania_state_boundaries",
      "tasmania_cities", "states", "poly_landmarks", "tiger_roads", "poi",
      "giant_polygon"
    ])
    actual = set(l.name for l in self.cat.get_layers())
    missing = expected - actual
    extras = actual - expected
    message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
    self.assert_(len(expected ^ actual) == 0, message)

    states = self.cat.get_layer("states")

    self.assert_("states", states.name)
    self.assert_(isinstance(states.resource, ResourceInfo))
    self.assertEqual(set(s.name for s in states.styles), set(['pophatch', 'polygon']))
    self.assertEqual(states.default_style.name, "population")

  def testLayerGroups(self):
    expected = set(["tasmania", "tiger-ny", "spearfish"])
    actual = set(l.name for l in self.cat.get_layergroups())
    missing = expected - actual
    extras = actual - expected
    message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
    self.assert_(len(expected ^ actual) == 0, message)

    tas = self.cat.get_layergroup("tasmania")

    self.assert_("tasmania", tas.name)
    self.assert_(isinstance(tas, LayerGroup))
    self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
    self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

  def testStyles(self):
    self.assertEqual(20, len(self.cat.get_styles()))
    self.assertEqual("population", self.cat.get_style("population").name)
Beispiel #22
0
class ModifyingTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)
        
        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
                        [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
                        rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)


        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(
            host="localhost", port="5432", database="db", user="******",
            passwd="password", dbtype="postgis")
        self.cat.save(ds)

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(
            host="localhost", port="5432", database="db", user="******",
            passwd="password", dbtype="postgis")
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    def testCoverageStoreCreate(self):
        ds = self.cat.create_coveragestore2("coverage_gsconfig")
        ds.data_url = "file:data/mytiff.tiff"
        self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs) 

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats))


    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])
 
        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError, 
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf':    'test/data/Pk50095.tfw',
            'prj':    'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)


    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw':    'test/data/Pk50095.tfw',
            'prj':    'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        # TODO: Uploading WorldImage file no longer works???
        # ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        # self.assertRaises(
        #         ConflictingDataError, 
        #         lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # )

        self.assertRaises(
            UploadError, 
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")
     
        old_default_style = lyr.default_style
        lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])


    def testStyles(self):
        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self): 
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)
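Every method in ModifyingTests follows the same round-trip pattern: fetch an object, change a field, cat.save() it, re-fetch to confirm the change persisted, then restore the original value so the catalog is left as it was found. A minimal sketch of that pattern against the sample 'bugsites' layer, assuming default admin credentials on a local GeoServer:

from geoserver.catalog import Catalog


def roundtrip_abstract(resource_name='bugsites',
                       rest_url='http://localhost:8080/geoserver/rest',
                       user='admin', password='geoserver'):
    cat = Catalog(rest_url, user, password)

    # Fetch the resource and remember its current abstract.
    resource = cat.get_resource(resource_name)
    original = resource.abstract

    # Change the abstract on the server.
    resource.abstract = 'Temporary abstract set by roundtrip_abstract()'
    cat.save(resource)

    # Re-fetch to confirm the change was persisted server-side.
    resource = cat.get_resource(resource_name)
    assert resource.abstract == 'Temporary abstract set by roundtrip_abstract()'

    # Restore the original abstract so the catalog is left unchanged.
    resource.abstract = original
    cat.save(resource)


if __name__ == '__main__':
    roundtrip_abstract()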
Beispiel #23
0
class UploaderTests(DjagnoOsgeoMixin):
    """
    Basic checks to make sure pages load, etc.
    """
    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {
            'database': settings['NAME'],
            'passwd': settings['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': settings['HOST'],
            'user': settings['USER'],
            'port': settings['PORT'],
            'enabled': "True"
        }

        store = catalog.create_datastore(settings['NAME'],
                                         workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):

        if not os.path.exists(
                os.path.join(
                    os.path.split(__file__)[0], '..', 'importer-test-files')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user('admin',
                                                        'admin',
                                                        is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user(
            'non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest,
                           *ogc_server_settings.credentials)
        if self.cat.get_workspace('geonode') is None:
            self.cat.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)

    def generic_import(self, file, configuration_options=[{'index': 0}]):

        f = file
        filename = os.path.join(os.path.dirname(__file__), '..',
                                'importer-test-files', f)

        res = self.import_file(filename,
                               configuration_options=configuration_options)
        layer_results = []

        for result in res:
            if result[1].get('raster'):
                layerfile = result[0]
                layername = os.path.splitext(os.path.basename(layerfile))[0]
                layer = Layer.objects.get(name=layername)
                self.assertTrue(layerfile.endswith('.tif'))
                self.assertTrue(os.path.exists(layerfile))
                l = gdal.OpenEx(layerfile)
                self.assertEqual(l.GetDriver().ShortName, 'GTiff')
                layer_results.append(layer)
            else:
                layer = Layer.objects.get(name=result[0])
                self.assertEqual(layer.srid, 'EPSG:4326')
                self.assertEqual(layer.store, self.datastore.name)
                self.assertEqual(layer.storeType, 'dataStore')

                if not filename.endswith('zip'):
                    self.assertTrue(layer.attributes.count() >= DataSource(
                        filename)[0].num_fields)

                layer_results.append(layer)

        return layer_results[0]

    def generic_raster_import(self,
                              file,
                              configuration_options=[{
                                  'index': 0
                              }]):
        f = file
        filename = os.path.join(os.path.dirname(__file__), '..',
                                'importer-test-files', f)
        res = self.import_file(filename,
                               configuration_options=configuration_options)
        layerfile = res[0][0]
        layername = os.path.splitext(os.path.basename(layerfile))[0]
        layer = Layer.objects.get(name=layername)
        self.assertTrue(layerfile.endswith('.tif'))
        self.assertTrue(os.path.exists(layerfile))
        l = gdal.OpenEx(layerfile)
        self.assertEqual(l.GetDriver().ShortName, 'GTiff')
        return layer

    def test_raster(self):
        """
        Tests raster import
        """
        layer = self.generic_raster_import('test_grid.tif',
                                           configuration_options=[{
                                               'index': 0
                                           }])

    def test_box_with_year_field(self):
        """
        Tests the import of test_box_with_year_field.
        """

        layer = self.generic_import('boxes_with_year_field.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """
        Tests the import of test_boxes_with_date.
        """

        layer = self.generic_import('boxes_with_date.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date'],
                                        'start_date':
                                        'date',
                                        'configureTime':
                                        True
                                    }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_gpkg(self):
        """
        Tests the import of test_boxes_with_date.gpkg.
        """

        layer = self.generic_import('boxes_with_date.gpkg',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date'],
                                        'start_date':
                                        'date',
                                        'configureTime':
                                        True
                                    }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_plus_raster_gpkg_by_index(self):
        """
        Tests the import of multilayer vector + raster geopackage using index
        """

        layer = self.generic_import('boxes_plus_raster.gpkg',
                                    configuration_options=[
                                        {
                                            'index': 0,
                                            'convert_to_date': ['date'],
                                            'start_date': 'date',
                                            'configureTime': True
                                        },
                                        {
                                            'index': 1
                                        },
                                        {
                                            'index': 2
                                        },
                                        {
                                            'index': 3
                                        },
                                        {
                                            'index': 4
                                        },
                                        {
                                            'index': 5
                                        },
                                        {
                                            'index': 6
                                        },
                                        {
                                            'index': 7
                                        },
                                    ])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """
        Tests a CSV with WKT polygon.
        """

        layer = self.generic_import('boxes_with_date.csv',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_csv_missing_features(self):
        """
        Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding.
        """

        self.generic_import('missing-features.csv',
                            configuration_options=[{
                                'index': 0
                            }])

    def test_boxes_with_iso_date(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """
        Tests importing the same layer twice to ensure incrementing file names is properly handled.
        """
        filename = os.path.join(os.path.dirname(__file__), '..',
                                'importer-test-files',
                                'boxes_with_date_iso_date.zip')

        gi = OGRImport(filename)
        layers1 = gi.handle({'index': 0, 'name': 'test'})
        layers2 = gi.handle({'index': 0, 'name': 'test'})

        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_launder(self):
        """
        Ensure the launder function works as expected.
        """
        self.assertEqual(launder('tm_world_borders_simpl_0.3'),
                         'tm_world_borders_simpl_0_3')
        self.assertEqual(launder('Testing#'), 'testing_')
        self.assertEqual(launder('   '), '_')

    def test_boxes_with_date_iso_date_zip(self):
        """
        Tests the import of boxes_with_date_iso_date.zip.
        """

        layer = self.generic_import('boxes_with_date_iso_date.zip',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """
        Tests the import of test_boxes_with_dates_bc.
        """

        layer = self.generic_import('boxes_with_dates_bc.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """
        Tests the import of point_with_date.geojson
        """

        layer = self.generic_import('point_with_date.geojson',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date']
                                    }])

        # OGR will name GeoJSON layers 'ogrgeojson'; we rename them to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(
            self.cat.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """
        Tests the import of test_boxes_with_end_date.
        This layer has a date and an end date field that are typed correctly.
        """

        layer = self.generic_import('boxes_with_end_date.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date', 'enddate'],
                                        'start_date':
                                        'date',
                                        'end_date':
                                        'enddate',
                                        'configureTime':
                                        True
                                    }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date',
                           layer.attributes)[0]
        end_date_attr = filter(
            lambda attr: attr.attribute == 'enddate_as_date',
            layer.attributes)[0]

        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)

        self.generic_time_check(layer,
                                attribute=date_attr.attribute,
                                end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """
        Tests the import of us_states.kml.
        """
        # TODO: Support time in kmls.

        layer = self.generic_import('us_states.kml',
                                    configuration_options=[{
                                        'index': 0
                                    }])

    def test_mojstrovka_gpx(self):
        """
        Tests the import of mojstrovka.gpx.
        This layer has a time field that is converted to a date.
        """
        layer = self.generic_import('mojstrovka.gpx',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['time'],
                                        'configureTime':
                                        True
                                    }])
        date_attr = filter(lambda attr: attr.attribute == 'time_as_date',
                           layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')
        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """
        Convenience method to run generic tests on time layers.
        """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name,
                                         store=layer.store,
                                         workspace=self.workspace)

        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)

    def test_us_shootings_csv(self):
        """
        Tests the import of US_Shootings.csv.
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')

        filename = 'US_Shootings.csv'
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', filename)
        layer = self.generic_import(filename,
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['Date']
                                    }])
        self.assertTrue(layer.name.startswith('us_shootings'))

        date_field = 'date'
        configure_time(self.cat.get_layer(layer.name).resource,
                       attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """
        Tests the import of US_Civil_Rights_Sitins0.csv
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        layer = self.generic_import("US_Civil_Rights_Sitins0.csv",
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['Date']
                                    }])

    def get_layer_names(self, in_file):
        """
        Gets layer names from a data source.
        """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        filename = 'point_with_date.geojson'
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', filename)
        self.generic_import(filename,
                            configuration_options=[{
                                'index': 0,
                                'convert_to_date': ['date']
                            }])

    def test_wfs(self):
        """
        Tests the import from a WFS Endpoint
        """
        wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs'
        gi = OGRImport(wfs)
        layers = gi.handle(configuration_options=[{
            'layer_name': 'og:bugsites'
        }, {
            'layer_name': 'topp:states'
        }])
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_arcgisjson(self):
        """
        Tests the import from an ArcGIS JSON (FeatureServer query) endpoint.
        """
        endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json'
        gi = OGRImport(endpoint)
        layers = gi.handle(configuration_options=[{'index': 0}])
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def import_file(self, in_file, configuration_options=[]):
        """
        Imports the file.
        """
        self.assertTrue(os.path.exists(in_file))

        # run ogr2ogr
        gi = OGRImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)

        return layers

    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """
        Exposes a PostGIS feature type in geoserver.
        """
        headers = {"Content-type": "application/xml"}
        data = "<featureType><name>{name}</name></featureType>".format(
            name=name)
        url = datastore.href.replace(".xml", '/featuretypes.xml')
        headers, response = catalog.http.request(url, "POST", data, headers)
        return response
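
    # Usage sketch (an assumption, not part of the original tests): with the
    # authenticated catalog and the PostGIS datastore created in setUp, exposing
    # a table named 'parks' (a hypothetical table) would look roughly like:
    #
    #     response = self.createFeatureType(self.cat, self.datastore, 'parks')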

    def test_file_add_view(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()

        # test login required for this view
        request = c.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path,
                         reverse('uploads-list'))
        self.assertTrue(len(response.context['object_list']) == 1)
        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))

        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'empty_file.geojson')

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """
        Tests the JSON variant of the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new-json'), {'file': fp},
                              follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """
        Tests the describe fields functionality.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'US_Shootings.csv')

        with GDALInspector(f) as f:
            layers = f.describe_fields()

        self.assertEqual(layers[0]['layer_name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']], [
            'Date', 'Shooter', 'Killed', 'Wounded', 'Location', 'City',
            'Longitude', 'Latitude'
        ])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """
        Tests GDAL file type detection.
        """
        files = [
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files', 'US_Shootings.csv'), 'CSV'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files',
                          'point_with_date.geojson'), 'GeoJSON'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files', 'mojstrovka.gpx'), 'GPX'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files', 'us_states.kml'), 'KML'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files',
                          'boxes_with_year_field.shp'), 'ESRI Shapefile'),
            (os.path.join(os.path.dirname(__file__), '..',
                          'importer-test-files',
                          'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'),
        ]

        from .models import NoDataSourceFound
        try:
            for path, file_type in files:
                with GDALInspector(path) as f:
                    self.assertEqual(f.file_type(), file_type)

        except NoDataSourceFound as e:
            print 'No data source found in: {0}'.format(path)
            raise e

    def test_configure_view(self):
        """
        Tests the configuration view.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        new_user = User.objects.create(username='******')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        upload = response.context['object_list'][0]

        payload = [{
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True,
            'permissions': {
                'users': {
                    'test':
                    new_user_perms,
                    'AnonymousUser': [
                        "change_layer_data", "download_resourcebase",
                        "view_resourcebase"
                    ]
                }
            }
        }]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(
            upload.id),
                          data=json.dumps(payload),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(Layer.objects.all()[0].owner.username,
                         self.non_admin_username)

        perms = layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_username)

        # check user permissions
        for perm in [
                u'publish_resourcebase', u'change_resourcebase_permissions',
                u'delete_resourcebase', u'change_resourcebase',
                u'change_resourcebase_metadata', u'download_resourcebase',
                u'view_resourcebase', u'change_layer_style',
                u'change_layer_data'
        ]:
            self.assertIn(perm, perms['users'][user])

        self.assertTrue(perms['users'][new_user])
        self.assertIn('change_resourcebase_permissions',
                      perms['users'][new_user])

        self.assertIn(
            "change_layer_data",
            perms['users'][User.objects.get(username='******')])

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)

    def test_configure_view_convert_date(self):
        """
        Tests the configure view with a dataset that needs to be converted to a date.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'US_Shootings.csv')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        upload = response.context['object_list'][0]

        payload = [{
            'index': 0,
            'convert_to_date': ['Date'],
            'start_date': 'Date',
            'configureTime': True,
            'editable': True
        }]

        response = c.get('/importer-api/data-layers/{0}/configure/'.format(
            upload.id))
        self.assertEqual(response.status_code, 405)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(
            upload.id))
        self.assertEqual(response.status_code, 400)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(
            upload.id),
                          data=json.dumps(payload),
                          content_type='application/json')
        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(layer.attributes.filter(attribute='date'),
                        'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        # ensure a user who does not own the upload cannot configure an import from it.
        c.logout()
        c.login_as_admin()
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(
            upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        c = AdminClient()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.username)
        non_admin = User.objects.get(username=self.non_admin_username)
        from osgeo_importer.models import UploadFile

        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'US_Shootings.csv')

        with open(f, 'rb') as f:
            uploaded_file = SimpleUploadedFile('test_data', f.read())
            admin_upload = UploadedData.objects.create(state='Admin test',
                                                       user=admin)
            admin_upload.uploadfile_set.add(
                UploadFile.objects.create(file=uploaded_file))

            non_admin_upload = UploadedData.objects.create(
                state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(
                UploadFile.objects.create(file=uploaded_file))

        c.login_as_admin()
        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = c.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        c = AdminClient()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """
        Ensure users can delete their data.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """
        Ensure that administrators can delete data that isn't theirs.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        c.logout()
        c.login_as_admin()

        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))

        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """
        Tests providing a name in the configuration options.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()
        name = 'point-with-a-date'
        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'name': name,
            'editable': True
        }

        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps(payload),
                          content_type='application/json')

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """
        Tests the import api.
        """
        f = os.path.join(os.path.dirname(__file__), '..',
                         'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp},
                              follow=True)

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True
        }

        self.assertTrue(
            isinstance(UploadLayer.objects.first().configuration_options,
                       dict))

        response = c.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = c.post('/importer-api/data-layers/1/configure/',
                          data=json.dumps([payload]),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)
        self.assertTrue('task' in response.content)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """
        Test the file extension validator.
        """

        for extension in load_handler(OSGEO_IMPORTER,
                                      'test.txt').valid_extensions:
            self.assertIsNone(
                validate_file_extension(
                    SimpleUploadedFile('test.{0}'.format(extension), '')))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """
        Tests that the inspector rejects files without a recognizable geometry.
        """

        with self.assertRaises(ValidationError):
            validate_inspector_can_read(
                SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(
            SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """
        Regression test for numeric field overflows in shapefiles.
        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import('Walmart.zip',
                            configuration_options=[{
                                "configureTime":
                                False,
                                "convert_to_date": ["W1_OPENDAT"],
                                "editable":
                                True,
                                "index":
                                0,
                                "name":
                                "Walmart",
                                "start_date":
                                "W1_OPENDAT"
                            }])

    def test_multipolygon_shapefile(self):
        """
        Tests shapefile with multipart polygons.
        """

        self.generic_import('PhoenixFirstDues.zip',
                            configuration_options=[{
                                'index': 0
                            }])

    def test_non_4326_SR(self):
        """
        Tests a shapefile with a non-EPSG:4326 spatial reference.
        """

        res = self.generic_import('Istanbul.zip',
                                  configuration_options=[{
                                      'index': 0
                                  }])
        featuretype = self.cat.get_resource(res.name)
        self.assertEqual(featuretype.projection, 'EPSG:32635')

    def test_gwc_handler(self):
        """
        Tests the GeoWebCache handler
        """
        layer = self.generic_import('boxes_with_date.shp',
                                    configuration_options=[{
                                        'index':
                                        0,
                                        'convert_to_date': ['date'],
                                        'start_date':
                                        'date',
                                        'configureTime':
                                        True
                                    }])

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.cat.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertTrue('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import('boxes_with_date_iso_date.shp',
                                    configuration_options=[{
                                        'index': 0
                                    }])
        gs_layer = self.cat.get_layer(layer.name)
        self.cat._cache.clear()
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertFalse('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

    def test_utf8(self):
        """
        Tests utf8 characters in attributes
        """
        filename = os.path.join(os.path.dirname(__file__), '..',
                                'importer-test-files', 'china_provinces.shp')
        layer = self.generic_import('china_provinces.shp')
        gi = OGRImport(filename)
        ds, insp = gi.open_target_datastore(gi.target_store)
        sql = str("select NAME_CH from %s where NAME_PY = 'An Zhou'" %
                  (layer.name))
        res = ds.ExecuteSQL(sql)
        feat = res.GetFeature(0)
        self.assertEqual(feat.GetField('name_ch'), "安州")

    def test_non_converted_date(self):
        """
        Tests configuring time on a field without converting it to a date.
        """
        results = self.generic_import('TM_WORLD_BORDERS_2005.zip',
                                      configuration_options=[{
                                          'index':
                                          0,
                                          'start_date':
                                          'Year',
                                          'configureTime':
                                          True
                                      }])
        layer = self.cat.get_layer(results.typename)
        self.assertTrue('time' in layer.resource.metadata)
        self.assertEqual('year', layer.resource.metadata['time'].attribute)

    def test_fid_field(self):
        """
        Regression test for preserving an FID field when target layer supports it but source does not.
        """
        self.generic_import('noaa_paleoclimate.zip',
                            configuration_options=[{
                                'index': 0
                            }])

    def test_csv_with_wkb_geometry(self):
        """
        Tests CSV files that encode their geometry in several different column formats.
        """
        files = [
            'police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv',
            'police_csv_WKR.csv', 'police_csv_nFID.csv',
            'police_csv_nOGFID.csv', 'police_csv_noWKB.csv'
        ]

        for i in files:
            self.generic_import(
                i, {
                    "configureTime": True,
                    "convert_to_date": ["date_time"],
                    "editable": True,
                    "index": 0,
                    "name": i.lower(),
                    "permissions": {
                        "users": {
                            "AnonymousUser": [
                                "change_layer_data", "download_resourcebase",
                                "view_resourcebase"
                            ]
                        }
                    },
                    "start_date": "date_time",
                })
Beispiel #24
0
class CatalogTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'],
                           username=GSPARAMS['GSUSER'],
                           password=GSPARAMS['GSPASSWORD'])

    def testAbout(self):
        about_html = self.cat.about()
        self.assertTrue(
            '<html xmlns="http://www.w3.org/1999/xhtml"' in str(about_html))

    def testGSVersion(self):
        version = self.cat.gsversion()
        pat = re.compile('\d\.\d+(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertTrue(pat.match('2.10.1'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)
        self.assertEqual("topp", self.cat.get_workspaces("topp")[-1].name)
        self.assertEqual(2,
                         len(self.cat.get_workspaces(names=['topp', 'sde'])))
        self.assertEqual(2, len(self.cat.get_workspaces(names='topp, sde')))

    def testStores(self):
        self.assertEqual(0, len(self.cat.get_stores("nonexistentstore")))
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(workspace=topp)))
        self.assertEqual(2, len(self.cat.get_stores(workspace=sf)))
        self.assertEqual(2, len(self.cat.get_stores(workspace='sf')))
        self.assertEqual(
            2, len(self.cat.get_stores(names='states_shapefile, sfdem')))
        self.assertEqual(
            2, len(self.cat.get_stores(names=['states_shapefile', 'sfdem'])))
        self.assertEqual("sfdem", self.cat.get_stores("sfdem")[-1].name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile", topp).name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile", "topp").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))

        self.assertEqual("states",
                         self.cat.get_resource("states", states).name)
        self.assertEqual("states",
                         self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")

        fields = [
            states.title, states.abstract, states.native_bbox,
            states.latlon_bbox, states.projection, states.projection_policy
        ]

        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)

        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem",
                         self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testResourcesUpdate(self):
        res_dest = self.cat.get_resources()
        count = 0

        for rd in res_dest:
            # only wms layers
            if rd.resource_type != "wmsLayer":
                continue

            # looking for same name
            ro = self.cat.get_resource(rd.name)

            if ro is not None:
                rd.title = ro.title
                rd.abstract = ro.abstract
                rd.keywords = ro.keywords
                rd.projection = ro.projection
                rd.native_bbox = ro.native_bbox
                rd.latlon_bbox = ro.latlon_bbox
                rd.projection_policy = ro.projection_policy
                rd.enabled = ro.enabled
                rd.advertised = ro.advertised
                rd.metadata_links = ro.metadata_links or None

                self.cat.save(rd)
                self.cat.reload()

                # print "Updated layer: " + rd.name
                count += 1

        # print "Total updated layers: " + str(count)

    def testLayers(self):
        expected = set([
            "Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies",
            "tasmania_state_boundaries", "tasmania_cities", "states",
            "poly_landmarks", "tiger_roads", "poi", "giant_polygon"
        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)

        states = self.cat.get_layer("states")

        self.assertEqual("states", states.name)
        self.assertIsInstance(states.resource, ResourceInfo)
        self.assertEqual(set(s.name for s in states.styles),
                         set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)

        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual("tasmania", tas.name)
        self.assertIsInstance(tas, LayerGroup)
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        # Try to create a new Layer Group into the "topp" workspace
        self.assertIsNotNone(self.cat.get_workspace("topp"))
        tas2 = self.cat.create_layergroup("tasmania_reloaded",
                                          tas.layers,
                                          workspace="topp")
        self.cat.save(tas2)
        self.assertIsNone(self.cat.get_layergroup("tasmania_reloaded"))
        self.assertIsNotNone(
            self.cat.get_layergroup("tasmania_reloaded", "topp"))
        tas2 = self.cat.get_layergroup("tasmania_reloaded", "topp")
        self.assertEqual("tasmania_reloaded", tas2.name)
        self.assertIsInstance(tas2, LayerGroup)
        self.assertEqual(tas2.workspace, "topp", tas2.workspace)
        self.assertEqual(tas2.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas2.layers)
        self.assertEqual(tas2.styles, [None, None, None, None], tas2.styles)

    def testStyles(self):
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld",
                         self.cat.get_style("population").filename)
        self.assertEqual("population",
                         self.cat.get_style("population").sld_name)
        self.assertIsNone(self.cat.get_style('non-existing-style'))

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        self.assertEqual(
            self.cat.get_store(workspace="best workspace ever",
                               name="best store ever"), None)
        self.cat.get_layer("best layer ever")
        self.cat.get_layergroup("best layergroup ever")

    def testUnicodeUrl(self):
        """
        Tests that the geoserver.support.url function support unicode strings.
        """

        # Test the url function with unicode
        seg = [
            'workspaces', 'test', 'datastores', u'operaci\xf3n_repo',
            'featuretypes.xml'
        ]
        u = url(base=self.cat.service_url, seg=seg)
        self.assertEqual(
            u, self.cat.service_url +
            "/workspaces/test/datastores/operaci%C3%B3n_repo/featuretypes.xml")

        # Test the url function with normal string
        seg = [
            'workspaces', 'test', 'datastores', 'test-repo', 'featuretypes.xml'
        ]
        u = url(base=self.cat.service_url, seg=seg)
        self.assertEqual(
            u, self.cat.service_url +
            "/workspaces/test/datastores/test-repo/featuretypes.xml")
Beispiel #25
0
def _register_cascaded_layers(service, owner=None):
    """
    Register layers for a cascading WMS
    """
    if service.type == 'WMS' or service.type == "OWS":
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      _user, _password)
        # Can we always assume that it is geonode?
        # Should cascading layers have a separate workspace?
        cascade_ws = cat.get_workspace(settings.CASCADE_WORKSPACE)
        if cascade_ws is None:
            cascade_ws = cat.create_workspace(settings.CASCADE_WORKSPACE, 'cascade')
        try:
            store = cat.get_store(service.name,cascade_ws)
        except Exception:
            store = cat.create_wmsstore(service.name, cascade_ws)
        wms = WebMapService(service.base_url)
        layers = list(wms.contents)

        count = 0
        for layer in layers:
            lyr = cat.get_resource(layer, store, cascade_ws)
            if lyr is None:
                if service.type in ["WMS","OWS"]:
                    resource = cat.create_wmslayer(cascade_ws, store, layer)
                elif service.type == "WFS":
                    resource = cat.create_wfslayer(cascade_ws, store, layer)

                if resource:
                    cascaded_layer, created = Layer.objects.get_or_create(
                        typename = "%s:%s" % (cascade_ws.name, resource.name),
                        defaults = {
                            "name": resource.name,
                            "workspace": cascade_ws.name,
                            "store": store.name,
                            "storeType": store.resource_type,
                            "title": resource.title or 'No title provided',
                            "abstract": resource.abstract or 'No abstract provided',
                            "owner": None,
                            "uuid": str(uuid.uuid4())
                        })


                    if created:
                        cascaded_layer.save()
                        if cascaded_layer is not None and cascaded_layer.bbox is None:
                            cascaded_layer._populate_from_gs(gs_resource=resource)
                        cascaded_layer.set_default_permissions()

                        service_layer, created = ServiceLayer.objects.get_or_create(
                            service=service,
                            typename=cascaded_layer.name
                        )
                        service_layer.layer = cascaded_layer
                        service_layer.title = cascaded_layer.title
                        service_layer.description = cascaded_layer.abstract
                        service_layer.styles = cascaded_layer.styles
                        service_layer.save()

                        count += 1
                    else:
                        logger.error("Resource %s from store %s could not be saved as layer" % (layer, store.name))
        message = "%d Layers Registered" % count
        return_dict = {'status': 'ok', 'msg': message }
        return HttpResponse(json.dumps(return_dict),
                            mimetype='application/json',
                            status=200)
    elif service.type == 'WCS':
        return HttpResponse('Not Implemented (Yet)', status=501)
    else:
        return HttpResponse('Invalid Service Type', status=400)
# Helper script written at 9:21 on 2017-12-20:
# removes redundant layer groups, i.e. groups that reference missing (vacant) layers.
import os, sys

from geoserver.catalog import Catalog
geoserver_url = "http://172.18.77.15:8089/geoserver"
cat = Catalog(geoserver_url + "/rest", username="******", password="******")

# getting the names of all the existing workspaces
srvr_wslist = cat.get_workspaces()

for ws_obj in srvr_wslist:
    ws_name = ws_obj.name
    srvr_lyrgrplist = cat.get_layergroups(workspace=ws_obj)

    # check if all the related layers exists in the current layer group
    for grp_obj in srvr_lyrgrplist:
        for mbr_lyr_name in grp_obj.layers:
            if not cat.get_resource(mbr_lyr_name):
                cat.delete(grp_obj)
                print "Deleting group: ", grp_obj.name
                break

# Next step: remove all layer groups whose names carry the designated suffixes
# (a hedged sketch of that step follows).
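# The sketch below is an assumption about that follow-up step, not part of the
# original script: it reuses the `cat` and `srvr_wslist` objects created above,
# and the suffix list is purely illustrative; substitute the suffixes that
# actually mark redundant groups on this server.
redundant_suffixes = ['_tmp', '_old']  # hypothetical suffixes

for ws_obj in srvr_wslist:
    for grp_obj in cat.get_layergroups(workspace=ws_obj):
        # delete any group whose name ends with one of the designated suffixes
        if any(grp_obj.name.endswith(sfx) for sfx in redundant_suffixes):
            print "Deleting suffixed group: ", grp_obj.name
            cat.delete(grp_obj)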


Beispiel #27
0
class CatalogTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testAbout(self):
        about_html = self.cat.about()
        self.assertTrue('<html xmlns="http://www.w3.org/1999/xhtml"' in about_html)

    def testGSVersion(self):
        version = self.cat.gsversion()
        pat = re.compile('\d\.\d(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent 
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)


    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile", topp).name)
        self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile").name)
        self.assertEqual("states_shapefile", self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

  
    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))

        self.assertEqual("states", self.cat.get_resource("states", states).name)
        self.assertEqual("states", self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")

        fields = [
            states.title,
            states.abstract,
            states.native_bbox,
            states.latlon_bbox,
            states.projection,
            states.projection_policy
        ]

        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)

        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)


    def testLayers(self):
        expected = set(["Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies", "tasmania_state_boundaries",
            "tasmania_cities", "states", "poly_landmarks", "tiger_roads", "poi",
            "giant_polygon"
        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assert_(len(expected ^ actual) == 0, message)

        states = self.cat.get_layer("states")

        self.assert_("states", states.name)
        self.assert_(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles), set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assert_(len(expected ^ actual) == 0, message)

        tas = self.cat.get_layergroup("tasmania")

        self.assert_("tasmania", tas.name)
        self.assert_(isinstance(tas, LayerGroup))
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

    def testStyles(self):
        self.assertEqual(20, len(self.cat.get_styles()))
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld", self.cat.get_style("population").filename)
        self.assertEqual("population", self.cat.get_style("population").sld_name)

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        self.cat.get_store(workspace="best workspace ever",
                name="best store ever")
        self.assertRaises(FailedRequestError,
            lambda: self.cat.get_resource(
                workspace="best workspace ever", store="best store ever",
                name="best resource ever"))
        self.cat.get_layer("best layer ever")
        self.cat.get_layergroup("best layergroup ever")

    def testUnicodeUrl(self):
        """
        Tests that the geoserver.support.url function support unicode strings.
        """

        # Test the url function with unicode
        seg = ['workspaces', 'test', 'datastores', u'operaci\xf3n_repo', 'featuretypes.xml']
        u = url(base=self.cat.service_url, seg=seg)
        self.assertEqual(u, self.cat.service_url + "/workspaces/test/datastores/operaci%C3%B3n_repo/featuretypes.xml")

        # Test the url function with normal string
        seg = ['workspaces', 'test', 'datastores', 'test-repo', 'featuretypes.xml']
        u = url(base=self.cat.service_url, seg=seg)
        self.assertEqual(u, self.cat.service_url + "/workspaces/test/datastores/test-repo/featuretypes.xml")
class UploaderTests(DjagnoOsgeoMixin):
    """
    Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {'database': settings['NAME'],
                  'passwd': settings['PASSWORD'],
                  'namespace': 'http://www.geonode.org/',
                  'type': 'PostGIS',
                  'dbtype': 'postgis',
                  'host': settings['HOST'],
                  'user': settings['USER'],
                  'port': settings['PORT'],
                  'enabled': "True"}

        store = catalog.create_datastore(settings['NAME'], workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):

        if not os.path.exists(os.path.join(os.path.split(__file__)[0], '..', 'importer-test-files')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user('non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials)
        if self.cat.get_workspace('geonode') == None:
            self.cat.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)

    def generic_import(self, file, configuration_options=[{'index': 0}]):

        f = file
        filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f)

        res = self.import_file(filename, configuration_options=configuration_options)
        layer_results=[]

        for result in res:
            if result[1].get('raster'):
                layerfile = result[0]
                layername = os.path.splitext(os.path.basename(layerfile))[0]
                layer = Layer.objects.get(name=layername)
                self.assertTrue(layerfile.endswith('.tif'))
                self.assertTrue(os.path.exists(layerfile))
                l = gdal.OpenEx(layerfile)
                self.assertEqual(l.GetDriver().ShortName, 'GTiff')
                layer_results.append(layer)
            else:
                layer = Layer.objects.get(name=result[0])
                self.assertEqual(layer.srid, 'EPSG:4326')
                self.assertEqual(layer.store, self.datastore.name)
                self.assertEqual(layer.storeType, 'dataStore')

                if not filename.endswith('zip'):
                    self.assertTrue(layer.attributes.count() >= DataSource(filename)[0].num_fields)

                layer_results.append(layer)

        return layer_results[0]

    def generic_raster_import(self, file, configuration_options=[{'index': 0}]):
        f = file
        filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', f)
        res = self.import_file(filename, configuration_options=configuration_options)
        layerfile = res[0][0]
        layername = os.path.splitext(os.path.basename(layerfile))[0]
        layer = Layer.objects.get(name=layername)
        self.assertTrue(layerfile.endswith('.tif'))
        self.assertTrue(os.path.exists(layerfile))
        l = gdal.OpenEx(layerfile)
        self.assertEqual(l.GetDriver().ShortName, 'GTiff')
        return layer

    def test_raster(self):
        """
        Tests raster import
        """
        layer = self.generic_raster_import('test_grid.tif', configuration_options=[{'index': 0}])

    def test_box_with_year_field(self):
        """
        Tests the import of test_box_with_year_field.
        """

        layer = self.generic_import('boxes_with_year_field.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """
        Tests the import of test_boxes_with_date.
        """

        layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0,
                                                                                   'convert_to_date': ['date'],
                                                                                   'start_date': 'date',
                                                                                   'configureTime': True
                                                                                   }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_gpkg(self):
        """
        Tests the import of test_boxes_with_date.gpkg.
        """

        layer = self.generic_import('boxes_with_date.gpkg', configuration_options=[{'index': 0,
                                                                                   'convert_to_date': ['date'],
                                                                                   'start_date': 'date',
                                                                                   'configureTime': True
                                                                                   }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_plus_raster_gpkg_by_index(self):
        """
        Tests the import of multilayer vector + raster geopackage using index
        """

        layer = self.generic_import('boxes_plus_raster.gpkg', configuration_options=[{'index': 0,
                                                                                   'convert_to_date': ['date'],
                                                                                   'start_date': 'date',
                                                                                   'configureTime': True
                                                                                   },
                                                                                   {'index':1},
                                                                                   {'index':2},
                                                                                   {'index':3},
                                                                                   {'index':4},
                                                                                   {'index':5},
                                                                                   {'index':6},
                                                                                   {'index':7},])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """
        Tests a CSV with WKT polygon.
        """

        layer = self.generic_import('boxes_with_date.csv', configuration_options=[{'index': 0,
                                                                                   'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_csv_missing_features(self):
        """
        Tests a CSV that has some rows without geometries and uses ISO-8859-4 (Latin-4) encoding.
        """

        self.generic_import('missing-features.csv', configuration_options=[{'index': 0}])

    def test_boxes_with_iso_date(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0,
                                                                                            'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """
        Tests importing the same layer twice to ensure incrementing file names is properly handled.
        """
        filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip')

        gi = OGRImport(filename)
        layers1 = gi.handle({'index': 0, 'name': 'test'})
        layers2 = gi.handle({'index': 0,  'name': 'test'})

        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_launder(self):
        """
        Ensure the launder function works as expected.
        """
        self.assertEqual(launder('tm_world_borders_simpl_0.3'), 'tm_world_borders_simpl_0_3')
        self.assertEqual(launder('Testing#'), 'testing_')
        self.assertEqual(launder('   '), '_')

    def test_boxes_with_date_iso_date_zip(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.zip', configuration_options=[{'index': 0,
                                                                                          'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """
        Tests the import of test_boxes_with_dates_bc.
        """

        layer = self.generic_import('boxes_with_dates_bc.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """
        Tests the import of point_with_date.geojson
        """

        layer = self.generic_import('point_with_date.geojson', configuration_options=[{'index': 0,
                                                                                       'convert_to_date': ['date']}])

        # OGR will name GeoJSON layers 'ogrgeojson'; we rename them to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """
        Tests the import of test_boxes_with_end_date.
        This layer has a date and an end date field that are typed correctly.
        """

        layer = self.generic_import('boxes_with_end_date.shp',configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date','enddate'],
                                                                                         'start_date': 'date',
                                                                                         'end_date': 'enddate',
                                                                                         'configureTime': True
                                                                                         }])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        end_date_attr = filter(lambda attr: attr.attribute == 'enddate_as_date', layer.attributes)[0]

        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)

        self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """
        Tests the import of us_states_kml.
        This layer has a date and an end date field that are typed correctly.
        """
        # TODO: Support time in kmls.

        layer = self.generic_import('us_states.kml',configuration_options=[{'index': 0}])

    def test_mojstrovka_gpx(self):
        """
        Tests the import of mojstrovka.gpx.
        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import('mojstrovka.gpx',configuration_options=[{'index': 0,
                                                         'convert_to_date': ['time'],
                                                         'configureTime': True
                                                         }])
        date_attr = filter(lambda attr: attr.attribute == 'time_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')
        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """
        Convenience method to run generic tests on time layers.
        """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace)

        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)
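
    # configure_time (called throughout these tests) is what writes the 'time' entry that
    # generic_time_check reads back from resource.metadata. A rough sketch of doing the
    # same thing directly with gsconfig -- DimensionInfo and its argument order are an
    # assumption about geoserver.support, not the importer's actual code path:
    #
    #     from geoserver.support import DimensionInfo
    #     time_info = DimensionInfo('time', 'true', 'LIST', None, 'ISO8601', None,
    #                               attribute='date_as_date')
    #     resource.metadata = dict(resource.metadata or {}, time=time_info)
    #     self.cat.save(resource)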

    def test_us_shootings_csv(self):
        """
        Tests the import of US_Shootings.csv.
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')

        filename = 'US_Shootings.csv'
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename)
        layer = self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])
        self.assertTrue(layer.name.startswith('us_shootings'))

        date_field = 'date'
        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """
        Tests the import of US_Civil_Rights_Sitins0.csv
        """
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        layer = self.generic_import("US_Civil_Rights_Sitins0.csv", configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])

    def get_layer_names(self, in_file):
        """
        Gets layer names from a data source.
        """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        filename = 'point_with_date.geojson'
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', filename)
        self.generic_import(filename, configuration_options=[{'index': 0,  'convert_to_date': ['date']}])

    def test_wfs(self):
        """
        Tests the import from a WFS Endpoint
        """
        wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs'
        gi = OGRImport(wfs)
        layers = gi.handle(configuration_options=[{'layer_name':'og:bugsites'},
                                                   {'layer_name':'topp:states'}])
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_arcgisjson(self):
        """
        Tests the import from an ArcGIS Server FeatureServer JSON endpoint.
        """
        endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json'
        gi = OGRImport(endpoint)
        layers = gi.handle(configuration_options=[{'index':0}])
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def import_file(self, in_file, configuration_options=[]):
        """
        Imports the file.
        """
        self.assertTrue(os.path.exists(in_file))

        # run the OGR-based import handler
        gi = OGRImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)

        return layers

    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """
        Exposes a PostGIS feature type in geoserver.
        """
        headers = {"Content-type": "application/xml"}
        data = "<featureType><name>{name}</name></featureType>".format(name=name)
        url = datastore.href.replace(".xml", '/featuretypes.xml')
        headers, response = catalog.http.request(url, "POST", data, headers)
        return response
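
    # Example (hypothetical) call, assuming a PostGIS table named 'my_table' already
    # exists in the datastore created in setUp:
    #
    #     UploaderTests.createFeatureType(self.cat, self.datastore, 'my_table')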


    def test_file_add_view(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()

        # test login required for this view
        request = c.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path, reverse('uploads-list'))
        self.assertTrue(len(response.context['object_list']) == 1)
        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))

        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'empty_file.geojson')

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """
        Tests the JSON variant of the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new-json'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """
        Tests the describe fields functionality.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv')

        with GDALInspector(f) as f:
            layers = f.describe_fields()

        self.assertEqual(layers[0]['layer_name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed',
                                                                    'Wounded', 'Location', 'City',
                                                                    'Longitude', 'Latitude'])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """
        Tests GDAL file type detection.
        """
        files = [
                 (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv'), 'CSV'),
                 (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson'), 'GeoJSON'),
                 (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'mojstrovka.gpx'), 'GPX'),
                 (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'us_states.kml'), 'KML'),
                 (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_year_field.shp'), 'ESRI Shapefile'),
                 (os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'),
            ]

        from .models import NoDataSourceFound
        try:
            for path, file_type in files:
                with GDALInspector(path) as f:
                    self.assertEqual(f.file_type(), file_type)

        except NoDataSourceFound as e:
            print 'No data source found in: {0}'.format(path)
            raise e

    def test_configure_view(self):
        """
        Tests the configuration view.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        new_user = User.objects.create(username='test')
        new_user_perms = ['change_resourcebase_permissions']
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True,
                    'editable': True,
                    'permissions': {'users': {'test': new_user_perms,
                                              'AnonymousUser': ["change_layer_data", "download_resourcebase",
                                                                "view_resourcebase"]}}}]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username)

        perms = layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_username)

        # check user permissions
        for perm in [u'publish_resourcebase', u'change_resourcebase_permissions',
                     u'delete_resourcebase', u'change_resourcebase', u'change_resourcebase_metadata',
                     u'download_resourcebase', u'view_resourcebase', u'change_layer_style',
                     u'change_layer_data']:
            self.assertIn(perm, perms['users'][user])

        self.assertTrue(perms['users'][new_user])
        self.assertIn('change_resourcebase_permissions', perms['users'][new_user])

        self.assertIn("change_layer_data", perms['users'][User.objects.get(username='******')])

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)

    def test_configure_view_convert_date(self):
        """
        Tests the configure view with a dataset that needs to be converted to a date.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'convert_to_date': ['Date'],
                    'start_date': 'Date',
                    'configureTime': True,
                    'editable': True}]


        response = c.get('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 405)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 400)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')
        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(layer.attributes.filter(attribute='date'), 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        # ensure a user who does not own the upload cannot configure an import from it.
        c.logout()
        c.login_as_admin()
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        c = AdminClient()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.username)
        non_admin = User.objects.get(username=self.non_admin_username)
        from osgeo_importer.models import UploadFile

        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'US_Shootings.csv')

        with open(f, 'rb') as f:
            uploaded_file = SimpleUploadedFile('test_data', f.read())
            admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
            admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

            non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

        c.login_as_admin()
        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = c.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)

    def test_layer_list_api(self):
        c = AdminClient()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """
        Ensure users can delete their data.
        """
        c = AdminClient()

        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """
        Ensure that administrators can delete data that isn't theirs.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        c.logout()
        c.login_as_admin()

        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))

        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """
        Tests providing a name in the configuration options.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()
        name = 'point-with-a-date'
        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'name': name,
                   'editable': True}

        response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload),
                          content_type='application/json')

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """
        Tests the import api.
        """
        f = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'editable': True}

        self.assertTrue(isinstance(UploadLayer.objects.first().configuration_options, dict))

        response = c.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)
        self.assertTrue('task' in response.content)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertEqual(layer.attributes[1].attribute_type, 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """
        Test the file extension validator.
        """

        for extension in load_handler(OSGEO_IMPORTER, 'test.txt').valid_extensions:
            self.assertIsNone(validate_file_extension(SimpleUploadedFile('test.{0}'.format(extension), '')))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """
        Tests that the inspector validator rejects a CSV without a recognizable geometry column.
        """

        with self.assertRaises(ValidationError):
            validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """
        Regression test for numeric field overflows in shapefiles.
        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import('Walmart.zip', configuration_options=[{'configureTime': False,
                                                                    'convert_to_date': ['W1_OPENDAT'],
                                                                    'editable': True,
                                                                    'index': 0,
                                                                    'name': 'Walmart',
                                                                    'start_date': 'W1_OPENDAT'}])

    def test_multipolygon_shapefile(self):
        """
        Tests shapefile with multipart polygons.
        """

        self.generic_import('PhoenixFirstDues.zip', configuration_options=[{'index': 0}])

    def test_non_4326_SR(self):
        """
        Tests a shapefile whose native spatial reference is not EPSG:4326.
        """

        res = self.generic_import('Istanbul.zip', configuration_options=[{'index': 0}])
        featuretype = self.cat.get_resource(res.name)
        self.assertEqual(featuretype.projection, 'EPSG:32635')

    def test_gwc_handler(self):
        """
        Tests the GeoWebCache handler
        """
        layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0,
                                                                                   'convert_to_date': ['date'],
                                                                                   'start_date': 'date',
                                                                                   'configureTime': True
                                                                                   }])

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.cat.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertTrue('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0}])
        gs_layer = self.cat.get_layer(layer.name)
        self.cat._cache.clear()
        gs_layer.fetch()

        payload = self.cat.http.request(gwc.gwc_url(gs_layer))
        self.assertFalse('regexParameterFilter' in payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

    def test_utf8(self):
        """
        Tests utf8 characters in attributes
        """
        filename = os.path.join(os.path.dirname(__file__), '..', 'importer-test-files', 'china_provinces.shp')
        layer = self.generic_import('china_provinces.shp')
        gi = OGRImport(filename)
        ds, insp = gi.open_target_datastore(gi.target_store)
        sql = "select NAME_CH from %s where NAME_PY = 'An Zhou'" % layer.name
        res = ds.ExecuteSQL(sql)
        feat = res.GetFeature(0)
        self.assertEqual(feat.GetField('name_ch'), "安州")

    def test_non_converted_date(self):
        """
        Test converting a field as date.
        """
        results = self.generic_import('TM_WORLD_BORDERS_2005.zip', configuration_options=[{'index': 0,
                                                                                           'start_date': 'Year',
                                                                                           'configureTime': True}])
        layer = self.cat.get_layer(results.typename)
        self.assertTrue('time' in layer.resource.metadata)
        self.assertEqual('year', layer.resource.metadata['time'].attribute)

    def test_fid_field(self):
        """
        Regression test for preserving an FID field when target layer supports it but source does not.
        """
        self.generic_import('noaa_paleoclimate.zip', configuration_options=[{'index': 0}])

    def test_csv_with_wkb_geometry(self):
        """
        Tests problems with the CSV files with multiple geometries.
        """
        files = ['police_csv.csv', 'police_csv_nOGC.csv', 'police_csv_noLatLon.csv',
                 'police_csv_WKR.csv', 'police_csv_nFID.csv', 'police_csv_nOGFID.csv',
                 'police_csv_noWKB.csv']

        for i in files:
            self.generic_import(i, configuration_options=[{'configureTime': True,
                                                            'convert_to_date': ['date_time'],
                                                            'editable': True,
                                                            'index': 0,
                                                            'name': i.lower(),
                                                            'start_date': 'date_time',
                                                            'permissions': {'users': {'AnonymousUser': ['change_layer_data',
                                                                                                        'download_resourcebase',
                                                                                                        'view_resourcebase']}}}])
Beispiel #29
0
class ModifyingTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'],
                           username=GSPARAMS['GSUSER'],
                           password=GSPARAMS['GSPASSWORD'])

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assertIsNone(self.cat.get_layer('import'))
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assertIsNotNone(self.cat.get_layer('import'))
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            try:
                if lyr:
                    self.cat.delete(lyr)
                    self.cat.delete(lyr.resource)
                if ds:
                    self.cat.delete(ds)
            except Exception:
                pass

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(
            ds, "import", {
                'shp': 'test/data/states.shp',
                'shx': 'test/data/states.shx',
                'dbf': 'test/data/states.dbf',
                'prj': 'test/data/states.prj'
            })

    @drop_table('import2')
    def testVirtualTables(self):
        ds = self.cat.create_datastore("gsconfig_import_test2")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test2")
        self.cat.add_data_to_store(
            ds, "import2", {
                'shp': 'test/data/states.shp',
                'shx': 'test/data/states.shx',
                'dbf': 'test/data/states.dbf',
                'prj': 'test/data/states.prj'
            })
        store = self.cat.get_store("gsconfig_import_test2")
        geom = JDBCVirtualTableGeometry('the_geom', 'MultiPolygon', '4326')
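        # arguments appear to be (geometry column, geometry type, SRID)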
        ft_name = 'my_jdbc_vt_test'
        epsg_code = 'EPSG:4326'
        sql = "select * from import2 where 'STATE_NAME' = 'Illinois'"
        keyColumn = None
        parameters = None

        jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn,
                                   parameters)
        self.cat.publish_featuretype(ft_name,
                                     store,
                                     epsg_code,
                                     jdbc_virtual_table=jdbc_vt)

    # DISABLED; this test works only in the very particular case
    # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR
    # def testCoverageStoreCreate(self):
    #     ds = self.cat.create_coveragestore2("coverage_gsconfig")
    #     ds.data_url = "file:test/data/mytiff.tiff"
    #     self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(
            ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEqual(set(rs.request_srs_list), srs_before,
                         str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(set(rs.request_srs_list), srs_after,
                         str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEqual(set(rs.response_srs_list), srs_before,
                         str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(set(rs.response_srs_list), srs_after,
                         str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEqual(set(rs.supported_formats), formats,
                         str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(set(rs.supported_formats), formats_after,
                         str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&"  # noqa: E501
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&"  # noqa: E501
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(workspace=wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

        # Testing projection and projection policy changes
        changed_layer.projection = "EPSG:900913"
        changed_layer.projection_policy = "REPROJECT_TO_DECLARED"
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        layer = self.cat.get_layer(changed_layer.name)
        self.assertEqual(layer.resource.projection_policy,
                         changed_layer.projection_policy)
        self.assertEqual(layer.resource.projection, changed_layer.projection)
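
    # GeoServer's projection policy accepts FORCE_DECLARED, REPROJECT_TO_DECLARED or NONE;
    # REPROJECT_TO_DECLARED (used above) reprojects the native data into the declared SRS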

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.items():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars,
                                     sf)

        self.assertIsNotNone(self.cat.get_resource("states_test",
                                                   workspace=sf))

        self.assertRaises(
            ConflictingDataError, lambda: self.cat.create_featurestore(
                "states_test", shapefile_plus_sidecars, sf))

        self.assertRaises(
            UploadError, lambda: self.cat.create_coveragestore(
                "states_raster_test", shapefile_plus_sidecars, sf))

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf))

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assertIsNone(self.cat.get_layer("states_test"))

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        self.assertIsNotNone(self.cat.get_resource("Pk50095", workspace=sf))

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf))

        self.assertRaises(
            UploadError, lambda: self.cat.create_featurestore(
                "Pk50095_vector", tiffdata, sf))

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff))

        self.cat.create_coveragestore_external_geotiff(
            "Pk50095_ext", 'file:test/data/Pk50095.tif', sf)

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = {
            'title': 'Not the original attribution',
            'width': '123',
            'height': '321',
            'href': 'http://www.georchestra.org',
            'url':
            'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg',
            'type': 'image/jpeg'
        }

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        old_default_style = lyr.default_style
        lyr.default_style = next(
            (s for s in lyr.styles if s.name == "pophatch"))
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # check count before tests (upload)
        count = len(self.cat.get_styles())

        # upload new style, verify existence
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("fred", fred_sld.read())
        fred = self.cat.get_style("fred")
        self.assertIsNotNone(fred)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        with open("test/ted.sld") as ted_sld:
            self.cat.create_style("fred", ted_sld.read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assertIsNotNone(fred)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
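        # purge=True asks GeoServer to remove the underlying SLD file as well as the catalog entry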
        self.cat.delete(fred, purge=True)
        self.assertIsNone(self.cat.get_style("fred"))

        # attempt creating new style
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("fred", fred_sld.read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assertIsNotNone(f)
        self.assertEqual(f.name, fred.name)

        # compare count after upload
        self.assertEqual(count + 1, len(self.cat.get_styles()))

        # attempt creating a new style without "title"
        with open("test/notitle.sld") as notitle_sld:
            self.cat.create_style("notitle", notitle_sld.read())
        notitle = self.cat.get_style("notitle")
        self.assertEqual(None, notitle.sld_title)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("jed", fred_sld.read(), workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assertIsNone(jed)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertIsNotNone(jed)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assertIsNotNone(jed)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        with open("test/ted.sld") as ted_sld:
            self.cat.create_style("jed",
                                  ted_sld.read(),
                                  overwrite=True,
                                  workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertIsNotNone(jed)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assertIsNone(self.cat.get_style("jed", workspace="topp"))

        # attempt creating new style
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("jed", fred_sld.read(), workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assertIsNotNone(f)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("ned",
                                  fred_sld.read(),
                                  overwrite=True,
                                  workspace="topp")
        with open("test/fred.sld") as ted_sld:
            self.cat.create_style("zed",
                                  ted_sld.read(),
                                  overwrite=True,
                                  workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assertIsNotNone(ned)
        self.assertIsNotNone(zed)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        if lyr.default_style is not None:
            self.assertEqual("topp:ned", lyr.default_style.fqn)
            self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assertIsNone(ws)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assertTrue(states.enabled)
        states.enabled = False
        self.assertFalse(states.enabled)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assertFalse(states.enabled)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assertTrue(states.enabled)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testImageMosaic(self):
        """
            Test case for Issue #110
        """
        # testing the mosaic creation
        name = 'cea_mosaic'
        with open('test/data/mosaic/cea.zip', 'rb') as data:
            self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource

        self.assertIsNotNone(resource)

        # delete granule from mosaic
        coverage = name
        store = self.cat.get_store(name)
        granules = self.cat.list_granules(coverage, store)
        self.assertEqual(1, len(granules['features']))
        granule_id = name + '.1'
        self.cat.mosaic_delete_granule(coverage, store, granule_id)
        granules = self.cat.list_granules(coverage, store)
        self.assertEqual(0, len(granules['features']))
        """
          testing external Image mosaic creation
        """
        name = 'cea_mosaic_external'
        path = 'test/data/mosaic/external'
        self.cat.create_imagemosaic(name, path, workspace='topp')
        self.cat._cache.clear()
        resource = self.cat.get_layer("external").resource
        self.assertIsNotNone(resource)

        # add granule to mosaic
        granule_path = 'test/data/mosaic/granules/cea_20150102.tif'
        self.cat.add_granule(granule_path, name, workspace='topp')
        granules = self.cat.list_granules("external", name, 'topp')
        self.assertEqual(2, len(granules['features']))

        # add external granule to mosaic
        granule_path = os.path.join(
            os.getcwd(), 'test/data/mosaic/granules/cea_20150103.zip')
        self.cat.add_granule(granule_path, name, workspace='topp')
        granules = self.cat.list_granules("external", name, 'topp')
        self.assertEqual(3, len(granules['features']))

        # Delete store
        store = self.cat.get_store(name)
        self.cat.delete(store, purge=True, recurse=True)
        self.cat._cache.clear()

    def testTimeDimension(self):
        sf = self.cat.get_workspace("sf")
        files = shapefile_and_friends(
            os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)

        get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer(
            'boxes_with_end_date').resource  # noqa: E501

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time",
                                 "true",
                                 "LIST",
                                 None,
                                 "ISO8601",
                                 None,
                                 attribute="date")
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)
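
The test above exercises gsconfig's DimensionInfo helper inside a unittest harness. A minimal standalone sketch of the same time-dimension configuration might look like the following; the GeoServer URL, credentials, and the "roads" layer name are assumptions, not part of the example above.

from geoserver.catalog import Catalog
from geoserver.support import DimensionInfo

# connection details are assumptions; point these at your own GeoServer
cat = Catalog("http://localhost:8080/geoserver/rest", "admin", "geoserver")

# resource behind an already-published vector layer (layer name is an assumption)
resource = cat.get_layer("roads").resource

# enable a LIST-presented time dimension driven by a "date" attribute
time_info = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date")
resource.metadata = {"time": time_info}
cat.save(resource)
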
Example #30
0
class CatalogTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD'])
        self.gs_version = self.cat.get_short_version()

    def testGSVersion(self):
        version = self.cat.get_version()
        pat = re.compile(r'\d\.\d+')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspaces(names="topp")[-1].name)
        self.assertEqual(2, len(self.cat.get_workspaces(names=['topp', 'sde'])))
        self.assertEqual(2, len(self.cat.get_workspaces(names='topp, sde')))
        self.assertEqual("topp", self.cat.get_workspace("topp").name)
        self.assertIsNone(self.cat.get_workspace("blahblah-"))

    def testStores(self):
        self.assertEqual(0, len(self.cat.get_stores(names="nonexistentstore")))
        topp = self.cat.get_workspaces("topp")[0]
        sf = self.cat.get_workspaces("sf")[0]
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(workspaces=topp)))
        self.assertEqual(2, len(self.cat.get_stores(workspaces=sf)))
        self.assertEqual(2, len(self.cat.get_stores(workspaces='sf')))
        self.assertEqual(2, len(self.cat.get_stores(names='states_shapefile, sfdem')))
        self.assertEqual(2, len(self.cat.get_stores(names=['states_shapefile', 'sfdem'])))
        self.assertEqual("states_shapefile", self.cat.get_stores(names="states_shapefile", workspaces=topp.name)[0].name)
        self.assertEqual("states_shapefile", self.cat.get_stores(names="states_shapefile")[0].name)
        self.assertEqual("sfdem", self.cat.get_stores(names="sfdem", workspaces=sf.name)[0].name)
        self.assertEqual("sfdem", self.cat.get_stores(names="sfdem")[0].name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)
        self.assertIsNone(self.cat.get_store("blah+blah-"))

    def testResources(self):
        topp = self.cat.get_workspaces("topp")[0]
        sf = self.cat.get_workspaces("sf")[0]
        states = self.cat.get_stores(names="states_shapefile", workspaces=topp.name)[0]
        sfdem = self.cat.get_stores(names="sfdem", workspaces=sf.name)[0]
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(2, len(self.cat.get_resources(stores=[states.name, sfdem.name])))
        self.assertEqual(11, len(self.cat.get_resources(workspaces=[topp.name, sf.name])))

        self.assertEqual("states", self.cat.get_resources(names="states", stores=states.name)[0].name)
        self.assertEqual("states", self.cat.get_resources(names="states", workspaces=topp.name)[0].name)
        self.assertEqual("states", self.cat.get_resources(names="states")[0].name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        self.assertIsNone(self.cat.get_resource("blah+1blah-2"))

        states = self.cat.get_resources(names="states")[0]

        fields = [
            states.title,
            states.abstract,
            states.native_bbox,
            states.latlon_bbox,
            states.projection,
            states.projection_policy
        ]

        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)

        self.assertEqual("sfdem", self.cat.get_resources(names="sfdem", stores=sfdem.name)[0].name)
        self.assertEqual("sfdem", self.cat.get_resources(names="sfdem", workspaces=sf.name)[0].name)
        self.assertEqual("sfdem", self.cat.get_resources(names="sfdem")[0].name)

    def testResourcesUpdate(self):
        res_dest = self.cat.get_resources()
        count = 0

        for rd in res_dest:
            # only wms layers
            if rd.resource_type != "wmsLayer":
                continue
            # looking for same name
            ro = self.cat.get_resources(names=rd.name)

            if ro is not None:
                rd.title = ro.title
                rd.abstract = ro.abstract
                rd.keywords = ro.keywords
                rd.projection = ro.projection
                rd.native_bbox = ro.native_bbox
                rd.latlon_bbox = ro.latlon_bbox
                rd.projection_policy = ro.projection_policy
                rd.enabled = ro.enabled
                rd.advertised = ro.advertised
                rd.metadata_links = ro.metadata_links or None

                self.cat.save(rd)
                self.cat.reload()
                count += 1

    def testLayers(self):
        if self.gs_version >= "2.13":
            expected = set([
                'sf:roads',
                'sf:sfdem',
                'nurc:mosaic',
                'tiger:giant_polygon',
                'sf:bugsites',
                'topp:states',
                'sf:streams',
                'tiger:poly_landmarks',
                'tiger:poi',
                'topp:tasmania_water_bodies',
                'tiger:tiger_roads',
                'topp:tasmania_roads',
                'nurc:Pk50095',
                'topp:tasmania_cities',
                'nurc:Img_Sample',
                'sf:restricted',
                'nurc:Arc_Sample',
                'sf:archsites',
                'topp:tasmania_state_boundaries'
            ])
        else:
            expected = set([
                "Arc_Sample",
                "Pk50095",
                "Img_Sample",
                "mosaic",
                "sfdem",
                "bugsites",
                "restricted",
                "streams",
                "archsites",
                "roads",
                "tasmania_roads",
                "tasmania_water_bodies",
                "tasmania_state_boundaries",
                "tasmania_cities",
                "states",
                "poly_landmarks",
                "tiger_roads",
                "poi",
                "giant_polygon"
            ])

        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assert_(len(expected ^ actual) == 0, message)

        states = self.cat.get_layer("states")

        self.assert_("states", states.name)
        self.assert_(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles), set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups(names=["tasmania", "tiger-ny", "spearfish"]))
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (extras, missing)
        self.assert_(len(expected ^ actual) == 0, message)

        tas = self.cat.get_layergroups(names="tasmania")[0]

        self.assert_("tasmania", tas.name)
        self.assert_(isinstance(tas, LayerGroup))
        if self.gs_version >= "2.13":
            self.assertEqual(tas.layers, [
                'topp:tasmania_state_boundaries',
                'topp:tasmania_water_bodies',
                'topp:tasmania_roads',
                'topp:tasmania_cities'
            ], tas.layers)
        else:
            self.assertEqual(tas.layers, [
                'tasmania_state_boundaries',
                'tasmania_water_bodies',
                'tasmania_roads',
                'tasmania_cities'
            ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        # Try to create a new Layer Group into the "topp" workspace
        self.assert_(self.cat.get_workspaces("topp")[0] is not None)
        tas2 = self.cat.create_layergroup("tasmania_reloaded", tas.layers, workspace = "topp")
        self.cat.save(tas2)
        self.assertEqual(1, len(self.cat.get_layergroups(names='tasmania_reloaded', workspaces="topp")))
        tas2 = self.cat.get_layergroups(names='tasmania_reloaded', workspaces="topp")[0]
        self.assert_("tasmania_reloaded", tas2.name)
        self.assert_(isinstance(tas2, LayerGroup))
        self.assertEqual(tas2.workspace, "topp", tas2.workspace)
        if self.gs_version >= "2.13":
            self.assertEqual(tas2.layers, [
                'topp:tasmania_state_boundaries',
                'topp:tasmania_water_bodies',
                'topp:tasmania_roads',
                'topp:tasmania_cities'
            ], tas2.layers)
        else:
            self.assertEqual(tas2.layers, [
                'tasmania_state_boundaries',
                'tasmania_water_bodies',
                'tasmania_roads',
                'tasmania_cities'
            ], tas2.layers)
        self.assertEqual(tas2.styles, [None, None, None, None], tas2.styles)

    def testStyles(self):
        self.assertEqual("population", self.cat.get_styles("population")[0].name)
        self.assertEqual("popshade.sld", self.cat.get_styles("population")[0].filename)
        self.assertEqual("population", self.cat.get_styles("population")[0].sld_name)
        self.assertEqual("population", self.cat.get_style("population").sld_name)
        self.assertIsNone(self.cat.get_style("blah+#5blah-"))
        self.assertEqual(0, len(self.cat.get_styles('non-existing-style')))

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_styles("best style ever")
        self.cat.get_workspaces("best workspace ever")
        self.assertEqual(0, len(self.cat.get_stores(workspaces="best workspace ever", names="best store ever")))
        self.cat.get_layer("best layer ever")
        self.cat.get_layergroups("best layergroup ever")

    def testUnicodeUrl(self):
        """
        Tests that the geoserver.support.url function support unicode strings.
        """

        # Test the url function with unicode
        seg = ['workspaces', 'test', 'datastores', u'operaci\xf3n_repo', 'featuretypes.xml']
        u = build_url(base=self.cat.service_url, seg=seg)
        self.assertEqual(u, self.cat.service_url + "/workspaces/test/datastores/operaci%C3%B3n_repo/featuretypes.xml")

        # Test the url function with normal string
        seg = ['workspaces', 'test', 'datastores', 'test-repo', 'featuretypes.xml']
        u = build_url(base=self.cat.service_url, seg=seg)
        self.assertEqual(u, self.cat.service_url + "/workspaces/test/datastores/test-repo/featuretypes.xml")
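
The CatalogTests above are read-only queries against the GeoServer sample data. Outside a test harness, the same lookups might be sketched as follows; the URL, credentials, and the "topp"/"states" names are assumptions tied to the default sample configuration.

from geoserver.catalog import Catalog

# connection details are assumptions; adjust to your instance
cat = Catalog("http://localhost:8080/geoserver/rest", username="admin", password="geoserver")

# list workspaces and the stores inside one of them
for ws in cat.get_workspaces():
    print(ws.name)

topp = cat.get_workspace("topp")  # present in the default sample data
for store in cat.get_stores(workspaces=topp.name):
    print(store.name, store.resource_type)

# look up a single resource by name
states = cat.get_resource("states")
print(states.title, states.projection)
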
Example #31
0
class ModifyingTests(unittest.TestCase):

    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD'])

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
                        [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
                        rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)


        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            try:
                if lyr:
                    self.cat.delete(lyr)
                    self.cat.delete(lyr.resource)
                if ds:
                    self.cat.delete(ds)
            except:
                pass

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    @drop_table('import2')
    def testVirtualTables(self):
        ds = self.cat.create_datastore("gsconfig_import_test2")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test2")
        self.cat.add_data_to_store(ds, "import2", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })
        store = self.cat.get_store("gsconfig_import_test2")
        geom = JDBCVirtualTableGeometry('the_geom','MultiPolygon','4326')
        ft_name = 'my_jdbc_vt_test'
        epsg_code = 'EPSG:4326'
        sql = "select * from import2 where 'STATE_NAME' = 'Illinois'"
        keyColumn = None
        parameters = None

        jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn, parameters)
        ft = self.cat.publish_featuretype(ft_name, store, epsg_code, jdbc_virtual_table=jdbc_vt)

    # DISABLED; this test works only in the very particular case
    # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR
    # def testCoverageStoreCreate(self):
    #     ds = self.cat.create_coveragestore2("coverage_gsconfig")
    #     ds.data_url = "file:test/data/mytiff.tiff"
    #     self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

        # Testing projection and projection policy changes
        changed_layer.projection = "EPSG:900913"
        changed_layer.projection_policy = "REPROJECT_TO_DECLARED"
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        layer = self.cat.get_layer(changed_layer.name)
        self.assertEqual(layer.resource.projection_policy, changed_layer.projection_policy)
        self.assertEqual(layer.resource.projection, changed_layer.projection)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)


    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw':  'test/data/Pk50095.tfw',
            'prj':  'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        self.assertRaises(
                ConflictingDataError,
                lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw':  'test/data/states.shx',
            'prj':  'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

        ft_ext = self.cat.create_coveragestore_external_geotiff("Pk50095_ext", 'file:test/data/Pk50095.tif', sf)

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = { 'title': 'Not the original attribution',
                            'width': '123',
                            'height': '321',
                            'href': 'http://www.georchestra.org',
                            'url': 'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg',
                            'type': 'image/jpeg' }

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        old_default_style = lyr.default_style
        lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])


    def testStyles(self):
        # check count before tests (upload)
        count = len(self.cat.get_styles())

        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

        # compare count after upload
        self.assertEqual(count +1, len(self.cat.get_styles()))

        # attempt creating a new style without "title"
        self.cat.create_style("notitle", open("test/notitle.sld").read())
        notitle = self.cat.get_style("notitle")
        self.assertEqual(None, notitle.sld_title)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testImageMosaic(self):
        """
            Test case for Issue #110
        """
        # testing the mosaic creation
        name = 'cea_mosaic'
        data = open('test/data/mosaic/cea.zip', 'rb')
        self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource

        self.assert_(resource is not None)

        # delete granule from mosaic
        coverage = name
        store = self.cat.get_store(name)
        granule_id = name + '.1'
        self.cat.mosaic_delete_granule(coverage, store, granule_id)


    def testTimeDimension(self):
        sf = self.cat.get_workspace("sf")
        files = shapefile_and_friends(os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)

        get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer('boxes_with_end_date').resource

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date")
        resource.metadata = {'time':timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time' : timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time' : timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)
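
The ModifyingTests above repeat the same save round trip: fetch an object, change attributes, call cat.save(), then re-fetch to confirm. A minimal sketch of that pattern follows; the server URL, credentials, and the "bugsites" layer are assumptions.

from geoserver.catalog import Catalog

# connection details are assumptions
cat = Catalog("http://localhost:8080/geoserver/rest", "admin", "geoserver")

# modify-and-save round trip, mirroring testFeatureTypeSave above
rs = cat.get_resource("bugsites")
rs.abstract = "Updated via gsconfig"
rs.keywords = ["bugsites", "gsconfig"]
cat.save(rs)

# re-fetch to confirm the server accepted the change
rs = cat.get_resource("bugsites")
print(rs.abstract, rs.keywords)
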
Example #32
0
    def __init__(self, gs_axx, dico_gs, tipo, txt=''):
        """Use OGR functions to extract basic informations about geoserver.

        gs_axx = tuple like {url of a geoserver, user, password)
        dico_gs = dictionary to store
        tipo = format
        text = dictionary of text in the selected language
        """
        # connection
        cat = Catalog(gs_axx[0],
                      gs_axx[1],
                      gs_axx[2],
                      disable_ssl_certificate_validation=gs_axx[3])
        # print(dir(cat))

        # -- WORKSPACES -------------------------------------------------------
        workspaces = cat.get_workspaces()
        for wk in workspaces:
            # print(wk.name, wk.enabled, wk.resource_type, wk.wmsstore_url)
            dico_gs[wk.name] = wk.href, {}
        # print(dir(wk))

        # -- STORES -----------------------------------------------------------
        # stores = cat.get_stores()
        # for st in stores:
        #     # print(st.name, st.enabled, st.href, st.resource_type)
        #     if hasattr(st, 'url'):
        #         url = st.url
        #     elif hasattr(st, 'resource_url'):
        #         url = st.resource_url
        #     else:
        #         print(dir(st))

        #     dico_gs.get(st.workspace.name)[1][st.name] = {"ds_type": st.type,
        #                                                   "ds_url": url}

        # print(dir(st))
        # print(st.url)
        # print(st.workspace)
        # print(dir(st.workspace))
        # print(st.workspace.name)

        # -- LAYERS -----------------------------------------------------------
        # resources_target = cat.get_resources(workspace='ayants-droits')
        layers = cat.get_layers()
        logging.info("{} layers found".format(len(layers)))
        dico_layers = OrderedDict()
        for layer in layers:
            # print(layer.resource_type)
            lyr_title = layer.resource.title
            lyr_name = layer.name
            lyr_wkspace = layer.resource._workspace.name
            if type(layer.resource) is Coverage:
                lyr_type = "coverage"
            elif type(layer.resource) is FeatureType:
                lyr_type = "vector"
            else:
                lyr_type = type(layer.resource)

            # log each layer as it is processed
            logging.info("{} | {} | {} | {}".format(layers.index(layer),
                                                    lyr_type, lyr_name,
                                                    lyr_title))

            # # METADATA LINKS #
            # download link
            md_link_dl = "{0}/geoserver/{1}/ows?request=GetFeature"\
                         "&service=WFS&typeName={1}%3A{2}&version=2.0.0"\
                         "&outputFormat=SHAPE-ZIP"\
                         .format(url_base,
                                 lyr_wkspace,
                                 lyr_name)

            # mapfish links
            md_link_mapfish_wms = "{0}/mapfishapp/?layername={1}"\
                                  "&owstype=WMSLayer&owsurl={0}/"\
                                  "geoserver/{2}/ows"\
                                  .format(url_base,
                                          lyr_name,
                                          lyr_wkspace)

            md_link_mapfish_wfs = "{0}/mapfishapp/?layername={1}"\
                                  "&owstype=WFSLayer&owsurl={0}/"\
                                  "geoserver/{2}/ows"\
                                  .format(url_base,
                                          lyr_name,
                                          lyr_wkspace)

            md_link_mapfish_wcs = "{0}/mapfishapp/?cache=PreferNetwork"\
                                  "&crs=EPSG:2154&format=GeoTIFF"\
                                  "&identifier={1}:{2}"\
                                  "&url={0}/geoserver/ows?"\
                                  .format(url_base,
                                          lyr_wkspace,
                                          lyr_name)

            # OC links
            md_link_oc_wms = "{0}/geoserver/{1}/wms?layers={1}:{2}"\
                             .format(url_base,
                                     lyr_wkspace,
                                     lyr_name)
            md_link_oc_wfs = "{0}/geoserver/{1}/ows?typeName={1}:{2}"\
                             .format(url_base,
                                     lyr_wkspace,
                                     lyr_name)

            md_link_oc_wcs = "{0}/geoserver/{1}/ows?typeName={1}:{2}"\
                             .format(url_base,
                                     lyr_wkspace,
                                     lyr_name)

            # CSW Querier links
            md_link_csw_wms = "{0}/geoserver/ows?service=wms&version=1.3.0"\
                              "&request=GetCapabilities".format(url_base)

            md_link_csw_wfs = "{0}/geoserver/ows?service=wfs&version=2.0.0"\
                              "&request=GetCapabilities".format(url_base)

            # # # SERVICE LINKS #
            # GeoServer Edit links
            gs_link_edit = "{}/geoserver/web/?wicket:bookmarkablePage="\
                           ":org.geoserver.web.data.resource."\
                           "ResourceConfigurationPage"\
                           "&name={}"\
                           "&wsName={}".format(url_base,
                                               lyr_name,
                                               lyr_wkspace)

            # Metadata links (service => metadata)
            if is_uuid(dict_match_gs_md.get(lyr_name)):
                # HTML metadata
                md_uuid_pure = dict_match_gs_md.get(lyr_name)
                srv_link_html = "{}/portail/geocatalogue?uuid={}"\
                                .format(url_base, md_uuid_pure)

                # XML metadata
                md_uuid_formatted = "{}-{}-{}-{}-{}".format(
                    md_uuid_pure[:8], md_uuid_pure[8:12], md_uuid_pure[12:16],
                    md_uuid_pure[16:20], md_uuid_pure[20:])
                srv_link_xml = "http://services.api.isogeo.com/ows/s/"\
                               "{1}/{2}?"\
                               "service=CSW&version=2.0.2&request=GetRecordById"\
                               "&id=urn:isogeo:metadata:uuid:{0}&"\
                               "elementsetname=full&outputSchema="\
                               "http://www.isotc211.org/2005/gmd"\
                               .format(md_uuid_formatted,
                                       csw_share_id,
                                       csw_share_token)
                # add to GeoServer layer
                rzourc = cat.get_resource(lyr_name,
                                          store=layer.resource._store.name)
                rzourc.metadata_links = [
                    ('text/html', 'ISO19115:2003', srv_link_html),
                    ('text/xml', 'ISO19115:2003', srv_link_xml),
                    ('text/html', 'TC211', srv_link_html),
                    ('text/xml', 'TC211', srv_link_xml)
                ]
                # rzourc.metadata_links.append(('text/html', 'other', 'hohoho'))
                cat.save(rzourc)

            else:
                logging.info("Service without metadata: {} ({})".format(
                    lyr_name, dict_match_gs_md.get(lyr_name)))
                # avoid carrying over link values from a previously matched layer
                srv_link_html = srv_link_xml = md_uuid_pure = None

            dico_layers[layer.name] = {
                "title": lyr_title,
                "workspace": lyr_wkspace,
                "store_name": layer.resource._store.name,
                "store_type": layer.resource._store.type,
                "lyr_type": lyr_type,
                "md_link_dl": md_link_dl,
                "md_link_mapfish": md_link_mapfish_wms,
                "md_link_mapfish_wms": md_link_mapfish_wms,
                "md_link_mapfish_wfs": md_link_mapfish_wfs,
                "md_link_mapfish_wcs": md_link_mapfish_wcs,
                "md_link_oc_wms": md_link_oc_wms,
                "md_link_oc_wfs": md_link_oc_wfs,
                "md_link_oc_wcs": md_link_oc_wcs,
                "md_link_csw_wms": md_link_csw_wms,
                "md_link_csw_wfs": md_link_csw_wfs,
                "gs_link_edit": gs_link_edit,
                "srv_link_html": srv_link_html,
                "srv_link_xml": srv_link_xml,
                "md_id_matching": md_uuid_pure
            }

            # mem clean up
            # del dico_layer

        # print(dico_gs.get(layer.resource._workspace.name)[1][layer.resource._store.name])
        # print(dir(layer.resource))
        # print(dir(layer.resource.writers))
        # print(dir(layer.resource.writers.get("metadataLinks").func_dict))
        # print(layer.resource.metadata)
        # print(layer.resource.metadata_links)

        dico_gs["layers"] = dico_layers
Example #33
0
def geoserver_pre_save(instance, sender, **kwargs):
    """Send information to geoserver.

       The attributes sent include:

        * Title
        * Abstract
        * Name
        * Keywords
        * Metadata Links,
        * Point of Contact name and url
    """
    url = ogc_server_settings.rest
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (EnvironmentError, FailedRequestError) as e:
        gs_resource = None
        msg = ('Could not connect to geoserver at "%s" '
               'to save information for layer "%s"' % (
                ogc_server_settings.LOCATION, instance.name.encode('utf-8'))
              )
        logger.warning("%s: %s", msg, e)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return

    gs_resource.title = instance.title
    gs_resource.abstract = instance.abstract
    gs_resource.name = instance.name

    # Get metadata links
    metadata_links = []
    for link in instance.link_set.metadata():
        metadata_links.append((link.name, link.mime, link.url))

    gs_resource.metadata_links = metadata_links
    #gs_resource should only be called if ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings,"BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)

    gs_layer = gs_catalog.get_layer(instance.name)

    if instance.poc and instance.poc.user:
        gs_layer.attribution = str(instance.poc.user)
        profile = Profile.objects.get(user=instance.poc.user)
        gs_layer.attribution_link = settings.SITEURL[:-1] + profile.get_absolute_url()
        #gs_layer should only be called if ogc_server_settings.BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings,"BACKEND_WRITE_ENABLED", True):
            gs_catalog.save(gs_layer)

    """Get information from geoserver.

       The attributes retrieved include:

       * Bounding Box
       * SRID
       * Download links (WMS, WCS or WFS and KML)
       * Styles (SLD)
    """
    gs_resource = gs_catalog.get_resource(instance.name)

    bbox = gs_resource.latlon_bbox

    #FIXME(Ariel): Correct srid setting below
    #self.srid = gs_resource.src

    # Set bounding box values

    instance.bbox_x0 = bbox[0]
    instance.bbox_x1 = bbox[1]
    instance.bbox_y0 = bbox[2]
    instance.bbox_y1 = bbox[3]

    instance.update_thumbnail(save=False)
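
A handler like geoserver_pre_save only runs if it is connected to Django's pre_save signal for the layer model. A minimal wiring sketch follows, assuming GeoNode's Layer model lives at geonode.layers.models (the import path is an assumption about this particular code base).

from django.db.models import signals

from geonode.layers.models import Layer  # model import path is an assumption

# run the handler before every Layer save
signals.pre_save.connect(geoserver_pre_save, sender=Layer)
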
Example #34
0
def geoserver_pre_save(instance, sender, **kwargs):
    """Send information to geoserver.

       The attributes sent include:

        * Title
        * Abstract
        * Name
        * Keywords
        * Metadata Links,
        * Point of Contact name and url
    """
    url = ogc_server_settings.internal_rest
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (EnvironmentError, FailedRequestError) as e:
        gs_resource = None
        msg = ('Could not connect to geoserver at "%s" '
               'to save information for layer "%s"' % (
                ogc_server_settings.LOCATION, instance.name.encode('utf-8'))
              )
        logger.warning("%s: %s", msg, e)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return

    gs_resource.title = instance.title
    gs_resource.abstract = instance.abstract
    gs_resource.name = instance.name

    # Get metadata links
    metadata_links = []
    for link in instance.link_set.metadata():
        metadata_links.append((link.name, link.mime, link.url))

    gs_resource.metadata_links = metadata_links
    #gs_resource should only be called if ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings,"BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)

    gs_layer = gs_catalog.get_layer(instance.name)

    if instance.poc and instance.poc.user:
        gs_layer.attribution = str(instance.poc.user)
        profile = Profile.objects.get(user=instance.poc.user)
        gs_layer.attribution_link = settings.SITEURL[:-1] + profile.get_absolute_url()
        #gs_layer should only be called if ogc_server_settings.BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings,"BACKEND_WRITE_ENABLED", True):
            gs_catalog.save(gs_layer)

    """Get information from geoserver.

       The attributes retrieved include:

       * Bounding Box
       * SRID
       * Download links (WMS, WCS or WFS and KML)
       * Styles (SLD)
    """
    gs_resource = gs_catalog.get_resource(instance.name)

    bbox = gs_resource.latlon_bbox

    #FIXME(Ariel): Correct srid setting below
    #self.srid = gs_resource.src

    # Set bounding box values

    instance.bbox_x0 = bbox[0]
    instance.bbox_x1 = bbox[1]
    instance.bbox_y0 = bbox[2]
    instance.bbox_y1 = bbox[3]

    instance.update_thumbnail(save=False)
Example #35
0
def _register_cascaded_layers(service, owner=None):
    """
    Register layers for a cascading WMS
    """
    if service.type == "WMS" or service.type == "OWS":
        cat = Catalog(settings.OGC_SERVER["default"]["LOCATION"] + "rest", _user, _password)
        # Can we always assume that it is geonode?
        # Should cascading layers have a separate workspace?
        cascade_ws = cat.get_workspace(service.name)
        if cascade_ws is None:
            cascade_ws = cat.create_workspace(service.name, "cascade")
        try:
            store = cat.get_store(service.name, cascade_ws)
        except Exception:
            store = cat.create_wmsstore(service.name, cascade_ws)
            cat.save(store)
        wms = WebMapService(service.base_url)
        layers = list(wms.contents)
        count = 0
        for layer in layers:
            lyr = cat.get_resource(layer, store, cascade_ws)
            if lyr is None:
                if service.type in ["WMS", "OWS"]:
                    resource = cat.create_wmslayer(cascade_ws, store, layer)
                elif service.type == "WFS":
                    resource = cat.create_wfslayer(cascade_ws, store, layer)

                if resource:
                    bbox = resource.latlon_bbox
                    cascaded_layer, created = Layer.objects.get_or_create(
                        typename="%s:%s" % (cascade_ws.name, resource.name),
                        service=service,
                        defaults={
                            "name": resource.name,
                            "workspace": cascade_ws.name,
                            "store": store.name,
                            "storeType": store.resource_type,
                            "title": resource.title or "No title provided",
                            "abstract": resource.abstract or "No abstract provided",
                            "owner": None,
                            "uuid": str(uuid.uuid4()),
                            "bbox_x0": bbox[0],
                            "bbox_x1": bbox[1],
                            "bbox_y0": bbox[2],
                            "bbox_y1": bbox[3],
                        },
                    )

                    if created:
                        cascaded_layer.save()
                        if cascaded_layer is not None and cascaded_layer.bbox is None:
                            cascaded_layer._populate_from_gs(gs_resource=resource)
                        cascaded_layer.set_default_permissions()

                        service_layer, created = ServiceLayer.objects.get_or_create(
                            service=service, typename=cascaded_layer.name
                        )
                        service_layer.layer = cascaded_layer
                        service_layer.title = cascaded_layer.title
                        service_layer.description = cascaded_layer.abstract
                        service_layer.styles = cascaded_layer.styles
                        service_layer.save()

                        count += 1
                    else:
                        logger.error("Resource %s from store %s could not be saved as layer" % (layer, store.name))
        message = "%d Layers Registered" % count
        return_dict = {"status": "ok", "msg": message}
        return HttpResponse(json.dumps(return_dict), mimetype="application/json", status=200)
    elif service.type == "WCS":
        return HttpResponse("Not Implemented (Yet)", status=501)
    else:
        return HttpResponse("Invalid Service Type", status=400)
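
# Standalone sketch of the owslib step used above: connect to the remote WMS
# and list the layer names that would be cascaded. The endpoint URL is a
# placeholder, not taken from the example.
from owslib.wms import WebMapService

wms = WebMapService("http://example.com/geoserver/wms")  # hypothetical endpoint
for name in list(wms.contents):
    print name, "-", wms[name].title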
Beispiel #36
0
def geoserver_post_save(instance, sender, **kwargs):
    """Save keywords to GeoServer

       The way keywords are implemented requires the layer
       to be saved to the database before accessing them.
    """
    url = ogc_server_settings.internal_rest
    
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (FailedRequestError, EnvironmentError) as e:
        msg = ('Could not connect to geoserver at "%s" '
               'to save information for layer "%s"' % (
                   ogc_server_settings.LOCATION, instance.name.encode('utf-8'))
               )
        logger.warn('%s (%s)', msg, e)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return

    gs_resource.keywords = instance.keyword_list()
    # gs_catalog.save(gs_resource) should only be called if ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)

    bbox = gs_resource.latlon_bbox
    dx = float(bbox[1]) - float(bbox[0])
    dy = float(bbox[3]) - float(bbox[2])

    dataAspect = 1 if dy == 0 else dx / dy

    height = 550
    width = int(height * dataAspect)

    # Set download links for WMS, WCS or WFS and KML

    links = wms_links(ogc_server_settings.public_url + 'wms?',
                    instance.typename.encode('utf-8'), instance.bbox_string,
                    instance.srid, height, width)

    for ext, name, mime, wms_url in links:
        Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        name=ugettext(name),
                        defaults=dict(
                            extension=ext,
                            url=wms_url,
                            mime=mime,
                            link_type='image',
                           )
                        )

    if instance.storeType == "dataStore":
        links = wfs_links(ogc_server_settings.public_url + 'wfs?', instance.typename.encode('utf-8'))
        for ext, name, mime, wfs_url in links:
            Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                            url=wfs_url,
                            defaults=dict(
                                extension=ext,
                                name=name,
                                mime=mime,
                                url=wfs_url,
                                link_type='data',
                            )
                        )


    elif instance.storeType == 'coverageStore':
        #FIXME(Ariel): This works for public layers, does it work for restricted too?
        # would those end up with no geotiff links, like, forever?
        permissions = {}
        permissions['anonymous'] = instance.get_gen_level(ANONYMOUS_USERS)
        permissions['authenticated'] = instance.get_gen_level(AUTHENTICATED_USERS)
        instance.set_gen_level(ANONYMOUS_USERS,'layer_readonly')

        links = wcs_links(ogc_server_settings.public_url + 'wcs?', instance.typename.encode('utf-8'),
            bbox=instance.bbox[:-1], crs=instance.bbox[-1], height=height, width=width)
        for ext, name, mime, wcs_url in links:
            Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                                url=wcs_url,
                                defaults=dict(
                                    extension=ext,
                                    name=name,
                                    mime=mime,
                                    link_type='data',
                                )
                            )
            
        instance.set_gen_level(ANONYMOUS_USERS,permissions['anonymous'])
        instance.set_gen_level(AUTHENTICATED_USERS,permissions['authenticated'])

    kml_reflector_link_download = ogc_server_settings.public_url + "wms/kml?" + urllib.urlencode({
        'layers': instance.typename.encode('utf-8'),
        'mode': "download"
    })

    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=kml_reflector_link_download,
                        defaults=dict(
                            extension='kml',
                            name=_("KML"),
                            mime='text/xml',
                            link_type='data',
                        )
                    )

    kml_reflector_link_view = ogc_server_settings.public_url + "wms/kml?" + urllib.urlencode({
        'layers': instance.typename.encode('utf-8'),
        'mode': "refresh"
    })

    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=kml_reflector_link_view,
                        defaults=dict(
                            extension='kml',
                            name=_("View in Google Earth"),
                            mime='text/xml',
                            link_type='data',
                        )
                    )

    tile_url = ('%sgwc/service/gmaps?' % ogc_server_settings.public_url +
                'layers=%s' % instance.typename.encode('utf-8') +
                '&zoom={z}&x={x}&y={y}' +
                '&format=image/png8'
                )

    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=tile_url,
                        defaults=dict(
                            extension='tiles',
                            name=_("Tiles"),
                            mime='image/png',
                            link_type='image',
                            )
                        )


    html_link_url = '%s%s' % (settings.SITEURL[:-1], instance.get_absolute_url())

    Link.objects.get_or_create(resource= instance.resourcebase_ptr,
                        url=html_link_url,
                        defaults=dict(
                            extension='html',
                            name=instance.typename,
                            mime='text/html',
                            link_type='html',
                            )
                        )

    # Remove links that belong to an old address

    for link in instance.link_set.all():
        if not urlparse(settings.SITEURL).hostname == urlparse(link.url).hostname and not \
                    urlparse(ogc_server_settings.public_url).hostname == urlparse(link.url).hostname:
            link.delete()

    #Save layer attributes
    set_attributes(instance)

    #Save layer styles
    set_styles(instance, gs_catalog)
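
# Standalone sketch of the sizing logic used above for the WMS/WCS download
# links: the width is derived from a fixed height and the layer's lat/lon
# aspect ratio. The bbox below is made up for illustration.
def link_dimensions(bbox, height=550):
    dx = float(bbox[1]) - float(bbox[0])
    dy = float(bbox[3]) - float(bbox[2])
    data_aspect = 1 if dy == 0 else dx / dy
    return height, int(height * data_aspect)

print link_dimensions((-180, 180, -90, 90))  # (550, 1100)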
Beispiel #37
0
class CatalogTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testAbout(self):
        about_html = self.cat.about()
        self.assertTrue(
            '<html xmlns="http://www.w3.org/1999/xhtml"' in about_html)

    def testGSVersion(self):
        version = self.cat.gsversion()
        pat = re.compile('\d\.\d(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)

    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile", topp).name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))

        self.assertEqual("states",
                         self.cat.get_resource("states", states).name)
        self.assertEqual("states",
                         self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")

        fields = [
            states.title, states.abstract, states.native_bbox,
            states.latlon_bbox, states.projection, states.projection_policy
        ]

        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)

        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem",
                         self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testLayers(self):
        expected = set([
            "Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies",
            "tasmania_state_boundaries", "tasmania_cities", "states",
            "poly_landmarks", "tiger_roads", "poi", "giant_polygon"
        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)

        states = self.cat.get_layer("states")

        self.assertEqual("states", states.name)
        self.assertTrue(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles),
                         set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)

        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual("tasmania", tas.name)
        self.assertTrue(isinstance(tas, LayerGroup))
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

    def testStyles(self):
        self.assertEqual(20, len(self.cat.get_styles()))
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld",
                         self.cat.get_style("population").filename)
        self.assertEqual("population",
                         self.cat.get_style("population").sld_name)
        self.assertTrue(self.cat.get_style('non-existing-style') is None)

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        try:
            self.cat.get_store(workspace="best workspace ever",
                               name="best store ever")
            self.fail('expected exception')
        except FailedRequestError, fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
        try:
            self.cat.get_resource(workspace="best workspace ever",
                                  store="best store ever",
                                  name="best resource ever")
        except FailedRequestError, fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
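
# A minimal way to run just the CatalogTests above against the local GeoServer
# assumed in setUp (stock sample data at http://localhost:8080/geoserver).
import unittest

suite = unittest.TestLoader().loadTestsFromTestCase(CatalogTests)
unittest.TextTestRunner(verbosity=2).run(suite)
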
cat = Catalog("http://localhost:8080/geoserver/rest")

resources = cat.get_resources()
gliStores = cat.get_stores()
iWorkspaces = cat.get_workspaces()

for loWorkspace in iWorkspaces:
    print loWorkspace.name

print

for loStore in gliStores:
    print loStore.name

print

iLayers = cat.get_layers()

for ilLayer in iLayers:
    ilNomeLayer = ilLayer.name
    print ilNomeLayer

print

for ilLayer in iLayers:
    ilNomeLayer = ilLayer.name
    if ilNomeLayer == 'ports_eu':
        ilLayerSelezionatoRisorsa = cat.get_resource(ilNomeLayer)
        print ilLayerSelezionatoRisorsa.attributes
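
# Follow-up sketch: the `resources` list fetched at the top of this snippet is
# never used; it can be walked the same way to pair each resource name with
# its title.
for res in resources:
    print res.name, "-", res.title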