Example #1
def _create_geoserver_geonode_layer(new_table, sld_type, title, the_user,
                                    permissions):
    # Create the Layer in GeoServer from the table
    try:
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      settings.OGC_SERVER['default']['USER'],
                      settings.OGC_SERVER['default']['PASSWORD'])
        ds = cat.get_store("uploaded")  # name of store in WFP-Geonode
        cat.publish_featuretype(new_table, ds, "EPSG:4326", srs="EPSG:4326")

        # change the layer's title to the title set by the user
        resource = cat.get_resource(new_table, workspace="geonode")
        resource.title = title
        cat.save(resource)

    except Exception as e:

        msg = "Error creating GeoServer layer for %s: %s" % (new_table, str(e))
        print msg
        return None, msg

    # Create the Layer in GeoNode from the GeoServer Layer
    try:

        link_to_sld = "{location}styles/{sld_type}".format(
            **{
                'location': settings.OGC_SERVER['default']['LOCATION'],
                'sld_type': sld_type
            })

        r = requests.get(link_to_sld)
        sld = r.text
        # "name_of_layer" is the placeholder set in the predefined SLDs in
        # GeoServer (polygon_style, line_style, point_style)
        sld = sld.replace("name_of_layer", new_table)
        cat.create_style(new_table, sld, overwrite=True)
        style = cat.get_style(new_table)
        layer = cat.get_layer(new_table)
        layer.default_style = style
        cat.save(layer)
        gs_slurp(owner=the_user, filter=new_table)

        from geonode.base.models import ResourceBase
        layer = ResourceBase.objects.get(title=title)
        geoserver_post_save(layer, ResourceBase)

        # assign permissions for this layer
        permissions_dict = json.loads(permissions)  # needs to be dictionary
        if permissions_dict is not None and len(permissions_dict.keys()) > 0:
            layer.set_permissions(permissions_dict)

    except Exception as e:
        msg = "Error creating GeoNode layer for %s: %s" % (new_table, str(e))
        return None, msg
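
A hedged sketch of how the helper above might be called; the table name, title and permissions payload are made-up values, sld_type must be one of the predefined GeoServer styles the code mentions (polygon_style, line_style, point_style), and the_user is whatever GeoNode user should own the layer. Note that, as written, the function only returns a (None, message) tuple on failure and falls through (returning None) on success:

result = _create_geoserver_geonode_layer(
    new_table="parcels_2024",          # hypothetical table in the "uploaded" PostGIS store
    sld_type="polygon_style",          # one of the predefined SLDs in GeoServer
    title="Parcels 2024",
    the_user=request.user,             # the GeoNode user who should own the layer
    permissions='{"users": {"AnonymousUser": ["view_resourcebase"]}}',  # hypothetical perm spec
)
if result is not None:                 # only the error paths return a value
    _, error_message = result
    print(error_message)
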
Example #2
    def addLayersToGeoserver(self, options):
        cat = Catalog(self.geoserver_rest_url, options["geoserveradmin"], options["gpw"])

        try:
            ds = cat.get_store(options["alias"])
        except Exception as e:
            raise Exception("Erreur de récupération du workspace")

        layers = []
        try:
            # connect to tables and create layers and correct urban styles
            for table in self.urb:
                try:
                    style = self.urb[table]
                    ft = cat.publish_featuretype(table, ds, "EPSG:31370", srs="EPSG:31370")
                    gs_style = cat.get_style(style)
                    cat.save(ft)
                    res_name = ft.dirty["name"]
                    res_title = options["alias"] + "_" + table
                    cat.save(ft)
                    layer_name = ds.workspace.name + ":" + res_name
                    new_layer = cat.get_layer(layer_name)
                    new_layer.default_style = gs_style
                    cat.save(new_layer)
                    layers.append({"res_name": res_name, "res_title": res_title})
                except Exception as e:
                    # to check once the styles are available
                    print(str(e))

        except Exception as e:
            print(str(e))
            raise Exception("Erreur lors de la récupération des couches depuis Geoserver")

        return layers
Example #3
def main(options):
  #connect to geoserver
  cat = Catalog("http://localhost:8080/geoserver/rest", "admin", options.gpw)
  
  #create datrastore for URB schema
  ws = cat.create_workspace(options.alias,'imio.be')
  
  ds = cat.create_datastore(options.alias, ws)
  ds.connection_parameters.update(
      host=options.urbanUrl,
      port="5432",
      database=options.database,
      user="******",
      passwd=options.ropw,
      dbtype="postgis")
  
  cat.save(ds)
  ds = cat.get_store(options.alias)
  
  # config object
  urb = {
      "capa": "Parcelles",
      "toli": "cadastre_ln_toponymiques",
      "canu": "cadastre_pt_num",
      "cabu": "Batiments",
      "gept": "cadastre_points_generaux",
      "gepn": "cadastre_pol_gen",
      "inpt": "point",
      "geli": "cadastre_ln_generales",
      "inli": "cadastre_ln_informations",
      "topt": "point",
  }
  	
  #connect to tables and create layers and correct urban styles
  for table in urb:
    style = urb[table]
    ft = cat.publish_featuretype(table, ds, 'EPSG:31370', srs='EPSG:31370')
    ft.default_style = style
    cat.save(ft)
    resource = ft.resource
    resource.title = options.alias+"_"+table
    resource.save()
    
    layer, created = Layer.objects.get_or_create(name=layerName, defaults={
        "workspace": ws.name,
        "store": ds.name,
        "storeType": ds.resource_type,
        "typename": "%s:%s" % (ws.name.encode('utf-8'), resource.name.encode('utf-8')),
        "title": resource.title or 'No title provided',
        "abstract": resource.abstract or 'No abstract provided',
        # "owner": owner,
        "uuid": str(uuid.uuid4()),
        "bbox_x0": Decimal(resource.latlon_bbox[0]),
        "bbox_x1": Decimal(resource.latlon_bbox[1]),
        "bbox_y0": Decimal(resource.latlon_bbox[2]),
        "bbox_y1": Decimal(resource.latlon_bbox[3])
    })
    set_attributes(layer, overwrite=True)
    if created: layer.set_default_permissions()
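
The script above reads gpw, alias, urbanUrl, database and ropw off an options object (and also relies on Layer, uuid, Decimal, set_attributes and layerName being defined elsewhere in the original file). A minimal optparse sketch that would supply those attributes; the flag names are assumptions:

from optparse import OptionParser

parser = OptionParser()
parser.add_option("--alias", dest="alias", help="workspace / datastore name")
parser.add_option("--gpw", dest="gpw", help="GeoServer admin password")
parser.add_option("--ropw", dest="ropw", help="read-only PostGIS password")
parser.add_option("--urbanUrl", dest="urbanUrl", help="PostGIS host")
parser.add_option("--database", dest="database", help="PostGIS database name")
options, args = parser.parse_args()
main(options)
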
Example #4
def handle_geoserver_layers(view_names,
                            store_name,
                            db_params,
                            schema_name,
                            logger=print):
    """Publish database views as GeoServer layers

    Parameters
    ----------
    view_names: list
        An iterable with the names of the database views that are to be
        published as GeoServer layers
    store_name: str
        Name of the GeoServer store to use. This store will be created in case
        it does not exist
    db_params: dict
        A mapping with parameters used for connecting to the database
    schema_name: str
        Name of the database schema where the views to publish reside
    logger: function, optional
        A function that is used to output information.

    Returns
    -------
    list
        An iterable with the geoserver layers that have been published

    """

    gs_catalog = Catalog(
        service_url=settings.OGC_SERVER["default"]["LOCATION"] + "rest",
        username=settings.OGC_SERVER["default"]["USER"],
        password=settings.OGC_SERVER["default"]["PASSWORD"])
    logger("Retrieving geoserver workspace...")
    workspace = get_geoserver_workspace(gs_catalog)
    store = gs_catalog.get_store(store_name, workspace=workspace)
    if store is None:
        logger("Creating geoserver store...")
        store = get_postgis_store(gs_catalog, store_name, workspace, db_params,
                                  schema_name)
    layers = []
    for view_name in view_names:
        logger("Adding {!r} as a geoserver layer...".format(view_name))
        featuretype = gs_catalog.publish_featuretype(view_name,
                                                     store,
                                                     "EPSG:4326",
                                                     srs="EPSG:4326")
        gs_catalog.save(featuretype)
        layers.append(featuretype)
    return layers
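
A possible call to the function above. get_geoserver_workspace, get_postgis_store and the exact keys expected in db_params live in the surrounding project, so the values below are assumptions that mirror the PostGIS connection parameters used in the other examples on this page:

db_params = {
    "host": "localhost",
    "port": "5432",
    "database": "mydb",
    "user": "geonode",
    "passwd": "secret",
}
published = handle_geoserver_layers(
    view_names=["coarse_view", "detail_view"],   # hypothetical database views
    store_name="uploaded",
    db_params=db_params,
    schema_name="public",
)
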
Example #5
def handle_geoserver_layer(table_name, store_name, db_params, schema_name,
                           default_style_name="", logger=print):
    """Publish a database table/view as a GeoServer layer

    Parameters
    ----------
    table_name: str
        Name of the database table/view that is to be published as a
        GeoServer layer
    store_name: str
        Name of the GeoServer store to use. This store will be created in case
        it does not exist
    db_params: dict
        A mapping with parameters used for connecting to the database
    schema_name: str
        Name of the database schema where the table/view to publish resides
    default_style_name: str, optional
        Name of an existing GeoServer style to set as the layer's default
        style
    logger: function, optional
        A function that is used to output information.

    Returns
    -------
    FeatureType
        The GeoServer feature type that has been published

    """

    logger("inside handle_geoserver_layer: {}".format(locals()))

    gs_catalog = Catalog(
        service_url=settings.OGC_SERVER["default"]["LOCATION"] + "rest",
        username=settings.OGC_SERVER["default"]["USER"],
        password=settings.OGC_SERVER["default"]["PASSWORD"]
    )
    logger("Retrieving geoserver workspace...")
    workspace = get_geoserver_workspace(gs_catalog)
    store = gs_catalog.get_store(store_name, workspace=workspace)
    if store is None:
        logger("Creating geoserver store...")
        store = get_postgis_store(gs_catalog, store_name, workspace, db_params,
                                  schema_name)
    logger("Adding {!r} as a geoserver layer...".format(table_name))
    featuretype = gs_catalog.publish_featuretype(
        table_name, store, "EPSG:4326", srs="EPSG:4326")
    logger("dir(featuretype): {}".format(dir(featuretype)))
    layer = gs_catalog.get_layer(featuretype.name)
    if default_style_name != "":
        logger("Setting default style for layer...")
        layer._set_default_style(default_style_name)
    gs_catalog.save(featuretype)
    gs_catalog.save(layer)
    return featuretype
Example #6
def createLayer():
    cat = Catalog('http://localhost:8081/geoserver/rest/', 'admin',
                  'geoserver')
    store = cat.get_store('test')
    geom = JDBCVirtualTableGeometry('newgeom', 'LineString', '4326')
    ft_name = 'my_jdbc_vt_test'
    epsg_code = 'EPSG:4326'
    sql = 'select ST_MakeLine(wkb_geometry ORDER BY waypoint) As newgeom, assetid, runtime from waypoints group by assetid,runtime'
    keyColumn = None
    parameters = None

    jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn,
                               parameters)
    ft = cat.publish_featuretype(ft_name,
                                 store,
                                 epsg_code,
                                 jdbc_virtual_table=jdbc_vt)
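
The snippet above leaves out its imports. Assuming it uses the gsconfig package (which is what the Catalog/JDBCVirtualTable API suggests), they would look roughly like this; double-check the module paths against your gsconfig version:

from geoserver.catalog import Catalog
from geoserver.support import JDBCVirtualTable, JDBCVirtualTableGeometry

createLayer()
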
Example #7
class UploaderTests(MapStoryTestMixin):
    """
    Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        settings = connection.settings_dict
        params = {'database': settings['NAME'],
                  'passwd': settings['PASSWORD'],
                  'namespace': 'http://www.geonode.org/',
                  'type': 'PostGIS',
                  'dbtype': 'postgis',
                  'host': settings['HOST'],
                  'user': settings['USER'],
                  'port': settings['PORT'],
                  'enabled': "True"}

        store = catalog.create_datastore(settings['NAME'], workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):

        if not os.path.exists(os.path.join(os.path.split(__file__)[0], 'test_ogr')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user('non_admin', 'non_admin')
        self.cat = Catalog(ogc_server_settings.internal_rest, *ogc_server_settings.credentials)
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)

    def generic_import(self, file, configuration_options=[{'index': 0}]):

        f = file
        filename = os.path.join(os.path.dirname(__file__), 'test_ogr', f)

        res = self.import_file(filename, configuration_options=configuration_options)
        res = res[0]

        layer = res['layers'][0]

        self.assertEqual(layer['status'], 'updated')
        layer = Layer.objects.get(name=layer['name'])
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')

        if not filename.endswith('zip'):
            self.assertTrue(layer.attributes.count() >= DataSource(filename)[0].num_fields)

        # make sure we have at least one date or dateTime attribute
        attribute_types = [n.attribute_type for n in layer.attributes.all()]
        self.assertTrue('xsd:dateTime' in attribute_types or 'xsd:date' in attribute_types)

        return layer

    def test_box_with_year_field(self):
        """
        Tests the import of test_box_with_year_field.
        """

        layer = self.generic_import('boxes_with_year_field.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """
        Tests the import of test_boxes_with_date.
        """

        layer = self.generic_import('boxes_with_date.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """
        Tests a CSV with WKT polygon.
        """

        layer = self.generic_import('boxes_with_date.csv', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_iso_date(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_iso_date_zip(self):
        """
        Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import('boxes_with_date_iso_date.zip', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])
        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """
        Tests the import of test_boxes_with_dates_bc.
        """

        layer = self.generic_import('boxes_with_dates_bc.shp', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)

        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """
        Tests the import of point_with_date.geojson.
        """

        layer = self.generic_import('point_with_date.geojson', configuration_options=[{'index': 0,
                                                                                         'convert_to_date': ['date']}])

        # OGR will name geojson layers 'ogrgeojson' we rename to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))

        date_attr = filter(lambda attr: attr.attribute == 'date_as_date', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """
        Tests the import of test_boxes_with_end_date.
        This layer has a date and an end date field that are typed correctly.
        """

        layer = self.generic_import('boxes_with_end_date.shp')

        date_attr = filter(lambda attr: attr.attribute == 'date', layer.attributes)[0]
        end_date_attr = filter(lambda attr: attr.attribute == 'enddate', layer.attributes)[0]

        self.assertEqual(date_attr.attribute_type, 'xsd:date')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:date')

        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute,
                       end_attribute=end_date_attr.attribute)

        self.generic_time_check(layer, attribute=date_attr.attribute, end_attribute=end_date_attr.attribute)

    def test_us_states_kml(self):
        """
        Tests the import of us_states_kml.
        This layer has a date and an end date field that are typed correctly.
        """
        # TODO: Support time in kmls.

        layer = self.generic_import('us_states.kml')

    def test_mojstrovka_gpx(self):
        """
        Tests the import of mojstrovka.gpx.
        This layer has a date and an end date field that are typed correctly.
        """
        layer = self.generic_import('mojstrovka.gpx')
        date_attr = filter(lambda attr: attr.attribute == 'time', layer.attributes)[0]
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')
        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_attr.attribute)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """
        Convenience method to run generic tests on time layers.
        """
        self.cat._cache.clear()
        resource = self.cat.get_resource(layer.name, store=layer.store, workspace=self.workspace)

        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual(attribute, timeInfo.attribute)
        self.assertEqual(end_attribute, timeInfo.end_attribute)

    def test_us_shootings_csv(self):
        """
        Tests the import of US_shootings.csv.
        """

        filename = 'US_shootings.csv'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        layer = self.generic_import(filename, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])
        self.assertEqual(layer.name, 'us_shootings')

        date_field = 'date_as_date'
        configure_time(self.cat.get_layer(layer.name).resource, attribute=date_field)
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """
        Tests the import of US_Civil_Rights_Sitins0.csv.
        """

        filename = 'US_Civil_Rights_Sitins0.csv'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        layer = self.generic_import(f, configuration_options=[{'index': 0, 'convert_to_date': ['Date']}])

    def get_layer_names(self, in_file):
        """
        Gets layer names from a data source.
        """
        ds = DataSource(in_file)
        return map(lambda layer: layer.name, ds)

    def test_gdal_import(self):
        filename = 'point_with_date.geojson'
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', filename)
        self.generic_import(filename, configuration_options=[{'index': 0,  'convert_to_date': ['date']}])

    def import_file(self, in_file, configuration_options=[]):
        """
        Imports the file.
        """
        self.assertTrue(os.path.exists(in_file))

        # run ogr2ogr
        gi = GDALImport(in_file)
        layers = gi.handle(configuration_options=configuration_options)

        d = db.connections['datastore'].settings_dict
        connection_string = "PG:dbname='%s' user='******' password='******'" % (d['NAME'], d['USER'], d['PASSWORD'])

        output = []
        for layer, layer_config in layers:
            # expose the featureType in geoserver
            self.cat.publish_featuretype(layer, self.datastore, 'EPSG:4326')
            output.append(gs_slurp(workspace='geonode', store=self.datastore.name, filter=layer))

        return output


    @staticmethod
    def createFeatureType(catalog, datastore, name):
        """
        Exposes a PostGIS feature type in geoserver.
        """
        headers = {"Content-type": "application/xml"}
        data = "<featureType><name>{name}</name></featureType>".format(name=name)
        url = datastore.href.replace(".xml", "/featuretypes.xml")
        headers, response = catalog.http.request(url, "POST", data, headers)
        return response

    def test_create_vrt(self):
        """
        Tests the create_vrt function.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv')

        vrt = create_vrt(f)
        vrt.seek(0)
        output = vrt.read()

        self.assertTrue('name="US_shootings"' in output)
        self.assertTrue('<SrcDataSource>{0}</SrcDataSource>'.format(f) in output)
        self.assertTrue('<GeometryField encoding="PointFromColumns" x="Longitude" y="Latitude" />' in output)
        self.assertEqual(os.path.splitext(vrt.name)[1], '.vrt')

    def test_file_add_view(self):
        """
        Tests the file_add_view.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()

        # test login required for this view
        request = c.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path, reverse('uploads-list'))
        self.assertTrue(len(response.context['object_list']) == 1)

        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))


        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'empty_file.geojson')

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_describe_fields(self):
        """
        Tests the describe fields functionality.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv')
        fields = None

        with GDALInspector(f) as f:
            layers = f.describe_fields()

        self.assertTrue(layers[0]['name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed',
                                                                    'Wounded', 'Location', 'City',
                                                                    'Longitude', 'Latitude'])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """
        Tests the GDAL file type detection.
        """
        files = ((os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_shootings.csv'), 'CSV'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson'), 'GeoJSON'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'mojstrovka.gpx'), 'GPX'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'us_states.kml'), 'KML'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_year_field.shp'), 'ESRI Shapefile'),
                 (os.path.join(os.path.dirname(__file__), 'test_ogr', 'boxes_with_date_iso_date.zip'), 'ESRI Shapefile'),
        )

        for path, file_type in files:
            with GDALInspector(path) as f:
                self.assertEqual(f.file_type(), file_type)

    def test_configure_view(self):
        """
        Tests the configuration view.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True,
                    'editable': True}]

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)
        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(Layer.objects.all()[0].owner.username, self.non_admin_username)

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)

    def test_configure_view_convert_date(self):
        """
        Tests the configure view with a dataset that needs to be converted to a date.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        upload = response.context['object_list'][0]

        payload = [{'index': 0,
                    'convert_to_date': ['Date'],
                    'start_date': 'Date',
                    'configureTime': True,
                    'editable': True}]


        response = c.get('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 405)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 400)

        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id), data=json.dumps(payload),
                          content_type='application/json')
        self.assertEqual(response.status_code, 200)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertTrue(layer.attributes.filter(attribute='date_as_date'), 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        # ensure a user who does not own the upload cannot configure an import from it.
        c.logout()
        c.login_as_admin()
        response = c.post('/importer-api/data-layers/{0}/configure/'.format(upload.id))
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        c = AdminClient()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.username)
        non_admin = User.objects.get(username=self.non_admin_username)
        from .models import UploadFile

        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'US_shootings.csv')

        with open(f, 'rb') as f:
            uploaded_file = SimpleUploadedFile('test_data', f.read())
            admin_upload = UploadedData.objects.create(state='Admin test', user=admin)
            admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

            non_admin_upload = UploadedData.objects.create(state='Non admin test', user=non_admin)
            non_admin_upload.uploadfile_set.add(UploadFile.objects.create(file=uploaded_file))

        c.login_as_admin()
        response = c.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 2)

        response = c.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        body = json.loads(response.content)
        self.assertEqual(len(body['objects']), 1)


    def test_layer_list_api(self):
        c = AdminClient()
        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 401)

        c.login_as_non_admin()

        response = c.get('/importer-api/data-layers/')
        self.assertEqual(response.status_code, 200)

    def test_delete_from_non_admin_api(self):
        """
        Ensure users can delete their data.
        """
        c = AdminClient()

        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)
        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """
        Ensure that administrators can delete data that isn't theirs.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        self.assertEqual(UploadedData.objects.all().count(), 1)

        c.logout()
        c.login_as_admin()

        id = UploadedData.objects.first().id
        response = c.delete('/importer-api/data/{0}/'.format(id))

        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """
        Tests providing a name in the configuration options.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()
        name = 'point-with-a-date'
        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'name': name,
                   'editable': True}

        response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps(payload),
                          content_type='application/json')

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """
        Tests the import api.
        """
        f = os.path.join(os.path.dirname(__file__), 'test_ogr', 'point_with_date.geojson')
        c = AdminClient()
        c.login_as_non_admin()

        with open(f) as fp:
            response = c.post(reverse('uploads-new'), {'file': fp}, follow=True)

        payload = {'index': 0,
                   'convert_to_date': ['date'],
                   'start_date': 'date',
                   'configureTime': True,
                   'editable': True}

        self.assertTrue(isinstance(UploadLayer.objects.first().configuration_options, dict))

        response = c.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = c.post('/importer-api/data-layers/1/configure/', data=json.dumps([payload]),
                          content_type='application/json')

        self.assertEqual(response.status_code, 200)
        self.assertTrue('task' in response.content)

        layer = Layer.objects.all()[0]
        self.assertEqual(layer.srid, 'EPSG:4326')
        self.assertEqual(layer.store, self.datastore.name)
        self.assertEqual(layer.storeType, 'dataStore')
        self.assertTrue(layer.attributes[1].attribute_type, 'xsd:dateTime')

        lyr = self.cat.get_layer(layer.name)
        self.assertTrue('time' in lyr.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """
        Test the file extension validator.
        """

        for extension in IMPORTER_VALID_EXTENSIONS:
            self.assertIsNone(validate_file_extension(SimpleUploadedFile('test.{0}'.format(extension), '')))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """
        Tests that the inspector rejects files without a readable geometry.
        """

        with self.assertRaises(ValidationError):
            validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """
        Regression test for numeric field overflows in shapefiles.
        # https://trac.osgeo.org/gdal/ticket/5241
        """
        self.generic_import('Walmart.zip', configuration_options=[{'index': 0, 'convert_to_date': []}])
Example #8
class Extractor:
    def __init__(self, input_dir):
        logging.basicConfig(filename='extractor.log', level=logging.INFO)
        self.table_count = 0
        self.working_dir = input_dir
        self.schemas = []
        self.tables = {}
        # pg connection
        self.conn = psycopg2.connect(
            dbname=config.PGDATABASE, user=config.PGUSER,
            password=config.PGPASSWORD, host=config.PGHOST)
        self.cur = self.conn.cursor()
        self.sql_commands = []
        #gs config
        self.cat = None

    def get_files_by_format(self, format):
        logging.info('Started')
        for subdir, dirs, files in os.walk(self.working_dir):
            # lower and remove spaces
            schema = ''.join(s.lower() for s in os.path.basename(subdir)
                             if os.path.basename(subdir) and not s.isspace())
            if schema != '': self.schemas.append(schema)
            for file in files:
                if file.endswith(".{}".format(format)):
                    table_name = ''.join(s.lower() for s in file
                                         if not s.isspace())
                    self.tables[Path(table_name).resolve().stem] = {
                        'schema': schema,
                        'path': '{}/{}'.format(subdir, file)
                    }

    def bulk_create(self):
        for schema in self.schemas:
            if schema == '': continue
            self._push_to_database(self._build_create_schema_query(schema))
        self.commit_to_database()

    def gs_connect_geoserver(self):
        self.cat = Catalog(config.GSREST,
                           username=config.GSUSER,
                           password=config.GSPASSWORD)

    def gs_create_workspace(self, ws):
        if self.cat.get_workspace(ws):
            logging.info('workspace {} exists skipping...'.format(ws))
        else:
            self.cat.create_workspace(ws)

    def gs_get_data_store(self, store):
        return self.cat.get_store(store)

    def gs_create_store(self, schema, workspace_name):
        try:
            self.cat.get_store(schema)
        except:
            ds = self.cat.create_datastore(schema, workspace_name)
            ds.connection_parameters.update({
                'host': config.PGHOST,
                'port': config.PGPORT,
                'database': config.PGDATABASE,
                'user': config.PGUSER,
                'passwd': config.PGPASSWORD,
                'dbtype': 'postgis',
                'schema': schema,
                'Expose primary keys': 'true'
            })
            self.cat.save(ds)

    def gs_publish_feature_type(self, table, store_name):
        try:
            data_store = self.gs_get_data_store(store_name)
            self.cat.publish_featuretype(table,
                                         data_store,
                                         'EPSG:2154',
                                         srs='EPSG:2154')
        except Exception as e:
            logging.error('could not publish %s: %s', table, e)

    @staticmethod
    def insert_data_from_shapefile(table, **kwargs):
        schema = kwargs['schema']
        path = kwargs['path']
        cmd = 'shp2pgsql -s {} {} "{}"."{}" | ' \
              'PGPASSWORD={} psql -h {} -d {} -U {}'.format(
            EPSG, path, schema, table, config.PGPASSWORD, config.PGHOST, config.PGDATABASE, config.PGUSER)
        subprocess.call(cmd, shell=True)

    @staticmethod
    def _build_create_table_query(dirname, shapefile):
        return 'CREATE TABLE {}.{};'.format(dirname, shapefile)

    @staticmethod
    def _build_create_schema_query(schema):
        return 'CREATE SCHEMA {};'.format(schema)

    def _push_to_database(self, query):
        self.cur.execute(query)

    def commit_to_database(self):
        self.conn.commit()
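
A possible end-to-end driver for the Extractor class above; the input directory, file format and workspace name are hypothetical, and the PostGIS/GeoServer credentials come from the config module the class already relies on:

extractor = Extractor('/data/shapefiles')            # hypothetical input directory
extractor.get_files_by_format('shp')                 # index *.shp files per sub-directory/schema
extractor.bulk_create()                              # create one PostgreSQL schema per sub-directory
extractor.gs_connect_geoserver()
extractor.gs_create_workspace('urban')               # hypothetical workspace name
for table, info in extractor.tables.items():
    extractor.gs_create_store(info['schema'], 'urban')
    Extractor.insert_data_from_shapefile(table, **info)
    extractor.gs_publish_feature_type(table, info['schema'])
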
Example #9
class wrap_geoserver:
    """ Geoserver (gsconfig) wrapper """
    def __init__(self,
                 geoserver_name,
                 username=username,
                 password=password,
                 easy=False):
        if geoserver_name in list(REST.keys()):
            self.path = REST[geoserver_name]
        else:
            self.path = geoserver_name
        self.wms = self.path.replace("rest/", "wms")

        self.name = geoserver_name
        self.catalog = Catalog(self.path, username, password)

        if not easy:
            self.layers = []
            self.layer_names = []

            for layer in self.catalog.get_layers():
                self.layers.append(layer)
                self.layer_names.append(layer.name)

            self.stores = [store for store in self.catalog.get_stores()]
            self.store_names = [store.name for store in self.stores]

            styles = []
            self.workspaces = []
            self.workspace_names = []

            for workspace in self.catalog.get_workspaces():
                styles = styles + self.catalog.get_styles(workspace)
                self.workspace_names.append(workspace._name)
                self.workspaces.append(workspace)

            self.styles = styles + [
                style for style in self.catalog.get_styles()
            ]
            self.style_names = [style.name for style in self.styles]

    def unpack(self, workspace_name, store_type="datastore"):
        layers_and_styles = {}
        features = []
        workspace = self.get_workspace(workspace_name)

        if store_type == "datastore":
            store_url = workspace.datastore_url
        elif store_type == "coveragestore":
            store_url = workspace.coveragestore_url
        else:
            print("No correct store given")

        for datastore in tqdm(get(store_url, "name")):
            url = "{}workspaces/{}/datastores/{}".format(
                self.path, workspace.name, datastore)
            features = features + get(url, between_quotes=True)

        for feature in features:
            layer_name = os.path.basename(feature).split(".")[0]
            self.get_layer(self.get_slug(workspace.name, layer_name))
            layers_and_styles[layer_name] = self.layer.default_style

        setattr(self, workspace_name + "_data", layers_and_styles)
        return layers_and_styles

    def get_layer(self, layer, easy=False):
        self.layer = self.catalog.get_layer(layer)

        if not easy:
            self.resource = self.layer.resource
            self.layer_name = self.layer.resource.name
            self.sld_name = self.layer.default_style.name
            self.sld_body = self.layer.default_style.sld_body
            self.layer_latlon_bbox = self.layer.resource.latlon_bbox
            self.layer_title = self.layer.resource.title
            self.layer_abstract = self.layer.resource.abstract

    def get_store(self, layer):
        self.store = self.layer.resource._store

    def get_resource(self):
        self.resource = self.catalog.get_resource(self.layer.name, self.store)

    def get_workspace(self, workspace_name):
        self.workspace = self.catalog.get_workspace(workspace_name)
        self.workspace_name = self.workspace._name
        return self.workspace

    def write_abstract(self, data, load_resource=True):
        if load_resource:
            self.get_resource()
        self.resource.abstract = data
        self.catalog.save(self.resource)

    def write_title(self, title):
        self.resource.title = title
        self.catalog.save(self.resource)

    def get_connection_parameters(self):
        self.get_resource()
        return self.resource.store.connection_parameters

    def create_workspace(self, workspace_name):
        workspace_exists = workspace_name in self.workspace_names

        if not workspace_exists:
            self.workspace = self.catalog.create_workspace(workspace_name)

        else:
            print("workspace already exists, using existing workspace")

        self.workspace = self.catalog.get_workspace(workspace_name)
        self.workspace_name = workspace_name

    def create_postgis_datastore(self, store_name, workspace_name, pg_data):

        try:
            self.store = self.catalog.get_store(store_name,
                                                self.workspace_name)
            print("store within workspace exists, using existing store")

        except Exception as e:
            print(e)

            ds = self.catalog.create_datastore(store_name, workspace_name)
            ds.connection_parameters.update(
                host=pg_data["host"],
                port=pg_data["port"],
                database=pg_data["database"],
                user=pg_data["username"],
                passwd=pg_data["password"],
                dbtype="postgis",
                schema="public",
            )

            self.save(ds)
            self.store = self.catalog.get_store(store_name,
                                                self.workspace_name)
            self.store_name = store_name

    def publish_layer(self,
                      layer_name,
                      workspace_name,
                      overwrite=False,
                      epsg="3857",
                      reload=False):

        layer_exists = layer_name in self.layer_names
        # if layer_name in self.workspace_layers[workspace_name]:
        slug = self.get_slug(workspace_name, layer_name)
        if overwrite and layer_exists:
            print("Layer exists, deleting layer")
            try:

                self.layer = self.catalog.get_layer(slug)
                self.delete(self.layer)
                self.reload()

                layer_exists = False

            except Exception as e:
                print(e)
                print("Layer does not exist in workspace")
                layer_exists = False

        if not layer_exists:

            feature_type = self.catalog.publish_featuretype(
                layer_name,
                self.store,
                "EPSG:{}".format(str(epsg)),
                srs="EPSG:{}".format(str(epsg)),
            )
            self.save(feature_type)
            self.feature_type = feature_type

        else:
            print("layer already exists, using existing layer")

        if reload:
            self.get_layer(slug)

        self.layer_name = layer_name

    def publish_layergroup(self,
                           name,
                           layers,
                           styles=(),
                           bounds=None,
                           workspace=None):
        layer_group = self.catalog.create_layergroup(name, layers, styles,
                                                     bounds, workspace)
        self.save(layer_group)

    def save(self, save_object):
        return self.catalog.save(save_object)

    def close(self):
        self.catalog = None

    def delete(self, delete_object):
        self.catalog.delete(delete_object)

    def reload(self):
        self.catalog.reload()

    def upload_shapefile(self, layer_name, shapefile_path):
        path = shapefile_path.split(".shp")[0]
        shapefile = shapefile_and_friends(path)
        ft = self.catalog.create_featurestore(layer_name, shapefile,
                                              self.workspace)
        self.save(ft)

    def upload_sld(self, sld_name, workspace_name, sld, overwrite=True):
        style_exists = sld_name in self.style_names

        if overwrite and style_exists:
            print("Overwriting style")
            style = self.catalog.get_style(sld_name, workspace_name)
            self.delete(style)
            self.reload()
            style_exists = False

        if not style_exists:
            try:
                self.catalog.create_style(sld_name, sld, False, workspace_name,
                                          "sld11")
            except Exception as e:
                print(e)

                style = self.catalog.get_style(sld_name, workspace_name)
                self.delete(style)
                self.reload()
                self.catalog.create_style(sld_name, sld, False, workspace_name,
                                          "sld10")
            self.style_name = sld_name

        else:
            if style_exists:
                print("Style already exists, using current style")
                self.style_name = sld_name

    def set_sld_for_layer(self,
                          workspace_name=None,
                          style_name=None,
                          use_custom=False):
        if not use_custom:
            workspace_name = self.workspace_name
            style_name = self.style_name
            self.style_slug = self.get_slug(workspace_name, style_name)

        else:
            if workspace_name is None:
                self.style_slug = style_name
            else:
                self.style_slug = self.get_slug(workspace_name, style_name)

        self.style = self.catalog.get_style(self.style_slug)

        print("Setting {} for {}".format(self.style.name, self.layer.name))
        self.layer.default_style = self.style
        self.save(self.layer)

    def get_slug(self, workspace, name):
        return "{}:{}".format(workspace, name)

    def get_slug_data(self, slug):
        workspace_name = slug.split(":")[0]
        layer_name = slug.split(":")[1]
        return workspace_name, layer_name

    def get_sld(self, layer_slug=None):
        if layer_slug is None:
            self.style = self.catalog.get_style(self.layer_slug)
        else:
            self.style = self.catalog.get_style(layer_slug)

        self.sld_body = self.style.sld_body
        return self.sld_body

    def get_layer_workspace(self, layer_name):
        return self.catalog.get_layer(layer_name).resource.workspace.name
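
A hedged walk-through of the wrapper above, from workspace to styled layer; the REST URL, workspace, store, table, style and connection values are all made up, and pg_data uses the keys the wrapper reads:

gs = wrap_geoserver("http://localhost:8080/geoserver/rest/", "admin", "geoserver")
gs.create_workspace("urban")
gs.create_postgis_datastore("urban_store", "urban", {
    "host": "localhost", "port": "5432", "database": "urban",
    "username": "postgres", "password": "secret",
})
gs.publish_layer("parcels", "urban", epsg="31370", reload=True)
gs.upload_sld("parcel_style", "urban", open("parcel_style.sld").read())
gs.set_sld_for_layer()
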
Example #10
class GsConn():
    def __init__(self, host, login, password, debug=False):
        """
        Geoserver connection
        """
        self.host = host
        self.login = login
        self.password = password
        self.debug = debug

        # Connect to server
        self.cat = Catalog("http://%s/geoserver/rest" % host, login, password)
        if self.debug is True:
            print "Connected to geoserver"

    def create_workspace(self, name, uri, overwrite=False):
        """
        Creates a workspace
        :param name: Workspace name.
        :param overwrite: If True, delete existing workspace.
        :return: None
        """
        workspaces = [
            workspace.name for workspace in self.cat.get_workspaces()
        ]

        if name in workspaces and overwrite is True:
            # ws2del = self.cat.get_workspace(name)
            # self.cat.delete(ws2del, purge=True, recurse=True)
            return None  # NOTE: If we delete the workspace then all associated layers are lost.
        elif name in workspaces and overwrite is False:
            print "ERROR: Workspace %s already exists (use overwrite=True)." % name

        self.cat.create_workspace(name, uri)
        if self.debug is True:
            print "Workspace %s available." % name

        ws = self.cat.get_workspace(name)
        ws.enabled = True

    def create_pg_store(self,
                        name,
                        workspace,
                        host,
                        port,
                        login,
                        password,
                        dbname,
                        schema,
                        overwrite=False):
        """
        Creates datastore.
        :param name: Name of the datastore.
        :param workspace: Name of the workspace to use.
        :param overwrite: If True replace datastore.
        :return: None
        """
        stores = [store.name for store in self.cat.get_stores()]

        if name in stores and overwrite is True:
            # st2del = self.cat.get_store(name)
            # self.cat.delete(st2del, purge=True, recurse=True)
            # self.cat.reload()
            return None  # NOTE: If we delete store, every layers associated with are lost.
        elif name in stores and overwrite is False:
            print "ERROR: Store %s already exists (use overwrite=True)." % name

        ds = self.cat.create_datastore(name, workspace)
        ds.connection_parameters.update(host=host,
                                        port=port,
                                        user=login,
                                        passwd=password,
                                        dbtype='postgis',
                                        database=dbname,
                                        schema=schema)
        self.cat.save(ds)

        ds = self.cat.get_store(name)
        if ds.enabled is False:
            print "ERROR: Geoserver store %s not enabled" % name

        if self.debug is True:
            print "Datastore %s created." % name

    def publish_pg_layer(self,
                         layer_table,
                         layer_name,
                         store,
                         srid,
                         overwrite=True):
        """
        """
        existing_lyr = self.cat.get_layer("participatubes:%s" % layer_table)
        if existing_lyr is not None:
            print "Layer participatubes:%s already exists, deleting it." % layer_table
            self.cat.delete(existing_lyr)
            self.cat.reload()

        ds = self.cat.get_store(store)
        ft = self.cat.publish_featuretype(layer_table,
                                          ds,
                                          'EPSG:%s' % srid,
                                          srs='EPSG:4326')
        ft.projection_policy = "REPROJECT_TO_DECLARED"
        ft.title = layer_name
        self.cat.save(ft)

        if ft.enabled is False:
            print "ERROR: Layer %s %s %s is not enabled." (ft.workspace.name,
                                                           ft.store.name,
                                                           ft.title)

        if self.debug is True:
            print "Layer %s>%s>%s published." % (ft.workspace.name,
                                                 ft.store.name, ft.title)

    def create_style_from_sld(self,
                              style_name,
                              sld_file,
                              workspace,
                              overwrite=True):
        """
        """
        if self.cat.get_style(style_name) is not None:
            print "Style %s already exists, deleting it." % style_name
            style2del = self.cat.get_style(style_name)
            self.cat.delete(style2del)

        self.cat.create_style(
            style_name, open(sld_file).read(), overwrite=overwrite
        )  # FIXME: if ", workspace=workspace" specified can't delete style

        if self.debug is True:
            print "Style %s created in Geoserver" % style_name

    def apply_style_to_layer(self, layer_name, style_name):
        """
        Apply a geoserver styler to a layer
        """
        gs_layer = self.cat.get_layer(layer_name)
        gs_style = self.cat.get_style(style_name)

        # FIXME: Which works better?
        # gs_layer.default_style = gs_style / gs_layer._set_default_style(gs_style)
        # FIXME: Maybe indicate workspace when saving style then name the style as "workspace:style"
        gs_layer._set_default_style(gs_style)
        self.cat.save(gs_layer)

        if self.debug is True:
            print "Style applied to %s" % layer_name
Beispiel #12
0
def create_point_col_from_lat_lon(new_table_owner, table_name, lat_column,
                                  lng_column):
    """
    Using a new DataTable and specified lat/lng column names, map the points

    :param new_table_owner:
    :param table_name:
    :param lat_column:
    :param lng_column:
    :return:
    """

    LOGGER.info('create_point_col_from_lat_lon')
    assert isinstance(new_table_owner,
                      User), "new_table_owner must be a User object"
    assert table_name is not None, "table_name cannot be None"
    assert lat_column is not None, "lat_column cannot be None"
    assert lng_column is not None, "lng_column cannot be None"

    # ----------------------------------------------------
    # Retrieve the DataTable and check for lat/lng columns
    # ----------------------------------------------------
    try:
        dt = DataTable.objects.get(table_name=table_name)
    except DataTable.DoesNotExist:
        err_msg = "Could not find DataTable with name: %s" % (table_name)
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Latitude attribute
    # ----------------------------------------------------
    lat_col_attr = dt.get_attribute_by_name(
        standardize_column_name(lat_column))
    if lat_col_attr is None:
        err_msg = 'DataTable "%s" does not have a latitude column named "%s" (formatted: %s)'\
                  % (table_name, lat_column, standardize_column_name(lat_column))
        LOGGER.error(err_msg)
        return False, err_msg

    is_valid, err_msg = is_valid_lat_lng_attribute(lat_col_attr)
    if not is_valid:
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Longitude attribute
    # ----------------------------------------------------
    lng_col_attr = dt.get_attribute_by_name(
        standardize_column_name(lng_column))
    if lng_col_attr is None:
        err_msg = 'DataTable "%s" does not have a longitude column named "%s" (formatted: %s)'\
                  % (table_name, lng_column, standardize_column_name(lng_column))
        LOGGER.error(err_msg)
        return False, err_msg

    is_valid, err_msg = is_valid_lat_lng_attribute(lng_col_attr,
                                                   lng_check=True)
    if not is_valid:
        LOGGER.error(err_msg)
        return False, err_msg

    # ----------------------------------------------------
    # Start mapping record
    # ----------------------------------------------------
    lat_lnt_map_record = LatLngTableMappingRecord(datatable=dt,
                                                  lat_attribute=lat_col_attr,
                                                  lng_attribute=lng_col_attr)

    msg('create_point_col_from_lat_lon - 2')

    # ----------------------------------------------------
    # Yank bad columns out of the DataTable
    # ----------------------------------------------------
    # See https://github.com/IQSS/dataverse/issues/2949

    (success,
     row_cnt_or_err_msg) = remove_bad_lat_lng_numbers(lat_lnt_map_record)
    if not success:
        if lat_lnt_map_record.id:
            lat_lnt_map_record.delete()
        return False, 'Failed to remove bad lat/lng values.'

    # The bad rows were not mapped
    lat_lnt_map_record.unmapped_record_count = row_cnt_or_err_msg

    # ---------------------------------------------
    # Format SQL to:
    #   (a) Add the Geometry column to the Datatable
    #   (b) Populate the column using the lat/lng attributes
    #   (c) Create column index
    # ---------------------------------------------
    # (a) Add column SQL
    alter_table_sql = "ALTER TABLE %s ADD COLUMN geom geometry(POINT,4326);" % (
        table_name)  # PostGIS 2.x

    # (b) Populate column SQL
    update_table_sql = "UPDATE %s SET geom = ST_SetSRID(ST_MakePoint(%s,%s),4326);" \
                    % (table_name, lng_col_attr.attribute, lat_col_attr.attribute)
    #update_table_sql = "UPDATE %s SET geom = ST_SetSRID(ST_MakePoint(cast(%s AS float), cast(%s as float)),4326);" % (table_name, lng_column, lat_column)

    # (c) Index column SQL
    create_index_sql = "CREATE INDEX idx_%s_geom ON %s USING GIST(geom);" % (
        table_name, table_name)

    # ---------------------------------------------
    # Run the SQL
    # ---------------------------------------------
    try:
        conn = psycopg2.connect(
            get_datastore_connection_string(is_dataverse_db=False))

        cur = conn.cursor()

        LOGGER.debug('Run alter table SQL: %s', alter_table_sql)
        cur.execute(alter_table_sql)

        LOGGER.debug('Run update table SQL: %s', update_table_sql)
        cur.execute(update_table_sql)

        LOGGER.debug('Run create index SQL: %s', create_index_sql)
        cur.execute(create_index_sql)

        conn.commit()
        conn.close()
    except Exception as ex_obj:
        conn.close()
        err_msg = "Error Creating Point Column from Latitude and Longitude %s" % (
            ex_obj)
        LOGGER.error(err_msg)
        return False, err_msg

    msg('create_point_col_from_lat_lon - 4')

    # ------------------------------------------------------
    # Create the Layer in GeoServer from the table
    # ------------------------------------------------------
    try:
        cat = Catalog(settings.GEOSERVER_BASE_URL + "rest",\
                    settings.GEOSERVER_CREDENTIALS[0],\
                    settings.GEOSERVER_CREDENTIALS[1])
        #      "admin", "geoserver")
        workspace = cat.get_workspace("geonode")
        ds_list = cat.get_xml(workspace.datastore_url)
        datastores = [
            datastore_from_index(cat, workspace, n)
            for n in ds_list.findall("dataStore")
        ]
        #----------------------------
        # Find the datastore
        #----------------------------
        ds = None
        from geonode.maps.utils import get_db_store_name
        for datastore in datastores:
            if datastore.name == get_db_store_name():
                ds = datastore

        if ds is None:
            err_msg = "Datastore not found: '%s' (lat/lng)" % (
                settings.DB_DATASTORE_NAME)
            return False, err_msg
        ft = cat.publish_featuretype(table_name,
                                     ds,
                                     "EPSG:4326",
                                     srs="EPSG:4326")
        cat.save(ft)
    except Exception as e:
        lat_lnt_map_record.delete()
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoServer layer for %s: %s" % (table_name,
                                                                 str(e))
        return False, err_msg

    msg('create_point_col_from_lat_lon - 5 - add style')

    # ------------------------------------------------------
    # Set the Layer's default Style
    # ------------------------------------------------------
    set_default_style_for_latlng_layer(cat, ft)

    # ------------------------------------------------------
    # Create the Layer in GeoNode from the GeoServer Layer
    # ------------------------------------------------------
    try:
        layer, created = Layer.objects.get_or_create(
            name=table_name,
            defaults={
                "workspace":
                workspace.name,
                "store":
                ds.name,
                "storeType":
                ds.resource_type,
                "typename":
                "%s:%s" %
                (workspace.name.encode('utf-8'), ft.name.encode('utf-8')),
                "title":
                dt.title or 'No title provided',
                #"name" : dt.title or 'No title provided',
                "abstract":
                dt.abstract or 'No abstract provided',
                "uuid":
                str(uuid.uuid4()),
                "owner":
                new_table_owner,
                #"bbox_x0": Decimal(ft.latlon_bbox[0]),
                #"bbox_x1": Decimal(ft.latlon_bbox[1]),
                #"bbox_y0": Decimal(ft.latlon_bbox[2]),
                #"bbox_y1": Decimal(ft.latlon_bbox[3])
            })
        #set_attributes(layer, overwrite=True)
    except Exception as e:
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoNode layer for %s: %s" % (table_name,
                                                               str(e))
        return False, err_msg

    # ----------------------------------
    # Set default permissions (public)
    # ----------------------------------
    layer.set_default_permissions()

    # ------------------------------------------------------------------
    # Create LayerAttributes for the new Layer (not done in GeoNode 2.x)
    # ------------------------------------------------------------------
    (attributes_created,
     err_msg) = create_layer_attributes_from_datatable(dt, layer)
    if not attributes_created:
        LOGGER.error(err_msg)
        layer.delete()  # Delete the layer
        return False, "Sorry there was an error creating the Datatable. (s:ll)"

    # ----------------------------------
    # Save a related LatLngTableMappingRecord
    # ----------------------------------
    lat_lnt_map_record.layer = layer

    # ----------------------------------
    # Retrieve matched/unmatched counts
    # ----------------------------------
    # Get datatable feature count - total record to map
    (success_datatable,
     datatable_feature_count) = get_layer_feature_count(dt.table_name)

    # Get layer feature count - mapped records
    (success_layer, layer_feature_count) = get_layer_feature_count(layer.name)

    # Set Record counts
    if success_layer and success_datatable:
        lat_lnt_map_record.mapped_record_count = layer_feature_count
        new_missed_records = datatable_feature_count - layer_feature_count
        if lat_lnt_map_record.unmapped_record_count and lat_lnt_map_record.unmapped_record_count > 0:
            lat_lnt_map_record.unmapped_record_count += new_missed_records
        else:
            lat_lnt_map_record.unmapped_record_count = new_missed_records

    else:
        LOGGER.error('Failed to calculate Lat/Lng record counts')

    lat_lnt_map_record.save()

    return True, lat_lnt_map_record
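The core of create_point_col_from_lat_lon is the three SQL statements that add, populate and index the geom column. A standalone sketch of that step with psycopg2, assuming a hypothetical table mydata with columns lat and lng and a placeholder connection string (PostGIS 2.x):

import psycopg2

TABLE, LAT_COL, LNG_COL = "mydata", "lat", "lng"  # hypothetical names
conn = psycopg2.connect("dbname=gis user=postgres password=secret host=localhost")
cur = conn.cursor()
# (a) add a POINT geometry column in EPSG:4326
cur.execute("ALTER TABLE %s ADD COLUMN geom geometry(POINT,4326);" % TABLE)
# (b) fill it from the lng/lat columns (x = longitude, y = latitude)
cur.execute("UPDATE %s SET geom = ST_SetSRID(ST_MakePoint(%s, %s), 4326);"
            % (TABLE, LNG_COL, LAT_COL))
# (c) spatially index the new column
cur.execute("CREATE INDEX idx_%s_geom ON %s USING GIST(geom);" % (TABLE, TABLE))
conn.commit()
conn.close()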
Beispiel #13
0
class GsConn:
    def __init__(self, host, login, password, debug=False):
        """
        Geoserver connection
        """
        self.host = host
        self.login = login
        self.password = password
        self.debug = debug

        # Connect to server
        self.cat = Catalog("http://%s/geoserver/rest" % host, login, password)
        if self.debug is True:
            print "Connected to geoserver"

    def create_workspace(self, name, overwrite=False):
        """
        Creates a workspace
        :param name: Workspace name.
        :param overwrite: If True, delete existing workspace.
        :return: None
        """
        workspaces = [workspace.name for workspace in self.cat.get_workspaces()]

        if name in workspaces and overwrite is True:
            # ws2del = self.cat.get_workspace(name)
            # self.cat.delete(ws2del, purge=True, recurse=True)
            return None  # NOTE: If we delete the workspace then all associated layers are lost.
        elif name in workspaces and overwrite is False:
            print "ERROR: Workspace %s already exists (use overwrite=True)." % name
            return None

        self.cat.create_workspace(name, "http://%s/%s" % (self.host, name))
        if self.debug is True:
            print "Workspace %s available." % name

        ws = self.cat.get_workspace(name)
        ws.enabled = True

    def create_pg_store(self, name, workspace, host, port, login, password, dbname, schema, overwrite=False):
        """
        Creates datastore.
        :param name: Name of the datastore.
        :param workspace: Name of the workspace to use.
        :param overwrite: If True replace datastore.
        :return: None
        """
        stores = [store.name for store in self.cat.get_stores()]

        if name in stores and overwrite is True:
            # st2del = self.cat.get_store(name)
            # self.cat.delete(st2del, purge=True, recurse=True)
            # self.cat.reload()
            return None  # NOTE: If we delete the store, every layer associated with it is lost.
        elif name in stores and overwrite is False:
            print "ERROR: Store %s already exists (use overwrite=True)." % name
            return None

        ds = self.cat.create_datastore(name, workspace)
        ds.connection_parameters.update(
            host=host, port=port, user=login, passwd=password, dbtype="postgis", database=dbname, schema=schema
        )
        self.cat.save(ds)

        ds = self.cat.get_store(name)
        if ds.enabled is False:
            print "ERROR: Geoserver store %s not enabled" % name

        if self.debug is True:
            print "Datastore %s created." % name

    def publish_pg_layer(self, layer_table, layer_name, store, srid, overwrite=True):
        """
        """
        existing_lyr = self.cat.get_layer("ma_carte:%s" % layer_table)
        if existing_lyr is not None:
            print "Layer ma_carte:%s already exists, deleting it." % layer_table
            self.cat.delete(existing_lyr)
            self.cat.reload()

        ds = self.cat.get_store(store)
        ft = self.cat.publish_featuretype(layer_table, ds, "EPSG:%s" % srid, srs="EPSG:4326")
        ft.projection_policy = "REPROJECT_TO_DECLARED"
        ft.title = layer_name
        self.cat.save(ft)

        if ft.enabled is False:
            print "ERROR: Layer %s %s %s is not enabled."(ft.workspace.name, ft.store.name, ft.title)

        if self.debug is True:
            print "Layer %s>%s>%s published." % (ft.workspace.name, ft.store.name, ft.title)

    def create_style_from_sld(self, style_name, sld_file, workspace, overwrite=True):
        """
        """
        if self.cat.get_style(style_name) is not None:
            print "Style %s already exists, deleting it." % style_name
            style2del = self.cat.get_style(style_name)
            self.cat.delete(style2del)

        self.cat.create_style(
            style_name, open(sld_file).read(), overwrite=overwrite
        )  # FIXME: if ", workspace=workspace" specified can't delete style

        if self.debug is True:
            print "Style %s created in Geoserver" % style_name

    def apply_style_to_layer(self, layer_name, style_name):
        """
        Apply a GeoServer style to a layer
        """
        gs_layer = self.cat.get_layer(layer_name)
        gs_style = self.cat.get_style(style_name)

        # FIXME: Which works better?
        # gs_layer.default_style = gs_style / gs_layer._set_default_style(gs_style)
        # FIXME: Maybe indicate workspace when saving style then name the style as "workspace:style"
        gs_layer._set_default_style(gs_style)
        self.cat.save(gs_layer)

        if self.debug is True:
            print "Style applied to %s" % layer_name
Beispiel #14
0
def publishGeoserver(appdef, progress):
	viewCrs = appdef["Settings"]["App view CRS"]
	usesGeoServer = False
	for applayer in appdef["Layers"]:
		if applayer.method != METHOD_FILE:
			if applayer.layer.type() == applayer.layer.VectorLayer and applayer.layer.providerType().lower() != "wfs":
				usesGeoServer = True
	if not usesGeoServer:
		return
	progress.setText("Publishing to GeoServer")
	progress.setProgress(0)
	geoserverUrl = appdef["Deploy"]["GeoServer url"] + "/rest"
	geoserverPassword = appdef["Deploy"]["GeoServer password"]
	geoserverUsername = appdef["Deploy"]["GeoServer username"]
	workspaceName = appdef["Deploy"]["GeoServer workspace"]
	dsName = "ds_" + workspaceName
	host = appdef["Deploy"]["PostGIS host"]
	port = appdef["Deploy"]["PostGIS port"]
	postgisUsername = appdef["Deploy"]["PostGIS username"]
	postgisPassword = appdef["Deploy"]["PostGIS password"]
	database = appdef["Deploy"]["PostGIS database"]
	schema = appdef["Deploy"]["PostGIS schema"]
	catalog = Catalog(geoserverUrl, geoserverUsername, geoserverPassword)
	workspace = catalog.get_workspace(workspaceName)
	if workspace is None:
		workspace = catalog.create_workspace(workspaceName, workspaceName)
	try:
		store = catalog.get_store(dsName, workspace)
		resources = store.get_resources()
		for resource in resources:
			layers = catalog.get_layers(resource)
			for layer in layers:
				catalog.delete(layer)
			catalog.delete(resource)
		catalog.delete(store)
	except Exception:
		pass
	try:
		store = catalog.get_store(dsName, workspace)
	except FailedRequestError:
		store = None
	for i, applayer in enumerate(appdef["Layers"]):
		layer = applayer.layer
		if applayer.method != METHOD_FILE and applayer.method != METHOD_DIRECT:
			name = safeName(layer.name())
			sld, icons = getGsCompatibleSld(layer)
			if sld is not None:
				catalog.create_style(name, sld, True)
				uploadIcons(icons, geoserverUsername, geoserverPassword, catalog.gs_base_url)
			if layer.type() == layer.VectorLayer:
				if applayer.method == METHOD_WFS_POSTGIS or applayer.method == METHOD_WMS_POSTGIS:
					if store is None:
						store = catalog.create_datastore(dsName, workspace)
						store.connection_parameters.update(
							host=host, port=str(port), database=database, user=postgisUsername, schema=schema,
							passwd=postgisPassword, dbtype="postgis")
						catalog.save(store)
					catalog.publish_featuretype(name, store, layer.crs().authid())
				else:
					path = getDataFromLayer(layer, viewCrs)
					catalog.create_featurestore(name,
													path,
													workspace=workspace,
													overwrite=True)
				gslayer = catalog.get_layer(name)
				r = gslayer.resource
				r.dirty['srs'] = viewCrs
				catalog.save(r)
			elif layer.type() == layer.RasterLayer:
				path = getDataFromLayer(layer, viewCrs)
				catalog.create_coveragestore(name,
				                          path,
				                          workspace=workspace,
				                          overwrite=True)
			if sld is not None:
				publishing = catalog.get_layer(name)
				publishing.default_style = catalog.get_style(name)
				catalog.save(publishing)
		progress.setProgress(int((i+1)*100.0/len(appdef["Layers"])))
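publishGeoserver follows the usual gsconfig sequence: get or create the workspace, get or create the PostGIS datastore, then publish the feature type. A condensed sketch of just that sequence, assuming the standard gsconfig import path and using placeholder URL, credentials, names and connection parameters:

from geoserver.catalog import Catalog, FailedRequestError

cat = Catalog("http://localhost:8080/geoserver/rest", "admin", "geoserver")  # placeholders
ws = cat.get_workspace("myapp")
if ws is None:
    ws = cat.create_workspace("myapp", "http://example.org/myapp")
try:
    store = cat.get_store("ds_myapp", ws)
except FailedRequestError:  # some gsconfig versions raise instead of returning None
    store = None
if store is None:
    store = cat.create_datastore("ds_myapp", ws)
    store.connection_parameters.update(
        host="localhost", port="5432", database="gis", schema="public",
        user="postgres", passwd="secret", dbtype="postgis")
    cat.save(store)
ft = cat.publish_featuretype("roads", store, "EPSG:4326", srs="EPSG:4326")
cat.save(ft)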
Beispiel #15
0
    def processAlgorithm(self, parameters, context, model_feedback):
        """
        Process the algorithm
        :param parameters: parameters of the process
        :param context: context of the process
        :param model_feedback: feedback instance for the process
        :return:
        """
        # Use a multi-step feedback, so that individual child algorithm progress reports are adjusted for the
        # overall progress through the model
        self.xml_path = parameters["XML_PATH"]
        if not self.xml_path.lower().endswith(".xml"):
            feedback = QgsProcessingMultiStepFeedback(0, model_feedback)
            feedback.reportError(
                "XML Workspace Definition is not an XML file!", True)
            return {}

        db_name = parameters["DB_NAME"]
        db_params = self.get_db_params(db_name)
        store_name = parameters["GS_STORE"]
        wrk_name = parameters["GS_WORKSPACE"]

        dataset_list = []
        datasets = self.getDatasets()
        for dataset in datasets:
            type = dataset.getElementsByTagName(
                TAG_DE_TYPE)[0].childNodes[0].data
            if type == "esriDTFeatureClass":
                ds_name = dataset.getElementsByTagName(
                    TAG_DE_NAME)[0].childNodes[0].data
                dataset_list.append({
                    "name": ds_name.lower(),
                    "srs": "EPSG:4326"
                })

        feedback = QgsProcessingMultiStepFeedback(2 + len(dataset_list),
                                                  model_feedback)

        feedback.pushInfo("Get GeoServer Catalog: " +
                          parameters["GS_REST_URL"])
        gs_catalogue = Catalog(parameters["GS_REST_URL"],
                               parameters["GS_ADMIN"],
                               parameters["GS_PASSWORD"])

        # workspace
        if wrk_name == "" or wrk_name is None:
            wrk_name = db_name.lower() + "_ws"
        wrk_uri = "http://" + wrk_name
        feedback.pushInfo("GeoServer Workspace: " + wrk_name + " (" + wrk_uri +
                          ")")
        workspace = gs_catalogue.get_workspace(wrk_name)
        if workspace is None:
            workspace = gs_catalogue.create_workspace(wrk_name, wrk_uri)
        feedback.setCurrentStep(1)

        # store
        if store_name == "" or store_name is None:
            store_name = db_name.lower() + "_ds"
        feedback.pushInfo("GeoServer Data Store: " + store_name)
        store = gs_catalogue.get_store(store_name, workspace)
        if store is None:
            store = gs_catalogue.create_datastore(store_name, workspace)
            store.connection_parameters.update(**db_params)
            gs_catalogue.save(store)
        feedback.setCurrentStep(2)

        step = 2
        published_count = 0
        layer_prefix = "v_"
        for ds_cur in dataset_list:
            layer_name = layer_prefix + ds_cur["name"]
            feedback.pushInfo("GeoServer Publish: " + layer_name + " (" +
                              ds_cur["srs"] + ")")
            try:
                layer = gs_catalogue.get_layer(layer_name)
                if layer is not None:
                    gs_catalogue.delete(layer)  # delete() issues the REST call directly; no separate save() needed
                gs_catalogue.publish_featuretype(layer_name, store,
                                                 ds_cur["srs"])
                published_count += 1
            except Exception as e:
                feedback.reportError("Error: " + str(e), False)
            step += 1
            feedback.setCurrentStep(step)

        gs_catalogue.reload()
        layers = gs_catalogue.get_layers(store)
        feedback.pushInfo("-" * 80)
        feedback.pushInfo("Published layers: " + str(published_count))
        for layer in layers:
            feedback.pushInfo(layer.name + " is published!")
        feedback.pushInfo("-" * 80)
        results = {}
        outputs = {}
        return results
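The dataset list above comes from an ESRI XML workspace definition; getDatasets and the TAG_DE_* constants are not shown in the example. A hypothetical sketch of the same scan with xml.dom.minidom, assuming the usual ESRI tag names DataElement, DatasetType and Name:

from xml.dom import minidom

def list_feature_classes(xml_path):
    """Return lower-cased names of esriDTFeatureClass datasets in an XML workspace definition."""
    doc = minidom.parse(xml_path)
    names = []
    for element in doc.getElementsByTagName("DataElement"):  # assumed tag name
        type_nodes = element.getElementsByTagName("DatasetType")
        name_nodes = element.getElementsByTagName("Name")
        if type_nodes and name_nodes and \
                type_nodes[0].childNodes[0].data == "esriDTFeatureClass":
            names.append(name_nodes[0].childNodes[0].data.lower())
    return names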
Beispiel #16
0
def calccosts(cf, layer_name, json_matrix):
    new_layer = "classroads_{}".format(int(1000000 * time.time()))

    # the calccost function recalculates the visualisation categories
    engine = create_engine(
        "postgresql+psycopg2://"
        + cf.get("PostGis", "user")
        + ":"
        + cf.get("PostGis", "pass")
        + "@"
        + cf.get("PostGis", "host")
        + ":"
        + str(cf.get("PostGis", "port"))
        + "/"
        + cf.get("PostGis", "db"),
        strategy="threadlocal",
        pool_pre_ping=True,
    )

    # try:  # TODO make the ra2ce schema name as a parameter (input to the service)
    strSql = """create table temp.{new_layer} as select geom, societal_class, repair_class, 0 as visclass from public.{layer_name}_priority;""".format(
        new_layer=new_layer, layer_name=layer_name
    )
    ressql = engine.execute(strSql)
    print("create temp table", ressql)
    # except exc.DBAPIError as e:
    # an exception is raised, Connection is invalidated.
    # if e.connection_invalidated:
    # logging.info("Connection was invalidated!")

    data = json.loads(json_matrix)
    values = data["values"]
    for societalIndex in range(len(values)):
        for repairIndex in range(len(values[societalIndex])):
            strSql = """update temp.{new_layer} set visclass = {val}
                            where societal_class = {s} and repair_class = {r};""".format(
                new_layer=new_layer,
                val=values[societalIndex][repairIndex],
                s=societalIndex + 1,
                r=repairIndex + 1,
            )
            ressql = engine.execute(strSql)
        ressql.close()

    # publish the layer in geoserver
    cat = Catalog(
        "{}/rest/".format(cf.get("GeoServer", "host")),
        username=cf.get("GeoServer", "user"),
        password=cf.get("GeoServer", "pass"),
    )
    print("cat", cat)
    datastore = cat.get_store("temp")
    ft = cat.publish_featuretype(
        new_layer, datastore, native_crs="EPSG:4326", jdbc_virtual_table=""
    )
    cat.save(ft)
    layer = cat.get_layer(new_layer)
    print("layer", layer)
    # check if shapefile is line or point
    # TODO check styles with Gerrit

    style = "ra2ce"

    # set style and save layer
    # layer._set_default_style(style)
    layer.default_style = style
    cat.save(layer)

    res = writeOutput(cf=cf, wmslayer="ra2ce-dr:{}".format(new_layer), defstyle=style)
    # time.sleep(2)
    return res
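A hypothetical call of calccosts, showing the expected shape of json_matrix: values[societal_class - 1][repair_class - 1] gives the new visclass, matching the 1-based indices in the UPDATE statement above. The config file name and layer name are placeholders; the config must provide the [PostGis] and [GeoServer] sections read by the function.

import json
try:
    import configparser                    # Python 3
except ImportError:
    import ConfigParser as configparser    # Python 2

cf = configparser.ConfigParser()
cf.read("ra2ce.cfg")                       # placeholder config file

matrix = {"values": [[1, 2, 3],
                     [2, 3, 4],
                     [3, 4, 5]]}

result = calccosts(cf, "my_roads_layer", json.dumps(matrix))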
Beispiel #17
0
def delete(uid):
    """A function to delete a deviation map, all its tables, files and GeoServer layers.

    GET request: Renders and returns a site showing delete options.

    POST request: Gets the delete options chosen by the user and uses them to delete the selected parts of the deviation map.
    To delete GeoServer layers the GeoServer configuration client library is used.
    """
    uid = uid.encode("ISO-8859-1")
    if request.method == "POST":
        dm = DevMap.query.filter_by(uid=uid).first()
        if (
            current_user.is_authenticated()
            and dm.owner == current_user
            or dm.owner == User.query.filter_by(username="******").first()
        ):
            if (
                dm.wmsposdevlines
                or dm.wmsmaxdevgrid
                or dm.wmsabsdevgrid
                or dm.wmsmatchingrategrid
                or dm.wmsunmatchedref
                or dm.wmsunmatchedosm
                or dm.wmsmatchedref
                or dm.wmsmatchedosm
                or dm.wmsminlevenshtein
                or dm.wmsmaxlevenshtein
            ):
                cat = Catalog(gs_url + "rest")
                cat.username = gs_user
                cat.password = gs_password
                ws = None
                try:
                    ws = cat.get_workspace(gs_workspace)
                except socket.error, e:
                    detail = "GeoServer is not available. Make sure that it is running and the connection is ok."
                    return render_template("error.html", err=e, detail=detail)

                st = cat.get_store(gs_store, ws)
                if "deletemaxdevgrid" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_maxdevgrid"
                    if dm.wmsmaxdevgrid:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsmaxdevgrid = False
                if "deleteposdevlines" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_posdevlines"
                    if dm.wmsposdevlines:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsposdevlines = False
                if "deleteabsdevgrid" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_absdevgrid"
                    if dm.wmsabsdevgrid:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                        if ft is not None:
                            cat.delete(ft)
                        dm.wmsabsdevgrid = False
                if "deletematchingrategrid" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_matchingrategrid"
                    if dm.wmsmatchingrategrid:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsmatchingrategrid = False
                if "deleteunmatchedref" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_unmatchedref"
                    if dm.wmsunmatchedref:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsunmatchedref = False
                if "deleteunmatchedosm" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_unmatchedosm"
                    if dm.wmsunmatchedosm:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsunmatchedosm = False
                if "deletematchedref" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_matchedref"
                    if dm.wmsmatchedref:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsmatchedref = False
                if "deletematchedosm" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_matchedosm"
                    if dm.wmsmatchedosm:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsmatchedosm = False
                if "deleteminlevenshtein" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_minlevenshtein"
                    if dm.wmsminlevenshtein:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsminlevenshtein = False
                if "deletemaxlevenshtein" in request.form or "deleteall" in request.form:
                    feattype = "odf_" + uid + "_maxlevenshtein"
                    if dm.wmsmaxlevenshtein:
                        l = cat.get_layer(feattype)
                        if l is not None:
                            cat.delete(l)
                        dm.wmsmaxlevenshtein = False

            if "deleteall" in request.form:
                folder = secure_filename(uid)
                folder = os.path.join(app.config["UPLOAD_FOLDER"], folder)
                shutil.rmtree(folder, True)
                db.engine.execute("drop table if exists odf_" + uid + "_ref")
                db.engine.execute("drop table if exists odf_" + uid + "_ref_presplitted")
                db.engine.execute("drop table if exists odf_" + uid + "_ref_splitted")
                db.engine.execute("drop table if exists odf_" + uid + "_found")
                db.engine.execute("drop table if exists odf_" + uid + "_ref_junctions")
                db.engine.execute("drop table if exists odf_" + uid + "_ref_points")
                db.engine.execute("drop table if exists odf_" + uid + "_ref_cutpoints")
                db.engine.execute("drop table if exists odf_" + uid + "_ref_cutcheckpoints")
                db.engine.execute("drop table if exists odf_" + uid + "_osm")
                db.engine.execute("drop table if exists odf_" + uid + "_osm_presplitted")
                db.engine.execute("drop table if exists odf_" + uid + "_osm_splitted")
                db.engine.execute("drop table if exists odf_" + uid + "_osm_junctions")
                db.engine.execute("drop table if exists odf_" + uid + "_osm_points")
                db.engine.execute("drop table if exists odf_" + uid + "_osm_cutpoints")
                db.engine.execute("drop table if exists odf_" + uid + "_osm_cutcheckpoints")
                db.engine.execute("drop table if exists odf_" + uid + "_unmatchedref;")
                db.engine.execute("drop table if exists odf_" + uid + "_unmatchedosm;")
                db.engine.execute("drop table if exists odf_" + uid + "_minlevenshtein;")
                db.engine.execute("drop table if exists odf_" + uid + "_maxlevenshtein;")
                db.engine.execute("drop table if exists odf_" + uid + "_grid;")
                db.engine.execute("drop table if exists odf_" + uid + "_maxdevgrid;")
                db.engine.execute("drop table if exists odf_" + uid + "_matchingrategrid;")
                db.engine.execute("drop table if exists odf_" + uid + "_deviationlines")
                db.engine.execute("drop table if exists odf_" + uid + "_junction_deviationlines")

                if DEBUG:
                    db.engine.execute("drop table if exists odf_" + uid + "_osm_presplitted_cutcheckpoints")
                    db.engine.execute("drop table if exists odf_" + uid + "_osm_presplitted_cutpoints")
                    db.engine.execute("drop table if exists odf_" + uid + "_osm_presplitted_junctions")
                    db.engine.execute("drop table if exists odf_" + uid + "_osm_presplitted_points")
                    db.engine.execute("drop table if exists odf_" + uid + "_ref_corrected")
                    db.engine.execute("drop table if exists odf_" + uid + "_ref_corrected_presplitted")
                    db.engine.execute("drop table if exists odf_" + uid + "_ref_corrected_presplitted_cutcheckpoints")
                    db.engine.execute("drop table if exists odf_" + uid + "_ref_corrected_presplitted_cutpoints")
                    db.engine.execute("drop table if exists odf_" + uid + "_ref_corrected_presplitted_junction_devvec")
                    db.engine.execute("drop table if exists odf_" + uid + "_ref_corrected_presplitted_junctions")
                    db.engine.execute("drop table if exists odf_" + uid + "_ref_corrected_presplitted_points")
                    db.engine.execute("drop table if exists odf_" + uid + "_result")

            if "deleteall" not in request.form:
                db.session.add(dm)
                db.session.commit()
                return render_template("delete.html", uid=uid, dm=dm, error=None)
            else:
                db.session.delete(dm)
                db.session.commit()
                return redirect(url_for("basic.index"))
        else:
            return render_template("error.html", err="You are not allowed to delete this map!")
Beispiel #18
0
def setup_join(new_table_owner, table_name, layer_typename,
               table_attribute_name, layer_attribute_name):
    """
    Setup the Table Join in GeoNode
    """
    LOGGER.info('setup_join')

    assert isinstance(new_table_owner,
                      User), "new_table_owner must be a User object"
    assert table_name is not None, "table_name cannot be None"
    assert layer_typename is not None, "layer_typename cannot be None"
    assert table_attribute_name is not None, "table_attribute_name cannot be None"
    assert layer_attribute_name is not None, "layer_attribute_name cannot be None"

    LOGGER.info('setup_join. Step (1): Retrieve the DataTable object')
    try:
        dt = DataTable.objects.get(table_name=table_name)
    except DataTable.DoesNotExist:
        err_msg = 'No DataTable object found for table_name "%s"' % table_name
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (2): Retrieve the Layer object')

    try:
        layer = Layer.objects.get(typename=layer_typename)
    except Layer.DoesNotExist:
        err_msg = 'No Layer object found for layer_typename "%s"' % layer_typename
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (3): Retrieve the DataTableAttribute object')
    try:
        table_attribute = DataTableAttribute.objects.get(\
                                        datatable=dt,
                                        attribute=table_attribute_name)
    except DataTableAttribute.DoesNotExist:
        err_msg = 'No DataTableAttribute object found for table/attribute (%s/%s)' \
                  % (dt,
                     table_attribute_name)
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (4): Retrieve the LayerAttribute object')
    try:
        layer_attribute = LayerAttribute.objects.get(\
                                layer=layer,
                                attribute=layer_attribute_name)
    except LayerAttribute.DoesNotExist:
        err_msg = 'No LayerAttribute object found for layer/attribute (%s/%s)'\
                  % (layer, layer_attribute_name)
        LOGGER.error(err_msg)
        return None, err_msg

    LOGGER.info('setup_join. Step (5): Build SQL statement to create view')

    layer_name = layer.typename.split(':')[1]

    # ------------------------------------------------------------------
    # (5) Check if the layer and the table are in the same store (database)
    # ------------------------------------------------------------------
    if layer.store != dt.tablespace:
        err_msg = 'layer (%s) and tablespace (%s) must be in the same database.'\
            % (layer.store, dt.tablespace)
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------------------
    # (5a) Check if the join columns are compatible
    # ------------------------------------------------------------------
    column_checker = ColumnChecker(\
                                layer_name,
                                layer_attribute.attribute,
                                dt.table_name,
                                table_attribute.attribute)
    (are_cols_compatible,
     err_msg) = column_checker.are_join_columns_compatible()
    if not are_cols_compatible:  # Doesn't look good, return an error message
        return None, err_msg

    # ------------------------------------------------------------------
    # (5b) Create SQL statement for the tablejoin
    # ------------------------------------------------------------------
    view_name = get_unique_viewname(dt.table_name, layer_name)

    # SQL to create the view
    view_sql = ('CREATE VIEW {0}'
                ' AS SELECT {1}.{2}, {3}.*'
                ' FROM {1} INNER JOIN {3}'
                ' ON {1}."{4}" = {3}."{5}";'.format(\
                    view_name,  # 0
                    layer_name, # 1
                    THE_GEOM_LAYER_COLUMN, # 2
                    dt.table_name,  # 3
                    layer_attribute.attribute, # 4
                    table_attribute.attribute)) # 5

    # Materialized view for next version of Postgres
    """view_sql = ('create materialized view {0} as'
                   ' select {1}.the_geom, {2}.*'
                   ' from {1} inner join {2}'
                   ' on {1}."{3}" = {2}."{4}";').format(\
                       view_name,
                       layer_name,
                       dt.table_name,
                       layer_attribute.attribute,
                       table_attribute.attribute)
    """
    LOGGER.info('setup_join. Step (6): Retrieve stats')

    # ------------------------------------------------------------------
    # Retrieve stats
    # ------------------------------------------------------------------
    matched_count_sql = ('select count({0}) from {1} where {1}.{0}'
                         ' in (select "{2}" from {3});').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    unmatched_count_sql = ('select count({0}) from {1} where {1}.{0}'
                           ' not in (select "{2}" from {3});').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    unmatched_list_sql = ('select {0} from {1} where {1}.{0}'
                          ' not in (select "{2}" from {3}) limit 500;').format(\
                            table_attribute.attribute,
                            dt.table_name,
                            layer_attribute.attribute,
                            layer_name)

    # ------------------------------------------------------------------
    # Create a TableJoin object
    # ------------------------------------------------------------------
    LOGGER.info('setup_join. Step (7): Create a TableJoin object')
    tj, created = TableJoin.objects.get_or_create(\
                                source_layer=layer,
                                datatable=dt,
                                table_attribute=table_attribute,
                                layer_attribute=layer_attribute,
                                view_name=view_name,
                                view_sql=view_sql)
    tj.save()
    msgt('table join created! :) %s' % tj.id)

    # ------------------------------------------------------------------
    # Create the View (and double view)
    # ------------------------------------------------------------------
    LOGGER.info('setup_join. Step (8): Create the View (and double view)')

    # Convenience method to drop a view
    #
    drop_view_by_name(view_name)

    try:
        conn = psycopg2.connect(get_datastore_connection_string())
        cur = conn.cursor()

        # Create the new view
        #
        msg('view_sql: %s' % view_sql)
        cur.execute(view_sql)
        #cur.execute(double_view_sql)   # For later version of postgres

        # Record the counts for matched records and
        # add unmatched records to the TableJoin object

        # Matched count
        cur.execute(matched_count_sql)
        tj.matched_records_count = cur.fetchone()[0]

        # Unmatched count
        cur.execute(unmatched_count_sql)
        tj.unmatched_records_count = int(cur.fetchone()[0])

        # Unmatched records list
        if tj.unmatched_records_count > 0:
            cur.execute(unmatched_list_sql)
            tj.unmatched_records_list = ",".join(
                ['%s' % r[0] for r in cur.fetchall()])

        conn.commit()
        cur.close()

        # If no records match, then delete the TableJoin
        #
        if tj.matched_records_count == 0:
            # Delete the table join
            tj.delete()

            # Create an error message, log it, and send it back
            err_msg = ('No records matched.  Make sure that you chose'
                       ' the correct column and that the chosen layer'
                       ' is in the same geographic area.')
            LOGGER.error(err_msg)
            return None, err_msg

    except Exception as ex_obj:
        tj.delete()  # comment this out if you need to keep the table join while debugging

        traceback.print_exc(sys.exc_info())
        err_msg = "Error Joining table %s to layer %s: %s" % (\
                        table_name,
                        layer_typename,
                        str(ex_obj[0]))
        LOGGER.error(err_msg)

        if err_msg.find('You might need to add explicit type casts.') > -1:
            user_msg = "The chosen column is a different data type than the one expected."
        else:
            user_msg = err_msg

        return None, user_msg

    finally:
        conn.close()

    #--------------------------------------------------
    # Create the Layer in GeoServer from the view
    #--------------------------------------------------
    LOGGER.info(
        'setup_join. Step (9): Create the Layer in GeoServer from the view')
    try:
        LOGGER.info('setup_join. Step (9a): Find the datastore')
        #----------------------------
        # Find the datastore
        #----------------------------
        cat = Catalog(
            settings.GEOSERVER_BASE_URL + "rest",
            settings.GEOSERVER_CREDENTIALS[0],
            settings.GEOSERVER_CREDENTIALS[1])  # "admin", "geoserver")

        workspace = cat.get_workspace('geonode')
        ds_list = cat.get_xml(workspace.datastore_url)
        datastores = [
            datastore_from_index(cat, workspace, n)
            for n in ds_list.findall("dataStore")
        ]
        ds = None

        # Iterate through datastores
        #
        for datastore in datastores:
            #msg ('datastore name:', datastore.name)
            if datastore.name == layer.store:
                ds = datastore

        if ds is None:
            tj.delete()
            err_msg = "Datastore name not found: '%s' (join)" % settings.DB_DATASTORE_NAME
            LOGGER.error(err_msg)
            return None, err_msg

        # Publish the feature
        #
        LOGGER.info('setup_join. Step (9b): Publish the feature type')
        ft = cat.publish_featuretype(view_name, ds, layer.srs, srs=layer.srs)
        #ft = cat.publish_featuretype(double_view_name, ds, layer.srs, srs=layer.srs)

        LOGGER.info('setup_join. Step (9c): Save the feature type')
        cat.save(ft)

    except Exception as e:
        tj.delete()
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoServer layer for %s: %s" % (view_name,
                                                                 str(e))
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------
    # Set the Layer's default Style
    # ------------------------------------------------------
    sld_success, err_msg = set_style_for_new_join_layer(cat, ft, layer)
    if not sld_success:
        return None, err_msg

    # ------------------------------------------------------------------
    # Create the Layer in GeoNode from the GeoServer Layer
    # ------------------------------------------------------------------
    LOGGER.info(
        'setup_join. Step (10): Create the Layer in GeoNode from the GeoServer Layer'
    )
    try:
        layer_params = {
            "workspace":
            workspace.name,
            "store":
            ds.name,
            "storeType":
            ds.resource_type,
            "typename":
            "%s:%s" %
            (workspace.name.encode('utf-8'), ft.name.encode('utf-8')),
            "title":
            dt.title or 'No title provided',
            "abstract":
            dt.abstract or 'No abstract provided',
            "uuid":
            str(uuid.uuid4()),
            "owner":
            new_table_owner,
            #"bbox_x0": Decimal(ft.latlon_bbox[0]),
            #"bbox_x1": Decimal(ft.latlon_bbox[1]),
            #"bbox_y0": Decimal(ft.latlon_bbox[2]),
            #"bbox_y1": Decimal(ft.latlon_bbox[3])
        }

        layer, created = Layer.objects.get_or_create(name=view_name,
                                                     defaults=layer_params)

        # Set default permissions (public)
        layer.set_default_permissions()
        #set_attributes(layer, overwrite=True)

        tj.join_layer = layer
        tj.save()
    except Exception as e:
        tj.delete()
        traceback.print_exc(sys.exc_info())
        err_msg = "Error creating GeoNode layer for %s: %s" % (view_name,
                                                               str(e))
        LOGGER.error(err_msg)
        return None, err_msg

    # ------------------------------------------------------------------
    # Create LayerAttributes for the new Layer (not done in GeoNode 2.x)
    # ------------------------------------------------------------------
    LOGGER.info(
        'setup_join. Step (11): Create Layer Attributes from the Datatable')
    (attributes_created,
     err_msg) = create_layer_attributes_from_datatable(dt, layer)
    if not attributes_created:
        LOGGER.error(err_msg)
        tj.delete()  # Delete the table join object
        return None, "Sorry there was an error creating the Datatable (s11)"

    return tj, ""
Beispiel #19
0
def export(uid):
    """This function is used to show and handle the export options and uses the GeoServer REST API to export results.
    This function uses the GeoServer configuration client library by boundlessgeo, see:
    https://github.com/boundlessgeo/gsconfig
    To export the results, the GeoServer application has to be running and correctly set up with access to the
    database, and the styles should already be imported (see the geoserver styles folder).

    GET request: Renders and returns a site showing export options.

    POST request: Gets the export options from the user and exports the results if they are defined in the options.
    The export is made using the GeoServer configuration client library. After the export, the newly created layers
    should be visible in the GeoServer web interface and the WM(T)S links should also work. These links can then be
    used to display the WM(T)S layers in JOSM or a GIS.

    Remarks: This function will be redesigned in future versions
    """
    uid = uid.encode("ISO-8859-1")
    if request.method == "POST":
        dm = DevMap.query.filter_by(uid=uid).first()
        if dm.owner == current_user or dm.owner == User.query.filter_by(username="******").first():
            title = request.form["title"]
            listedmap = False
            if "listed" in request.form:
                listedmap = True

            dm.title = title
            dm.listed = listedmap
            cat = Catalog(gs_url + "rest")
            cat.username = gs_user
            cat.password = gs_password

            ws = None
            try:
                ws = cat.get_workspace(gs_workspace)
            except socket.error, e:
                db.session.add(dm)
                db.session.commit()
                return render_template("export.html", uid=uid, error=e, dm=dm)
            st = cat.get_store(gs_store, ws)

            if "maxdevgrid" in request.form:
                feattype = "odf_" + uid + "_maxdevgrid"
                if dm.wmsmaxdevgrid:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":maxdevgrid")
                cat.save(l)
                dm.wmsmaxdevgrid = True
            else:
                dm.wmsmaxdevgrid = False
            if "posdevlines" in request.form:
                feattype = "odf_" + uid + "_posdevlines"
                if dm.wmsposdevlines:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":posdevlines")
                cat.save(l)
                dm.wmsposdevlines = True
            else:
                dm.wmsposdevlines = False
            if "absdevgrid" in request.form:
                feattype = "odf_" + uid + "_absdevgrid"
                if dm.wmsabsdevgrid:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":absdevgrid")
                cat.save(l)
                dm.wmsabsdevgrid = True
            else:
                dm.wmsabsdevgrid = False
            if "matchingrategrid" in request.form:
                feattype = "odf_" + uid + "_matchingrategrid"
                if dm.wmsmatchingrategrid:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":matchingrategrid")
                cat.save(l)
                dm.wmsmatchingrategrid = True
            else:
                dm.wmsmatchingrategrid = False
            if "unmatchedref" in request.form:
                feattype = "odf_" + uid + "_unmatchedref"
                if dm.wmsunmatchedref:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":ReferenceLines")
                cat.save(l)
                dm.wmsunmatchedref = True
            else:
                dm.wmsunmatchedref = False
            if "unmatchedosm" in request.form:
                feattype = "odf_" + uid + "_unmatchedosm"
                if dm.wmsunmatchedosm:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":OSMLines")
                cat.save(l)
                dm.wmsunmatchedosm = True
            else:
                dm.wmsunmatchedosm = False
            if "matchedref" in request.form:
                feattype = "odf_" + uid + "_matchedref"
                if dm.wmsmatchedref:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":ReferenceLines")
                cat.save(l)
                dm.wmsmatchedref = True
            else:
                dm.wmsmatchedref = False
            if "matchedosm" in request.form:
                feattype = "odf_" + uid + "_matchedosm"
                if dm.wmsmatchedosm:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":OSMLines")
                cat.save(l)
                dm.wmsmatchedosm = True
            else:
                dm.wmsmatchedosm = False
            if "minlevenshtein" in request.form:
                feattype = "odf_" + uid + "_minlevenshtein"
                if dm.wmsminlevenshtein:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":ReferenceLines")
                cat.save(l)
                dm.wmsminlevenshtein = True
            else:
                dm.wmsminlevenshtein = False
            if "maxlevenshtein" in request.form:
                feattype = "odf_" + uid + "_maxlevenshtein"
                if dm.wmsmaxlevenshtein:
                    l = cat.get_layer(feattype)
                    if l is not None:
                        cat.delete(l)
                    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                    if ft is not None:
                        cat.delete(ft)
                ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
                cat.save(ft)
                l = cat.get_layer(feattype)
                l._set_default_style(gs_workspace + ":ReferenceLines")
                cat.save(l)
                dm.wmsmaxlevenshtein = True
            else:
                dm.wmsmaxlevenshtein = False

            db.session.add(dm)
            db.session.commit()
            return render_template("finished.html", uid=uid)
        else:
            return render_template("export.html", uid=uid, error="No User", dm=dm)
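
# The six form-driven branches above repeat the same delete/republish/restyle
# steps. A minimal refactoring sketch (not part of the original view), assuming
# the same `cat` (gsconfig Catalog), `st` (datastore) and `gs_workspace` values
# already used above:
def republish_with_style(cat, st, gs_workspace, feattype, style_name, already_published):
    """Drop a stale layer if present, publish the feature type again and set its default style."""
    if already_published:
        stale = cat.get_layer(feattype)
        if stale is not None:
            cat.delete(stale)
    ft = cat.publish_featuretype(feattype, st, "EPSG:4326")
    cat.save(ft)
    layer = cat.get_layer(feattype)
    layer._set_default_style(gs_workspace + ":" + style_name)
    cat.save(layer)

# e.g. republish_with_style(cat, st, gs_workspace,
#                           "odf_" + uid + "_matchedref", "ReferenceLines",
#                           dm.wmsmatchedref)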
Beispiel #20
0
# Check if remote datastore exists on local system
for dataStoreObj in dataStoresObj:
    if dataStoreObj.name == remoteDSName:
        localDSObj = dataStoreObj
        print "Datastore " + localDSObj.name + " already exists"

if not localDSObj:
    localDSObj = cat.create_datastore(remoteDSName, localWSName)     
    localDSObj.connection_parameters.update(host=remoteHost, port=remotePort, \
        database=remoteDatabase, user=remoteUser, passwd=remotePassword, dbtype=remoteDBType)
    cat.save(localDSObj)
    print "Datastore " + localDSObj.name + " created"

# Check if remote layer already exists on local system
localLayersObj = cat.get_layers()

for localLayerObj in localLayersObj:
    if localLayerObj.resource.name == remoteLayerName:
        localLayerName = remoteLayerName
        print "Layer " + remoteLayerName + " already published"

if not localLayerName:
    # Publish remote layer    
    featureType = cat.publish_featuretype(remoteLayerName, localDSObj, WGS84, srs=WGS84)
    print "Published layer " + remoteLayerName
    
    # Update layers in GeoSHAPE
    os.system("sudo geoshape-config updatelayers")


Beispiel #21
0
class GeoserverHelper:
    def __init__(self, geoserverUrl = "", geoserverUserName = "", geoserverPW = "", geoserverWorkSpace = "",
        postgreIP = "", postgreUserName = "", postgrePW = ""):

        """use the constructor given arguments if used"""
        self.geoserverUrl = geoserverUrl if geoserverUrl != "" else GEOSERVERURL
        self.geoserverUserName = geoserverUserName if geoserverUserName != "" else GEOSERVERUSERNAME
        self.geoserverPW = geoserverPW if geoserverPW != "" else GEOSERVERPW
        self.geoserverWorkSpace = geoserverWorkSpace if geoserverWorkSpace != "" else "crc"
        self.postgreIP = postgreIP if postgreIP != "" else POSTGREIP
        self.postgreUserName = postgreUserName if postgreUserName != "" else POSTGREUSERNAME
        self.postgrePW = postgrePW if postgrePW != "" else POSTGREPW

        if self.geoserverUrl[-1] != '/':
            raise Exception("GeoserverUrl must end with a slash ('/')")

        self.catalog = Catalog(self.geoserverUrl+"rest/")
        self.catalog.http.add_credentials(self.geoserverUserName,self.geoserverPW)
        try:
            workspaces = self.catalog.get_workspaces()
        except:
            e = sys.exc_info()[0]
            print e
        self.cWorkSpace = self.catalog.get_workspace(self.geoserverWorkSpace)

    def getLayers(self):
        return self.cWorkSpace.catalog.get_layers()

    def insertShapeIntoPostGis(self, shapeFile, databaseName, tableName, encoding=3857):
        '''returns the returnCode of the execution of the insert script, e.g:
        helper.insertShapeIntoPostGis('/home/c815/gsTest/test.shp','crc','testingHelper2')'''

        if not os.path.isfile(shapeFile):
            print "Shape file not found"
            return -1
        cmds = "PGPASSWORD={pgPW} ./createWSFTFromSHP.sh -s {shapeFileF} -d {databaseNameF} -t {tableNameF} -u {postgreUsername} -i {postgreIP}".format(pgPW=self.postgrePW,
            shapeFileF=shapeFile, databaseNameF=databaseName, tableNameF=tableName, postgreUsername=self.postgreUserName, postgreIP=self.postgreIP)
        return subprocess.call(cmds, shell=True)

    def uploadShapeFile(self, shapeFile, storeName):
        # strip the ".shp" extension: shapefile_and_friends appends ".shp", ".shx", ".dbf" and ".prj" itself
        shpPlusSidcars = geoserver.util.shapefile_and_friends(os.path.splitext(shapeFile)[0])
        self.ft = self.catalog.create_featurestore(storeName, shpPlusSidcars, self.cWorkSpace)

    def getStyles(self):
        return self.catalog.get_styles()

    def uploadStyleFile(self, sldFile, styleName, overWrite, workSpace = None):
        with open(sldFile, 'r') as f:
            styleSrc = f.read()
        self.uploadStyle(styleSrc, styleName, overWrite, workSpace)

    def uploadStyle(self, sldSrc, styleName, overWrite, workSpace = None):
        self.catalog.create_style(styleName,sldSrc, overWrite, workSpace)

    def publishPostGISLayer(self,postGISLayerName, storeName, crs='EPSG:3857'):
        '''cat.publish_featuretype('testingstuff',crcStore,native_crs='EPSG:3857')'''

        store = self.catalog.get_store(storeName)
        if store != None:
            self.catalog.publish_featuretype(postGISLayerName, store, crs)

    def setDefaultStyleForLayer(self, layerName, styleName):
        l = self.catalog.get_layer(layerName)

        sNames = [ i.name for i in self.getStyles() ]
        if styleName not in sNames:
            split = styleName.split(':')
            if len(split) == 2:
                workSpace = styleName.split(':')[0]
                newStyleName = styleName.split(':')[1]
            else:
                return -1
            style = self.catalog.get_style(newStyleName, workSpace)
            if style is None:
                return -1
            if l != None:
                l._set_default_style(styleName)
                self.catalog.save(l)
                return 0

    def createPostGISDataStore(self, storeName, postGisPassword, postGisUser, postGisHost,
        postGisDatabase, workSpace = None):

        #check if connection parameter are valid
        try:
            conn = psycopg2.connect("dbname='{dbName}' user='{dbUser}' host='{Host}' password='{password}'".format(dbName=postGisDatabase,
                                                                                                                    dbUser=postGisUser,
                                                                                                                    Host=postGisHost,
                                                                                                                    password=postGisPassword))
        except:
            return False

        w = self.catalog.create_datastore(storeName, workSpace)
        template = Template("""{'validate connections': 'true', 'port': '5432', 
        'Support on the fly geometry simplification': 'true', 
        'create database': 'false', 'dbtype': 'postgis', 
        'Connection timeout': '20', 'namespace': 'http://www.crcproject.com', 
        'Max connection idle time': '300', 'Expose primary keys': 'false', 
        'min connections': '1', 'Max open prepared statements':'50', 
        'passwd': '$passwd', 
        'encode functions': 'false', 
        'max connections': '10', 'Evictor tests per run': '3', 'Loose bbox': 'true', 
        'Evictor run periodicity': '300', 'Estimated extends': 'true', 
        'database': '$database', 
        'fetch size': '1000', 'Test while idle': 'true', 
        'host': '$host', 
        'preparedStatements': 'false', 'schema': 'public', 
        'user': '$user'}""")
        dic = ast.literal_eval(template.substitute(
            passwd=postGisPassword,
            user=postGisUser,
            host=postGisHost,
            database=postGisDatabase))
        #'passwd': 'crypt1:Bsaz2AUI8T+6Pj43krA7kg==', 
        #'user': '******'}
        #'database': 'crc', 
        #'host': 'localhost', 
        w.connection_parameters = dic
        self.catalog.save(w)
        return True
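
# A minimal usage sketch for the helper class above (the URL and credentials
# below are placeholders; alternatively the GEOSERVERURL / GEOSERVERUSERNAME /
# GEOSERVERPW / POSTGREIP / POSTGREUSERNAME / POSTGREPW constants it falls back
# on must be defined). Note that the GeoServer URL must end with a slash:
#
#   helper = GeoserverHelper(geoserverUrl="http://localhost:8080/geoserver/",
#                            geoserverUserName="admin", geoserverPW="geoserver",
#                            geoserverWorkSpace="crc")
#   helper.uploadShapeFile("/home/c815/gsTest/test.shp", "testStore")
#   helper.publishPostGISLayer("testingstuff", "crcStore")
#   helper.setDefaultStyleForLayer("testingstuff", "crc:some_line_style")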
Beispiel #22
0
#!/usr/bin/env python
import sys
from geoserver.catalog import Catalog

workspace = sys.argv[1]
datastore = sys.argv[1]
shpname = sys.argv[2].lower()
epsg = 'EPSG:' + sys.argv[3]

catalog = Catalog("http://localhost:2012/geoserver/rest",
                  username='******',
                  password='******')

ws = catalog.get_workspace(workspace)
ds = catalog.get_store(datastore, ws)
publish = catalog.publish_featuretype(shpname, ds, epsg)
catalog.save(publish)
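
# Usage sketch for the script above (the file name is hypothetical; the
# argument order follows the sys.argv reads: workspace/datastore name,
# layer/table name, numeric EPSG code):
#
#   python publish_featuretype.py myworkspace roads 4326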
Beispiel #23
0
def logingeoserver():
    form = LoginFormGeoserver()
    mytoken = session.get('mytoken')
    projectid = session.get('project')
    categoryid = session.get('category')
    segmentos = requests.get(
        'https://raw.githubusercontent.com/guilhermeiablo/inventsys2infoambiente/master/dados/ERS_segmentos_rodoviarios.geojson'
    )
    tabelagerada = session.get('tabelagerada')

    if form.validate_on_submit():
        urlgeoserver = str(form.urlgeoserver.data)
        usrgeoserver = str(form.usrgeoserver.data)
        pwdgeoserver = str(form.pwdgeoserver.data)
        workspace = str(form.workspace.data)
        datastore = str(form.datastore.data)

        cat = Catalog(urlgeoserver,
                      username=usrgeoserver,
                      password=pwdgeoserver)
        cite = cat.get_workspace(workspace)
        ds = cat.get_store(datastore, workspace)

        ds.connection_parameters.update(host=session.get('hostinput'),
                                        port='5432',
                                        database=session.get('dbnameinput'),
                                        user=session.get('userinput'),
                                        passwd=session.get('pwdgeoserver'),
                                        dbtype='postgis',
                                        schema='public')
        cat.save(ds)

        if cat:

            flash(f'Data successfully sent to {form.workspace.data}!',
                  'success')

            for feature in segmentos.json()['features']:
                if projectid == '10762':
                    nomedolayer = str(
                        str(feature['properties']['nome']) + '_PMF_' +
                        str(tabelagerada))
                else:
                    nomedolayer = str(
                        str(feature['properties']['nome']) + '_' +
                        str(tabelagerada))

                while True:
                    try:
                        ft = cat.publish_featuretype(nomedolayer,
                                                     ds,
                                                     'EPSG:4326',
                                                     srs='EPSG:4326')
                        # publish succeeded, stop retrying this layer
                        break

                    except:
                        # on any publish failure, reset the GeoServer
                        # catalog and move on to the next feature
                        urldatastore = urlgeoserver + 'reset'
                        headersdatastore = {'Content-Type': 'text/xml'}
                        authdatastore = (usrgeoserver, pwdgeoserver)
                        r = requests.post(urldatastore,
                                          headers=headersdatastore,
                                          auth=authdatastore)
                        break

            return redirect(
                url_for('logininfoambiente',
                        mytoken=session['mytoken'],
                        project=session['project'],
                        category=session['category']))
        else:
            flash('Error connecting to the server. Please try again.', 'danger')

    return render_template("logingeoserver.html",
                           title='LoginGeoserver',
                           form=form,
                           mytoken=mytoken,
                           project=session['project'],
                           category=session['category'])
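
# A bounded-retry variant of the publish loop above (a sketch, not the original
# behaviour): try the publish and, on failure, POST to GeoServer's "reset"
# endpoint before retrying, giving up after a few attempts.
import requests


def publish_with_reset(cat, ds, layer_name, urlgeoserver, auth, attempts=3):
    for _ in range(attempts):
        try:
            return cat.publish_featuretype(layer_name, ds, 'EPSG:4326', srs='EPSG:4326')
        except Exception:
            # same reset call as in the view above
            requests.post(urlgeoserver + 'reset',
                          headers={'Content-Type': 'text/xml'},
                          auth=auth)
    return None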
Beispiel #24
0
class ModifyingTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)
        
        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
                        [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
                        rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)


        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            self.cat.delete(ds)

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    def testCoverageStoreCreate(self):
        ds = self.cat.create_coveragestore2("coverage_gsconfig")
        ds.data_url = "file:data/mytiff.tiff"
        self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs) 

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)
     
    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])
 
        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError, 
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf':    'test/data/Pk50095.tfw',
            'prj':    'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)


    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw':    'test/data/Pk50095.tfw',
            'prj':    'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        # TODO: Uploading WorldImage file no longer works???
        # ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        # self.assertRaises(
        #         ConflictingDataError, 
        #         lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # )

        self.assertRaises(
            UploadError, 
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")
     
        old_default_style = lyr.default_style
        lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])


    def testStyles(self):
        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self): 
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)
Beispiel #25
0
    def add_to_geoserver(self):
        # Inspired (copied :) ) from https://groups.google.com/forum/#!msg/geonode-users/R-u57r8aECw/AuEpydZayfIJ # TODO : check license

        # We connect to the catalog
        gsUrl = settings.OGC_SERVER['default']['LOCATION'] + "rest"
        gsUser = settings.OGC_SERVER['default']['USER']
        gsPassword = settings.OGC_SERVER['default']['PASSWORD']
        cat = Catalog(gsUrl, gsUser, gsPassword)
        if cat is None:
            raise Exception('unable to instantiate geoserver catalog')

        # We get the workspace
        ws = cat.get_workspace(settings.DEFAULT_WORKSPACE)
        if ws is None:
            raise Exception('workspace %s not found in geoserver' %
                            settings.DEFAULT_WORKSPACE)

        # We get or create the datastore
        store = cat.get_store(self.datastore_name, ws)
        if store is None:
            store = cat.create_datastore(self.datastore_name, ws)
            store.connection_parameters.update(host="postgres",
                                               port="5432",
                                               database="postgres",
                                               user="******",
                                               passwd="postgres",
                                               schema='offline_osm',
                                               dbtype="postgis")
            cat.save(store)
        if store is None:
            raise Exception('datastore %s not found in geoserver' %
                            self.datastore_name)

        # We get or create each layer then register it into geonode
        for layer in Command.layers:
            layername = layer["name"]
            self.stdout.write('Adding {} to geoserver...'.format(layername))

            layer_exists = (not cat.get_layer(layername) is None)

            if not layer_exists or not self.options['no_overwrite']:
                self.stdout.write(
                    ' layer does not exist or no_overwrite unset, we add...')

                ft = cat.publish_featuretype(layername,
                                             store,
                                             'EPSG:4326',
                                             srs='EPSG:4326')
                if ft is None:
                    raise Exception('unable to publish layer %s' % layername)
                ft.title = 'OpenStreetMap Offline - ' + layername.split(
                    '_')[-1]
                ft.abstract = 'This is an automated extract of the OpenStreetMap database. It is available offline. It is intended to be used as a background layer, but the data can also serve analysis purposes.'
                cat.save(ft)

                self.stdout.write(
                    ' adding the style for {}...'.format(layername))
                # We get or create the workspace
                style_path = os.path.join(os.path.dirname(__file__), '..',
                                          '..', 'styles', layer["style_name"])
                if not os.path.exists(style_path):
                    self.stdout.write(
                        ' The file {} does not exist. No style will be applied for {}.'
                        .format(style_path, layername))
                else:
                    cat.create_style(layer["style_name"],
                                     open(style_path, 'r').read(),
                                     overwrite=True,
                                     workspace=settings.DEFAULT_WORKSPACE,
                                     raw=True)

                    style = cat.get_style(layer["style_name"], ws)
                    if style is None:
                        raise Exception('style not found (%s)' %
                                        (layer["style_name"]))

                    publishing = cat.get_layer(layername)
                    if publishing is None:
                        raise Exception('layer not found (%s)' % layername)

                    publishing.default_style = style
                    cat.save(publishing)

                self.stdout.write(
                    ' registering {} into geonode...'.format(layername))
                resource = cat.get_resource(layername, store, ws)
                if resource is None:
                    raise Exception('resource not found (%s)' % layername)

                layer, created = Layer.objects.get_or_create(name=layername)
                layer.workspace = ws.name
                layer.store = store.name
                layer.storeType = store.resource_type
                layer.typename = "%s:%s" % (ws.name.encode('utf-8'),
                                            resource.name.encode('utf-8'))
                layer.title = resource.title
                layer.abstract = resource.abstract
                layer.temporal_extent_start = self.import_timestamp
                layer.temporal_extent_end = self.import_timestamp
                try:
                    layer.save()
                except TypeError as e:
                    # We ignore a specific error in Geonode resourcebase_post_save when settings.SITEURL has no host (relative URL)
                    self.stdout.write(
                        'TODO : report Geonode exception in resourcebase_post_save when settings.SITEURL has no host (relative URL) : '
                        + str(e))
                    pass

                if created:
                    layer.set_default_permissions()
            else:
                self.stdout.write(' layer already exists, we skip.')

        # We get or create the layergroup
        self.stdout.write('Adding layergroup to geoserver...')
        layername = 'offline_osm'
        layergroup_exists = (not cat.get_layergroup(layername) is None)
        if not layergroup_exists or not self.options['no_overwrite']:
            self.stdout.write(
                ' layergroup does not exist or no_overwrite unset, we add...')

            layergroup = cat.get_layergroup(
                layername, workspace=settings.DEFAULT_WORKSPACE)
            if layergroup is None:
                # build the list of layer names that were published above
                layernames = [lyr["name"] for lyr in Command.layers]
                layergroup = cat.create_layergroup(
                    layername,
                    layers=layernames,
                    workspace=settings.DEFAULT_WORKSPACE)
                if layergroup is None:
                    raise Exception('unable to publish layer %s' % layername)
            layergroup.title = 'OpenStreetMap Offline'
            layergroup.abstract = 'This is an automated extract of the OpenStreetMap database. It is available offline. It is intended to be used as a background layer, but the data can also serve analysis purposes.'
            cat.save(layergroup)
        else:
            self.stdout.write(' layergroup already exists, we skip.')

        # TODO : can we add layergroups to Geonode ?
        self.stdout.write(
            "layergroup won't be added to geonode (not supported yet)")
Beispiel #26
0
# Creating a workspace
ws = cat.create_workspace('gsc_test', 'http://localhost:8082/geoserver/testWs')

# Creating a store
ds = cat.create_datastore('bdg_gsc', 'gsc_test')
ds.connection_parameters.update(host='localhost',
                                port='5432',
                                database='bdg_gsc',
                                user='******',
                                passwd='user',
                                dbtype='postgis',
                                schema='public')
cat.save(ds)

# Adding layers
ft = cat.publish_featuretype('mun_pr', ds, 'EPSG:4674', srs='EPSG:4674')
ft1 = cat.publish_featuretype('escolas_pr', ds, 'EPSG:4674', srs='EPSG:4674')

# Adding styles - does not add the complete SLD
#with open("/home/user/gsc_style.sld") as f:
#   cat.create_style('gsc_style', f.read())

# Assigning the style to the layer
# SLD already added in GeoServer
style1 = cat.get_style("gsc_test:gsc_style_mun")
layer1 = cat.get_layer("mun_pr")
layer1.default_style = style1
cat.save(layer1)

style2 = cat.get_style("gsc_test:gsc_style_esc")
layer2 = cat.get_layer("escolas_pr")
layer2.default_style = style2
cat.save(layer2)
Beispiel #27
0
def main(options):
    #connect to geoserver
    cat = Catalog("http://localhost:8080/geoserver/rest", "admin", options.gpw)

    #create datastore for URB schema
    ws = cat.create_workspace(options.alias, 'imio.be')

    ds = cat.create_datastore(options.alias, ws)
    ds.connection_parameters.update(host=options.urbanUrl,
                                    port="5432",
                                    database=options.database,
                                    user="******",
                                    passwd=options.ropw,
                                    dbtype="postgis")

    cat.save(ds)
    ds = cat.get_store(options.alias)

    #config object
    urb = {
        "capa": "Parcelles",
        "toli": "cadastre_ln_toponymiques",
        "canu": "cadastre_pt_num",
        "cabu": "Batiments",
        "gept": "cadastre_points_generaux",
        "gepn": "cadastre_pol_gen",
        "inpt": "point",
        "geli": "cadastre_ln_generales",
        "inli": "cadastre_ln_informations",
        "topt": "point",
    }

    #connect to tables and create layers and correct urban styles
    for table in urb:
        style = urb[table]
        ft = cat.publish_featuretype(table, ds, 'EPSG:31370', srs='EPSG:31370')
        ft.default_style = style
        cat.save(ft)
        resource = ft.resource
        resource.title = options.alias + "_" + table
        resource.save()

        layer, created = Layer.objects.get_or_create(
            name=table,
            defaults={
                "workspace":
                ws.name,
                "store":
                ds.name,
                "storeType":
                ds.resource_type,
                "typename":
                "%s:%s" %
                (ws.name.encode('utf-8'), resource.name.encode('utf-8')),
                "title":
                resource.title or 'No title provided',
                "abstract":
                resource.abstract or 'No abstract provided',
                #"owner": owner,
                "uuid":
                str(uuid.uuid4()),
                "bbox_x0":
                Decimal(resource.latlon_bbox[0]),
                "bbox_x1":
                Decimal(resource.latlon_bbox[1]),
                "bbox_y0":
                Decimal(resource.latlon_bbox[2]),
                "bbox_y1":
                Decimal(resource.latlon_bbox[3])
            })
        set_attributes(layer, overwrite=True)
        if created: layer.set_default_permissions()
Beispiel #28
0
class ModifyingTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'],
                           username=GSPARAMS['GSUSER'],
                           password=GSPARAMS['GSPASSWORD'])

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assertIsNone(self.cat.get_layer('import'))
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assertIsNotNone(self.cat.get_layer('import'))
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            try:
                if lyr:
                    self.cat.delete(lyr)
                    self.cat.delete(lyr.resource)
                if ds:
                    self.cat.delete(ds)
            except Exception:
                pass

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(
            ds, "import", {
                'shp': 'test/data/states.shp',
                'shx': 'test/data/states.shx',
                'dbf': 'test/data/states.dbf',
                'prj': 'test/data/states.prj'
            })

    @drop_table('import2')
    def testVirtualTables(self):
        ds = self.cat.create_datastore("gsconfig_import_test2")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test2")
        self.cat.add_data_to_store(
            ds, "import2", {
                'shp': 'test/data/states.shp',
                'shx': 'test/data/states.shx',
                'dbf': 'test/data/states.dbf',
                'prj': 'test/data/states.prj'
            })
        store = self.cat.get_store("gsconfig_import_test2")
        geom = JDBCVirtualTableGeometry('the_geom', 'MultiPolygon', '4326')
        ft_name = 'my_jdbc_vt_test'
        epsg_code = 'EPSG:4326'
        sql = "select * from import2 where 'STATE_NAME' = 'Illinois'"
        keyColumn = None
        parameters = None

        jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn,
                                   parameters)
        self.cat.publish_featuretype(ft_name,
                                     store,
                                     epsg_code,
                                     jdbc_virtual_table=jdbc_vt)

    # DISABLED; this test works only in the very particular case
    # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR
    # def testCoverageStoreCreate(self):
    #     ds = self.cat.create_coveragestore2("coverage_gsconfig")
    #     ds.data_url = "file:test/data/mytiff.tiff"
    #     self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(
            ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEqual(set(rs.request_srs_list), srs_before,
                         str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(set(rs.request_srs_list), srs_after,
                         str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEqual(set(rs.response_srs_list), srs_before,
                         str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(set(rs.response_srs_list), srs_after,
                         str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEqual(set(rs.supported_formats), formats,
                         str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(set(rs.supported_formats), formats_after,
                         str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&"  # noqa: E501
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://mesonet.agron.iastate.edu/cgi-bin/wms/iowa/rainfall.cgi?VERSION=1.1.1&REQUEST=GetCapabilities&SERVICE=WMS&"  # noqa: E501
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(workspace=wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

        # Testing projection and projection policy changes
        changed_layer.projection = "EPSG:900913"
        changed_layer.projection_policy = "REPROJECT_TO_DECLARED"
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        layer = self.cat.get_layer(changed_layer.name)
        self.assertEqual(layer.resource.projection_policy,
                         changed_layer.projection_policy)
        self.assertEqual(layer.resource.projection, changed_layer.projection)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.items():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars,
                                     sf)

        self.assertIsNotNone(self.cat.get_resource("states_test",
                                                   workspace=sf))

        self.assertRaises(
            ConflictingDataError, lambda: self.cat.create_featurestore(
                "states_test", shapefile_plus_sidecars, sf))

        self.assertRaises(
            UploadError, lambda: self.cat.create_coveragestore(
                "states_raster_test", shapefile_plus_sidecars, sf))

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf))

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assertIsNone(self.cat.get_layer("states_test"))

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        self.assertIsNotNone(self.cat.get_resource("Pk50095", workspace=sf))

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf))

        self.assertRaises(
            UploadError, lambda: self.cat.create_featurestore(
                "Pk50095_vector", tiffdata, sf))

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff))

        self.cat.create_coveragestore_external_geotiff(
            "Pk50095_ext", 'file:test/data/Pk50095.tif', sf)

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = {
            'title': 'Not the original attribution',
            'width': '123',
            'height': '321',
            'href': 'http://www.georchestra.org',
            'url':
            'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg',
            'type': 'image/jpeg'
        }

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        old_default_style = lyr.default_style
        lyr.default_style = next(
            (s for s in lyr.styles if s.name == "pophatch"))
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # check count before tests (upload)
        count = len(self.cat.get_styles())

        # upload new style, verify existence
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("fred", fred_sld.read())
        fred = self.cat.get_style("fred")
        self.assertIsNotNone(fred)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        with open("test/ted.sld") as ted_sld:
            self.cat.create_style("fred", ted_sld.read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assertIsNotNone(fred)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assertIsNone(self.cat.get_style("fred"))

        # attempt creating new style
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("fred", fred_sld.read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assertIsNotNone(f)
        self.assertEqual(f.name, fred.name)

        # compare count after upload
        self.assertEqual(count + 1, len(self.cat.get_styles()))

        # attempt creating a new style without "title"
        with open("test/notitle.sld") as notitle_sld:
            self.cat.create_style("notitle", notitle_sld.read())
        notitle = self.cat.get_style("notitle")
        self.assertEqual(None, notitle.sld_title)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("jed", fred_sld.read(), workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assertIsNone(jed)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertIsNotNone(jed)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assertIsNotNone(jed)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        with open("test/ted.sld") as ted_sld:
            self.cat.create_style("jed",
                                  ted_sld.read(),
                                  overwrite=True,
                                  workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertIsNotNone(jed)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assertIsNone(self.cat.get_style("jed", workspace="topp"))

        # attempt creating new style
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("jed", fred_sld.read(), workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assertIsNotNone(f)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        with open("test/fred.sld") as fred_sld:
            self.cat.create_style("ned",
                                  fred_sld.read(),
                                  overwrite=True,
                                  workspace="topp")
        with open("test/fred.sld") as ted_sld:
            self.cat.create_style("zed",
                                  ted_sld.read(),
                                  overwrite=True,
                                  workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assertIsNotNone(ned)
        self.assertIsNotNone(zed)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        if lyr.default_style is not None:
            self.assertEqual("topp:ned", lyr.default_style.fqn)
            self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assertIsNone(ws)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assertTrue(states.enabled)
        states.enabled = False
        self.assertFalse(states.enabled)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assertFalse(states.enabled)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assertTrue(states.enabled)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testImageMosaic(self):
        """
            Test case for Issue #110
        """
        # testing the mosaic creation
        name = 'cea_mosaic'
        with open('test/data/mosaic/cea.zip', 'rb') as data:
            self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource

        self.assertIsNotNone(resource)

        # delete granule from mosaic
        coverage = name
        store = self.cat.get_store(name)
        granules = self.cat.list_granules(coverage, store)
        self.assertEqual(1, len(granules['features']))
        granule_id = name + '.1'
        self.cat.mosaic_delete_granule(coverage, store, granule_id)
        granules = self.cat.list_granules(coverage, store)
        self.assertEqual(0, len(granules['features']))
        """
          testing external Image mosaic creation
        """
        name = 'cea_mosaic_external'
        path = 'test/data/mosaic/external'
        self.cat.create_imagemosaic(name, path, workspace='topp')
        self.cat._cache.clear()
        resource = self.cat.get_layer("external").resource
        self.assertIsNotNone(resource)

        # add granule to mosaic
        granule_path = 'test/data/mosaic/granules/cea_20150102.tif'
        self.cat.add_granule(granule_path, name, workspace='topp')
        granules = self.cat.list_granules("external", name, 'topp')
        self.assertEqual(2, len(granules['features']))

        # add external granule to mosaic
        granule_path = os.path.join(
            os.getcwd(), 'test/data/mosaic/granules/cea_20150103.zip')
        self.cat.add_granule(granule_path, name, workspace='topp')
        granules = self.cat.list_granules("external", name, 'topp')
        self.assertEqual(3, len(granules['features']))

        # Delete store
        store = self.cat.get_store(name)
        self.cat.delete(store, purge=True, recurse=True)
        self.cat._cache.clear()

    def testTimeDimension(self):
        sf = self.cat.get_workspace("sf")
        files = shapefile_and_friends(
            os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)

        get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer(
            'boxes_with_end_date').resource  # noqa: E501

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time",
                                 "true",
                                 "LIST",
                                 None,
                                 "ISO8601",
                                 None,
                                 attribute="date")
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)
class GeoserverHelper:
    def __init__(self,
                 geoserverUrl="",
                 geoserverUserName="",
                 geoserverPW="",
                 geoserverWorkSpace="",
                 postgreIP="",
                 postgreUserName="",
                 postgrePW=""):
        """use the constructor given arguments if used"""
        self.geoserverUrl = geoserverUrl if geoserverUrl != "" else GEOSERVERURL
        self.geoserverUserName = geoserverUserName if geoserverUserName != "" else GEOSERVERUSERNAME
        self.geoserverPW = geoserverPW if geoserverPW != "" else GEOSERVERPW
        self.geoserverWorkSpace = geoserverWorkSpace if geoserverWorkSpace != "" else "crc"
        self.postgreIP = postgreIP if postgreIP != "" else POSTGREIP
        self.postgreUserName = postgreUserName if postgreUserName != "" else POSTGREUSERNAME
        self.postgrePW = postgrePW if postgrePW != "" else POSTGREPW

        if self.geoserverUrl[-1] != '/':
            raise Exception("GeoserverUrl must end with a slash ('/')")

        self.catalog = Catalog(self.geoserverUrl + "rest/")
        self.catalog.http.add_credentials(self.geoserverUserName,
                                          self.geoserverPW)
        try:
            # simple connectivity check against the REST API
            self.catalog.get_workspaces()
        except Exception as e:
            print e
        self.cWorkSpace = self.catalog.get_workspace(self.geoserverWorkSpace)

    def getLayers(self):
        return self.cWorkSpace.catalog.get_layers()

    def insertShapeIntoPostGis(self,
                               shapeFile,
                               databaseName,
                               tableName,
                               encoding=3857):
        '''returns the returnCode of the execution of the insert script, e.g:
        helper.insertShapeIntoPostGis('/home/c815/gsTest/test.shp','crc','testingHelper2')'''

        if not os.path.isfile(shapeFile):
            print "Shape file not found"
            return -1

        cmds = "PGPASSWORD={pgPW} ./createWSFTFromSHP.sh -s {shapeFileF} -d {databaseNameF} -t {tableNameF} -u {postgreUsername} -i {postgreIP}".format(
            pgPW=self.postgrePW,
            shapeFileF=shapeFile,
            databaseNameF=databaseName,
            tableNameF=tableName,
            postgreUsername=self.postgreUserName,
            postgreIP=self.postgreIP)
        return subprocess.call(cmds, shell=True)

    def uploadShapeFile(self, shapeFile, storeName):
        shpPlusSidcars = geoserver.util.shapefile_and_friends(shapeFile[:-3])
        self.ft = self.catalog.create_featurestore(storeName, shpPlusSidcars,
                                                   self.cWorkSpace)

    def getStyles(self):
        return self.catalog.get_styles()

    def uploadStyleFile(self, sldFile, styleName, overWrite, workSpace=None):
        f = open(sldFile, 'r')
        styleSrc = f.read()
        self.uploadStyle(styleSrc, styleName, overWrite, workSpace)
        f.close()

    def uploadStyle(self, sldSrc, styleName, overWrite, workSpace=None):
        self.catalog.create_style(styleName, sldSrc, overWrite, workSpace)

    def publishPostGISLayer(self,
                            postGISLayerName,
                            storeName,
                            crs='EPSG:3857'):
        '''cat.publish_featuretype('testingstuff',crcStore,native_crs='EPSG:3857')'''

        store = self.catalog.get_store(storeName)
        if store is not None:
            self.catalog.publish_featuretype(postGISLayerName, store, crs)

    def setDefaultStyleForLayer(self, layerName, styleName):
        l = self.catalog.get_layer(layerName)
        if l is None:
            return -1

        sNames = [i.name for i in self.getStyles()]
        if styleName not in sNames:
            # not a global style; try a workspace-qualified name ("workspace:style")
            split = styleName.split(':')
            if len(split) != 2:
                return -1
            workSpace, newStyleName = split
            style = self.catalog.get_style(newStyleName, workSpace)
            if style is None:
                return -1
        l._set_default_style(styleName)
        self.catalog.save(l)
        return 0

    def createPostGISDataStore(self,
                               storeName,
                               postGisPassword,
                               postGisUser,
                               postGisHost,
                               postGisDatabase,
                               workSpace=None):

        # check if the connection parameters are valid
        try:
            conn = psycopg2.connect(
                "dbname='{dbName}' user='{dbUser}' host='{Host}' password='{password}'"
                .format(dbName=postGisDatabase,
                        dbUser=postGisUser,
                        Host=postGisHost,
                        password=postGisPassword))
            conn.close()
        except Exception:
            return False

        w = self.catalog.create_datastore(storeName, workSpace)
        template = Template(
            """{'validate connections': 'true', 'port': '5432', 
        'Support on the fly geometry simplification': 'true', 
        'create database': 'false', 'dbtype': 'postgis', 
        'Connection timeout': '20', 'namespace': 'http://www.crcproject.com', 
        'Max connection idle time': '300', 'Expose primary keys': 'false', 
        'min connections': '1', 'Max open prepared statements':'50', 
        'passwd': '$passwd', 
        'encode functions': 'false', 
        'max connections': '10', 'Evictor tests per run': '3', 'Loose bbox': 'true', 
        'Evictor run periodicity': '300', 'Estimated extends': 'true', 
        'database': '$database', 
        'fetch size': '1000', 'Test while idle': 'true', 
        'host': '$host', 
        'preparedStatements': 'false', 'schema': 'public', 
        'user': '$user'}""")
        dic = ast.literal_eval(
            template.substitute(passwd=postGisPassword,
                                user=postGisUser,
                                host=postGisHost,
                                database=postGisDatabase))
        w.connection_parameters = dic
        self.catalog.save(w)
        return True
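
# A minimal usage sketch for the helper above. It assumes the module-level
# defaults (GEOSERVERURL, GEOSERVERUSERNAME, GEOSERVERPW, POSTGREIP,
# POSTGREUSERNAME, POSTGREPW) are defined at import time; the paths, store
# name, style name and layer name used here are purely illustrative.
helper = GeoserverHelper()  # falls back to the module-level defaults

# load a shapefile (plus its sidecar files) into a new feature store
helper.uploadShapeFile('/tmp/parcels.shp', 'parcels_store')

# upload an SLD into the helper's workspace and make it the layer's default
helper.uploadStyleFile('/tmp/parcels.sld', 'parcels_style', True, 'crc')
rc = helper.setDefaultStyleForLayer('parcels', 'crc:parcels_style')
print rc  # 0 on success, -1 otherwise

for layer in helper.getLayers():
    print layer.name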
Example #30
0
class ModifyingTests(unittest.TestCase):

    def setUp(self):
        self.cat = Catalog(GSPARAMS['GSURL'], username=GSPARAMS['GSUSER'], password=GSPARAMS['GSPASSWORD'])

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(
                        [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
                        rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)


        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            try:
                if lyr:
                    self.cat.delete(lyr)
                    self.cat.delete(lyr.resource)
                if ds:
                    self.cat.delete(ds)
            except:
                pass

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(ds, "import", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })

    @drop_table('import2')
    def testVirtualTables(self):
        ds = self.cat.create_datastore("gsconfig_import_test2")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test2")
        self.cat.add_data_to_store(ds, "import2", {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        })
        store = self.cat.get_store("gsconfig_import_test2")
        geom = JDBCVirtualTableGeometry('the_geom','MultiPolygon','4326')
        ft_name = 'my_jdbc_vt_test'
        epsg_code = 'EPSG:4326'
        sql = "select * from import2 where 'STATE_NAME' = 'Illinois'"
        keyColumn = None
        parameters = None

        jdbc_vt = JDBCVirtualTable(ft_name, sql, 'false', geom, keyColumn, parameters)
        ft = self.cat.publish_featuretype(ft_name, store, epsg_code, jdbc_virtual_table=jdbc_vt)

    # DISABLED; this test works only in the very particular case
    # "mytiff.tiff" is already present into the GEOSERVER_DATA_DIR
    # def testCoverageStoreCreate(self):
    #     ds = self.cat.create_coveragestore2("coverage_gsconfig")
    #     ds.data_url = "file:test/data/mytiff.tiff"
    #     self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(
            [("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")],
            rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before, str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after, str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before, str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after, str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats, str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after, str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            new_layer = self.cat.create_wmslayer(wmstest, wmsstore, name, nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

        # Testing projection and projection policy changes
        changed_layer.projection = "EPSG:900913"
        changed_layer.projection_policy = "REPROJECT_TO_DECLARED"
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        layer = self.cat.get_layer(changed_layer.name)
        self.assertEqual(layer.resource.projection_policy, changed_layer.projection_policy)
        self.assertEqual(layer.resource.projection, changed_layer.projection)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)

        self.assert_(self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError,
            lambda: self.cat.create_featurestore("states_test", shapefile_plus_sidecars, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster_test", shapefile_plus_sidecars, sf)
        )

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf)
        )

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)


    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw':  'test/data/Pk50095.tfw',
            'prj':  'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        self.assertRaises(
                ConflictingDataError,
                lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        )

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("Pk50095_vector", tiffdata, sf)
        )

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw':  'test/data/states.shx',
            'prj':  'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff)
        )

        ft_ext = self.cat.create_coveragestore_external_geotiff("Pk50095_ext", 'file:test/data/Pk50095.tif', sf)

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = { 'title': 'Not the original attribution',
                            'width': '123',
                            'height': '321',
                            'href': 'http://www.georchestra.org',
                            'url': 'https://www.cigalsace.org/portail/cigal/documents/page/mentions-legales/Logo_geOrchestra.jpg',
                            'type': 'image/jpeg' }

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        old_default_style = lyr.default_style
        lyr.default_style = (s for s in lyr.styles if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])


    def testStyles(self):
        # check count before tests (upload)
        count = len(self.cat.get_styles())

        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred", open("test/ted.sld").read(), overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

        # compare count after upload
        self.assertEqual(count +1, len(self.cat.get_styles()))

        # attempt creating a new style without "title"
        self.cat.create_style("notitle", open("test/notitle.sld").read())
        notitle = self.cat.get_style("notitle")
        self.assertEqual(None, notitle.sld_title)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed", open("test/fred.sld").read(), workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned", open("test/fred.sld").read(), overwrite=True, workspace="topp")
        self.cat.create_style("zed", open("test/ted.sld").read(), overwrite=True, workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads', 'tasmania_cities'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, ['tasmania_state_boundaries', 'tasmania_water_bodies', 'tasmania_roads'], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testImageMosaic(self):
        """
            Test case for Issue #110
        """
        # testing the mosaic creation
        name = 'cea_mosaic'
        data = open('test/data/mosaic/cea.zip', 'rb')
        self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource

        self.assert_(resource is not None)

        # delete granule from mosaic
        coverage = name
        store = self.cat.get_store(name)
        granule_id = name + '.1'
        self.cat.mosaic_delete_granule(coverage, store, granule_id)


    def testTimeDimension(self):
        sf = self.cat.get_workspace("sf")
        files = shapefile_and_friends(os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)

        get_resource = lambda: self.cat._cache.clear() or self.cat.get_layer('boxes_with_end_date').resource

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time", "true", "LIST", None, "ISO8601", None, attribute="date")
        resource.metadata = {'time':timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time' : timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time' : timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)
workspace = "geonode"
native_crs = 'EPSG:32717'
for schema in schemas:
    print '*' * 50
    ds = cat.create_datastore(schema, workspace)
    ds.connection_parameters.update(host="10.10.201.4",
                                    port="5432",
                                    database="gis_db",
                                    user="******",
                                    passwd="test",
                                    dbtype="postgis",
                                    schema=schema)
    try:
        cat.save(ds)
    except Exception:
        print schema + ' datastore already exists'
        #ds = cat.get_store(schema)
    finally:
        all_layers = ds.get_resources(available=True)
        i = 0
        for layer in all_layers:
            #print layer
            try:
                ft = cat.publish_featuretype(layer, ds, native_crs)
                i += 1
            except Exception:
                print 'Could not import:', layer
                continue
        print 'Completed', i, 'of', len(all_layers), 'from', schema
        #cat.save(ft)
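
# A quick sanity check after a run like the one above could read the stores
# back through the same Catalog and report what was actually published. This
# is only a sketch and reuses the `cat`, `schemas` and `workspace` objects
# defined earlier in the script.
for schema in schemas:
    ds = cat.get_store(schema, workspace)
    published = ds.get_resources()  # feature types actually configured in this store
    for resource in published:
        if cat.get_layer(resource.name) is None:
            print 'No layer found for', resource.name
    print schema, ':', len(published), 'feature types configured'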
Example #32
0
class ModifyingTests(unittest.TestCase):
    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testFeatureTypeSave(self):
        # test saving round trip
        rs = self.cat.get_resource("bugsites")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"
        enabled = rs.enabled

        # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(new_abstract, rs.abstract)
        self.assertEqual(enabled, rs.enabled)

        # Change keywords on server
        rs.keywords = ["bugsites", "gsconfig"]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(["bugsites", "gsconfig"], rs.keywords)
        self.assertEqual(enabled, rs.enabled)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("bugsites")
        self.assertEqual(old_abstract, rs.abstract)

    def testDataStoreCreate(self):
        ds = self.cat.create_datastore("vector_gsconfig")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)

    def testPublishFeatureType(self):
        # Use the other test and store creation to load vector data into a database
        # @todo maybe load directly to database?
        try:
            self.testDataStoreCreateAndThenAlsoImportData()
        except FailedRequestError:
            pass
        try:
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            ds = self.cat.get_store("gsconfig_import_test")
            # make sure it's gone
            self.assert_(self.cat.get_layer('import') is None)
            self.cat.publish_featuretype("import", ds, native_crs="EPSG:4326")
            # and now it's not
            self.assert_(self.cat.get_layer('import') is not None)
        finally:
            # tear stuff down to allow the other test to pass if we run first
            ds = self.cat.get_store("gsconfig_import_test")
            lyr = self.cat.get_layer('import')
            # Delete the existing layer and resource to allow republishing.
            self.cat.delete(lyr)
            self.cat.delete(lyr.resource)
            self.cat.delete(ds)

    def testDataStoreModify(self):
        ds = self.cat.get_store("sf")
        self.assertFalse("foo" in ds.connection_parameters)
        ds.connection_parameters = ds.connection_parameters
        ds.connection_parameters["foo"] = "bar"
        orig_ws = ds.workspace.name
        self.cat.save(ds)
        ds = self.cat.get_store("sf")
        self.assertTrue("foo" in ds.connection_parameters)
        self.assertEqual("bar", ds.connection_parameters["foo"])
        self.assertEqual(orig_ws, ds.workspace.name)

    @drop_table('import')
    def testDataStoreCreateAndThenAlsoImportData(self):
        ds = self.cat.create_datastore("gsconfig_import_test")
        ds.connection_parameters.update(**DBPARAMS)
        self.cat.save(ds)
        ds = self.cat.get_store("gsconfig_import_test")
        self.cat.add_data_to_store(
            ds, "import", {
                'shp': 'test/data/states.shp',
                'shx': 'test/data/states.shx',
                'dbf': 'test/data/states.dbf',
                'prj': 'test/data/states.prj'
            })

    def testCoverageStoreCreate(self):
        ds = self.cat.create_coveragestore2("coverage_gsconfig")
        ds.data_url = "file:data/mytiff.tiff"
        self.cat.save(ds)

    def testCoverageStoreModify(self):
        cs = self.cat.get_store("sfdem")
        self.assertEqual("GeoTIFF", cs.type)
        cs.type = "WorldImage"
        self.cat.save(cs)
        cs = self.cat.get_store("sfdem")
        self.assertEqual("WorldImage", cs.type)

        # not sure about order of test runs here, but it might cause problems
        # for other tests if this layer is misconfigured
        cs.type = "GeoTIFF"
        self.cat.save(cs)

    def testCoverageSave(self):
        # test saving round trip
        rs = self.cat.get_resource("Arc_Sample")
        old_abstract = rs.abstract
        new_abstract = "Not the original abstract"

        # # Change abstract on server
        rs.abstract = new_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(new_abstract, rs.abstract)

        # Restore abstract
        rs.abstract = old_abstract
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual(old_abstract, rs.abstract)

        # Change metadata links on server
        rs.metadata_links = [("text/xml", "TC211",
                              "http://example.com/gsconfig.test.metadata")]
        enabled = rs.enabled
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEqual([
            ("text/xml", "TC211", "http://example.com/gsconfig.test.metadata")
        ], rs.metadata_links)
        self.assertEqual(enabled, rs.enabled)

        srs_before = set(['EPSG:4326'])
        srs_after = set(['EPSG:4326', 'EPSG:3785'])
        formats = set(
            ['ARCGRID', 'ARCGRID-GZIP', 'GEOTIFF', 'PNG', 'GIF', 'TIFF'])
        formats_after = set(["PNG", "GIF", "TIFF"])

        # set and save request_srs_list
        self.assertEquals(set(rs.request_srs_list), srs_before,
                          str(rs.request_srs_list))
        rs.request_srs_list = rs.request_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.request_srs_list), srs_after,
                          str(rs.request_srs_list))

        # set and save response_srs_list
        self.assertEquals(set(rs.response_srs_list), srs_before,
                          str(rs.response_srs_list))
        rs.response_srs_list = rs.response_srs_list + ['EPSG:3785']
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.response_srs_list), srs_after,
                          str(rs.response_srs_list))

        # set and save supported_formats
        self.assertEquals(set(rs.supported_formats), formats,
                          str(rs.supported_formats))
        rs.supported_formats = ["PNG", "GIF", "TIFF"]
        self.cat.save(rs)
        rs = self.cat.get_resource("Arc_Sample")
        self.assertEquals(set(rs.supported_formats), formats_after,
                          str(rs.supported_formats))

    def testWmsStoreCreate(self):
        ws = self.cat.create_wmsstore("wmsstore_gsconfig")
        ws.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        ws.type = "WMS"
        self.cat.save(ws)

    def testWmsLayer(self):
        self.cat.create_workspace("wmstest", "http://example.com/wmstest")
        wmstest = self.cat.get_workspace("wmstest")
        wmsstore = self.cat.create_wmsstore("wmsstore", wmstest)
        wmsstore.capabilitiesURL = "http://suite.opengeo.org/geoserver/ows?service=wms&version=1.1.1&request=GetCapabilities"
        wmsstore.type = "WMS"
        self.cat.save(wmsstore)
        wmsstore = self.cat.get_store("wmsstore")
        self.assertEqual(1, len(self.cat.get_stores(wmstest)))
        available_layers = wmsstore.get_resources(available=True)
        for layer in available_layers:
            # sanitize the layer name - validation will fail on newer geoservers
            name = layer.replace(':', '_')
            new_layer = self.cat.create_wmslayer(wmstest,
                                                 wmsstore,
                                                 name,
                                                 nativeName=layer)
        added_layers = wmsstore.get_resources()
        self.assertEqual(len(available_layers), len(added_layers))

        changed_layer = added_layers[0]
        self.assertEqual(True, changed_layer.advertised)
        self.assertEqual(True, changed_layer.enabled)
        changed_layer.advertised = False
        changed_layer.enabled = False
        self.cat.save(changed_layer)
        self.cat._cache.clear()
        changed_layer = wmsstore.get_resources()[0]
        changed_layer.fetch()
        self.assertEqual(False, changed_layer.advertised)
        self.assertEqual(False, changed_layer.enabled)

    def testFeatureTypeCreate(self):
        shapefile_plus_sidecars = shapefile_and_friends("test/data/states")
        expected = {
            'shp': 'test/data/states.shp',
            'shx': 'test/data/states.shx',
            'dbf': 'test/data/states.dbf',
            'prj': 'test/data/states.prj'
        }

        self.assertEqual(len(expected), len(shapefile_plus_sidecars))
        for k, v in expected.iteritems():
            self.assertEqual(v, shapefile_plus_sidecars[k])

        sf = self.cat.get_workspace("sf")
        self.cat.create_featurestore("states_test", shapefile_plus_sidecars,
                                     sf)

        self.assert_(
            self.cat.get_resource("states_test", workspace=sf) is not None)

        self.assertRaises(
            ConflictingDataError, lambda: self.cat.create_featurestore(
                "states_test", shapefile_plus_sidecars, sf))

        self.assertRaises(
            UploadError, lambda: self.cat.create_coveragestore(
                "states_raster_test", shapefile_plus_sidecars, sf))

        bogus_shp = {
            'shp': 'test/data/Pk50095.tif',
            'shx': 'test/data/Pk50095.tif',
            'dbf': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_featurestore("bogus_shp", bogus_shp, sf))

        lyr = self.cat.get_layer("states_test")
        self.cat.delete(lyr)
        self.assert_(self.cat.get_layer("states_test") is None)

    def testCoverageCreate(self):
        tiffdata = {
            'tiff': 'test/data/Pk50095.tif',
            'tfw': 'test/data/Pk50095.tfw',
            'prj': 'test/data/Pk50095.prj'
        }

        sf = self.cat.get_workspace("sf")
        # TODO: Uploading WorldImage file no longer works???
        # ft = self.cat.create_coveragestore("Pk50095", tiffdata, sf)

        # self.assert_(self.cat.get_resource("Pk50095", workspace=sf) is not None)

        # self.assertRaises(
        #         ConflictingDataError,
        #         lambda: self.cat.create_coveragestore("Pk50095", tiffdata, sf)
        # )

        self.assertRaises(
            UploadError, lambda: self.cat.create_featurestore(
                "Pk50095_vector", tiffdata, sf))

        bogus_tiff = {
            'tiff': 'test/data/states.shp',
            'tfw': 'test/data/states.shx',
            'prj': 'test/data/states.prj'
        }

        self.assertRaises(
            UploadError,
            lambda: self.cat.create_coveragestore("states_raster", bogus_tiff))

    def testLayerSave(self):
        # test saving round trip
        lyr = self.cat.get_layer("states")
        old_attribution = lyr.attribution
        new_attribution = "Not the original attribution"

        # change attribution on server
        lyr.attribution = new_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(new_attribution, lyr.attribution)

        # Restore attribution
        lyr.attribution = old_attribution
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(old_attribution, lyr.attribution)

        self.assertEqual(lyr.default_style.name, "population")

        old_default_style = lyr.default_style
        lyr.default_style = (s for s in lyr.styles
                             if s.name == "pophatch").next()
        lyr.styles = [old_default_style]
        self.cat.save(lyr)
        lyr = self.cat.get_layer("states")
        self.assertEqual(lyr.default_style.name, "pophatch")
        self.assertEqual([s.name for s in lyr.styles], ["population"])

    def testStyles(self):
        # upload new style, verify existence
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Fred", fred.sld_title)

        # replace style, verify changes
        self.cat.create_style("fred",
                              open("test/ted.sld").read(),
                              overwrite=True)
        fred = self.cat.get_style("fred")
        self.assert_(fred is not None)
        self.assertEqual("Ted", fred.sld_title)

        # delete style, verify non-existence
        self.cat.delete(fred, purge=True)
        self.assert_(self.cat.get_style("fred") is None)

        # attempt creating new style
        self.cat.create_style("fred", open("test/fred.sld").read())
        fred = self.cat.get_style("fred")
        self.assertEqual("Fred", fred.sld_title)

        # verify it can be found via URL and check the name
        f = self.cat.get_style_by_url(fred.href)
        self.assert_(f is not None)
        self.assertEqual(f.name, fred.name)

    def testWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("jed",
                              open("test/fred.sld").read(),
                              workspace="topp")

        jed = self.cat.get_style("jed", workspace="blarny")
        self.assert_(jed is None)
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)
        jed = self.cat.get_style("topp:jed")
        self.assert_(jed is not None)
        self.assertEqual("Fred", jed.sld_title)

        # replace style, verify changes
        self.cat.create_style("jed",
                              open("test/ted.sld").read(),
                              overwrite=True,
                              workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assert_(jed is not None)
        self.assertEqual("Ted", jed.sld_title)

        # delete style, verify non-existence
        self.cat.delete(jed, purge=True)
        self.assert_(self.cat.get_style("jed", workspace="topp") is None)

        # attempt creating new style
        self.cat.create_style("jed",
                              open("test/fred.sld").read(),
                              workspace="topp")
        jed = self.cat.get_style("jed", workspace="topp")
        self.assertEqual("Fred", jed.sld_title)

        # verify it can be found via URL and check the full name
        f = self.cat.get_style_by_url(jed.href)
        self.assert_(f is not None)
        self.assertEqual(f.fqn, jed.fqn)

    def testLayerWorkspaceStyles(self):
        # upload new style, verify existence
        self.cat.create_style("ned",
                              open("test/fred.sld").read(),
                              overwrite=True,
                              workspace="topp")
        self.cat.create_style("zed",
                              open("test/ted.sld").read(),
                              overwrite=True,
                              workspace="topp")
        ned = self.cat.get_style("ned", workspace="topp")
        zed = self.cat.get_style("zed", workspace="topp")
        self.assert_(ned is not None)
        self.assert_(zed is not None)

        lyr = self.cat.get_layer("states")
        lyr.default_style = ned
        lyr.styles = [zed]
        self.cat.save(lyr)
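        # this verifies the local state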
        self.assertEqual("topp:ned", lyr.default_style)
        self.assertEqual([zed], lyr.styles)

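        # force a refresh to check the remote state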
        lyr.refresh()
        self.assertEqual("topp:ned", lyr.default_style.fqn)
        self.assertEqual([zed.fqn], [s.fqn for s in lyr.styles])

    def testWorkspaceCreate(self):
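        # "acme" should not exist yet; create it and verify it can be fetched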
        ws = self.cat.get_workspace("acme")
        self.assertEqual(None, ws)
        self.cat.create_workspace("acme", "http://example.com/acme")
        ws = self.cat.get_workspace("acme")
        self.assertEqual("acme", ws.name)

    def testWorkspaceDelete(self):
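        # create a scratch workspace, delete it, and verify it is gone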
        self.cat.create_workspace("foo", "http://example.com/foo")
        ws = self.cat.get_workspace("foo")
        self.cat.delete(ws)
        ws = self.cat.get_workspace("foo")
        self.assert_(ws is None)

    def testWorkspaceDefault(self):
        # save orig
        orig = self.cat.get_default_workspace()
        neu = self.cat.create_workspace("neu", "http://example.com/neu")
        try:
            # make sure setting it works
            self.cat.set_default_workspace("neu")
            ws = self.cat.get_default_workspace()
            self.assertEqual('neu', ws.name)
        finally:
            # cleanup and reset to the way things were
            self.cat.delete(neu)
            self.cat.set_default_workspace(orig.name)
            ws = self.cat.get_default_workspace()
            self.assertEqual(orig.name, ws.name)

    def testFeatureTypeDelete(self):
        pass

    def testCoverageDelete(self):
        pass

    def testDataStoreDelete(self):
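        # note: despite the name, this test toggles the store's "enabled" flag
        # and checks that the change round-trips through GeoServer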
        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)
        states.enabled = False
        self.assert_(states.enabled == False)
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == False)

        states.enabled = True
        self.cat.save(states)

        states = self.cat.get_store('states_shapefile')
        self.assert_(states.enabled == True)

    def testLayerGroupSave(self):
        tas = self.cat.get_layergroup("tasmania")

        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

        tas.layers = tas.layers[:-1]
        tas.styles = tas.styles[:-1]

        self.cat.save(tas)

        # this verifies the local state
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

        # force a refresh to check the remote state
        tas.refresh()
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None], tas.styles)

    def testTimeDimension(self):
        sf = self.cat.get_workspace("sf")
        files = shapefile_and_friends(
            os.path.join(gisdata.GOOD_DATA, "time", "boxes_with_end_date"))
        self.cat.create_featurestore("boxes_with_end_date", files, sf)

        def get_resource():
            # clear the catalog cache so the layer is re-read from GeoServer
            self.cat._cache.clear()
            return self.cat.get_layer('boxes_with_end_date').resource

        # configure time as LIST
        resource = get_resource()
        timeInfo = DimensionInfo("time",
                                 "true",
                                 "LIST",
                                 None,
                                 "ISO8601",
                                 None,
                                 attribute="date")
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual("LIST", timeInfo.presentation)
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual("date", timeInfo.attribute)
        self.assertEqual("ISO8601", timeInfo.units)

        # disable time dimension
        timeInfo = resource.metadata['time']
        timeInfo.enabled = False
        # since this is an xml property, it won't get written unless we modify it
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(False, timeInfo.enabled)

        # configure with interval, end_attribute and enable again
        timeInfo.enabled = True
        timeInfo.presentation = 'DISCRETE_INTERVAL'
        timeInfo.resolution = '3 days'
        timeInfo.end_attribute = 'enddate'
        resource.metadata = {'time': timeInfo}
        self.cat.save(resource)
        # and verify
        resource = get_resource()
        timeInfo = resource.metadata['time']
        self.assertEqual(True, timeInfo.enabled)
        self.assertEqual('DISCRETE_INTERVAL', timeInfo.presentation)
        self.assertEqual('3 days', timeInfo.resolution_str())
        self.assertEqual('enddate', timeInfo.end_attribute)

    def testImageMosaic(self):
        # testing the mosaic creation
        name = 'cea_mosaic'
        data = open('test/data/mosaic/cea.zip', 'rb')
        self.cat.create_imagemosaic(name, data)

        # get the layer resource back
        self.cat._cache.clear()
        resource = self.cat.get_layer(name).resource

        self.assert_(resource is not None)

        # delete granule from mosaic
        coverage = name
        store = name
        granule_id = name + '.1'
        self.cat.mosaic_delete_granule(coverage, store, granule_id)
Beispiel #33
0
import requests
from geoserver.catalog import Catalog

# Set cat variable
cat = Catalog('http://url/geoserver/rest/',
              username="******",
              password="******")

# Get the target workspace (uncomment the next line to create it first)
# ws = cat.create_workspace('ws_name', 'http://uri/ws_name')
ws = cat.get_workspace('ws_name')

# Create Store from PostGIS
ds = cat.create_datastore('db_name', 'ws_name')

ds.connection_parameters.update(host='host',
                                port='5432',
                                database='db_name',
                                user='******',
                                passwd='password',
                                dbtype='postgis',
                                schema='public')

cat.save(ds)

# Add Layers from Postgres
ft = cat.publish_featuretype('table_name', ds, 'EPSG:3857', srs='EPSG:3857')

cat.save(ft)
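
# A possible follow-up (not part of the original snippet): attach an existing
# style as the layer's default, mirroring the pattern used in the examples
# above. 'style_name' is a placeholder for an SLD already registered in
# GeoServer, and the workspace-qualified layer name may vary with your setup.
layer = cat.get_layer('ws_name:table_name')
layer.default_style = cat.get_style('style_name')
cat.save(layer)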