Beispiel #1
0
def set_style(layer_name, style):
    """Assign *style* as the default style of *layer_name* on GeoServer.

    Connects to the REST catalog using the module-level credentials,
    resolves both the layer and the named style, and persists the change.
    """
    catalog = Catalog(GEOSERVER_REST_URL, GS_USERNAME, GS_PASSWORD)
    target_layer = catalog.get_layer(layer_name)
    target_layer.default_style = catalog.get_style(style)
    catalog.save(target_layer)
def reload_config(instances):
    """Ask each slave GeoServer instance to reload its configuration.

    Issues a REST reload request against every ``hostname:port`` entry in
    *instances*, pausing ``RELOAD_INTERVAL`` seconds between requests.

    Module-level settings (overridable via environment variables):
    GS_RELOAD_INTERVAL: delay in seconds between slave instance reloads
    GS_PROTOCOL: protocol prefix, 'HTTP' or 'HTTPS'
    GS_RELATIVE_URL: relative URL to the GeoServer REST API

    :param instances: list of hostname:port requests
    :return:
    :raises Exception: when a reload responds with a non-2xx status.
    """

    logging.info('Sending reload requests separated by %s second delay' % RELOAD_INTERVAL)

    for host in instances:
        url = '%s://%s%s' % (PROTOCOL, host, RELATIVE_URL)
        logging.info('Performing GeoServer configuration reload at: %s' % url)
        outcome = Catalog(url).reload()
        if not (outcome and outcome[0]):
            logging.error('No result received from reload request!')
        else:
            status = outcome[0].status
            if 200 <= status < 300:
                logging.info('Successful configuration reload.')
            else:
                raise Exception('Failure processing reload with status %s and reason: %s' %
                                (outcome[0].status, outcome[0].reason))

        sleep(RELOAD_INTERVAL)
Beispiel #3
0
 def addCatalog(self, explorer):
     """Prompt for a new GeoServer catalog definition and register it.

     Opens the DefineCatalogDialog; on acceptance connects to the server,
     warns if the GeoServer version is not 2.3.x/2.4.x, stores the catalog
     under the user-supplied name, enables the catalog-dependent UI actions
     and refreshes the tree/QGIS content.  Any connection error is shown
     via ``explorer.setError``.

     NOTE: Python 2 syntax (``except Exception, e``) — this file predates
     Python 3.
     """
     dlg = DefineCatalogDialog(explorer)
     dlg.exec_()
     if dlg.ok:
         try:
             cat = Catalog(dlg.url, dlg.username, dlg.password)
             # Probe server version: only 2.3.x and 2.4.x are known-good.
             v = cat.gsversion()
             supported = v.startswith("2.3") or v.startswith("2.4")
             if not supported:
                 ret = QtGui.QMessageBox.warning(explorer, "GeoServer catalog definition",
                                 "The specified catalog seems to be running an older version of GeoServer\n"
                                 "That might cause unexpected behaviour.\nDo you want to add the catalog anyway?",
                                 QtGui.QMessageBox.Yes | QtGui.QMessageBox.No,
                                 QtGui.QMessageBox.No);
                 if ret == QtGui.QMessageBox.No:
                     return
             # Register the catalog and make it the active one.
             self.catalogs[dlg.name] = cat
             self.catalog = cat
             self.comboBox.addItem(dlg.name, cat)
             # A usable catalog exists now, so enable all catalog actions.
             self.layersAction.setEnabled(True)
             self.workspacesAction.setEnabled(True)
             self.stylesAction.setEnabled(True)
             self.groupsAction.setEnabled(True)
             self.gwcAction.setEnabled(True)
             self.wpsAction.setEnabled(True)
             self.refreshButton.setEnabled(True)
             self.updateActionButtons()

             self.fillTree()
             self.layersAction.trigger()
             self.explorer.updateQgisContent()
         except Exception, e:
             explorer.setError(str(e))
             return
Beispiel #4
0
 def createDataStore(self, options):
     """Create a GeoServer workspace and PostGIS datastore for the URB schema.

     :param options: mapping with keys ``geoserveradmin``, ``gpw`` (admin
         password), ``alias`` (workspace/datastore name), ``uri``,
         ``urbanUrl`` (DB host), ``dbport``, ``database``, ``postuser``
         and ``ropw`` (DB password).
     :return: tuple ``(workspace name, datastore name, resource type)``.
     :raises Exception: with a French user-facing message when the
         workspace name is already taken or the datastore creation fails.
     """
     try:
         cat = Catalog(self.geoserver_rest_url, options["geoserveradmin"], options["gpw"])
         # create datastore for URB schema
         try:
             ws = cat.create_workspace(options["alias"], options["uri"])
         except Exception as e:
             # Workspace name/alias already in use.
             raise Exception("Le nom du workspace ou l'alias est déja utiliser")
         try:
             ds = cat.create_datastore(options["alias"], ws)
             ds.connection_parameters.update(
                 host=options["urbanUrl"],
                 port=options["dbport"],
                 database=options["database"],
                 user=options["postuser"],
                 passwd=options["ropw"],
                 dbtype="postgis",
             )
             cat.save(ds)
         except Exception as e:
             print(str(e))
             raise Exception("Erreur de connexion au Geoserver lors de la création du DataStore")
     except Exception as e:
         # Re-raise with the message only; original traceback is dropped.
         raise Exception(str(e))
     return ws.name, ds.name, ds.resource_type
def createGeoServerCatalog(service_url = "http://localhost:8080/geoserver/rest",
                           username="******",
                           password="******",
                           authid=None,
                           authtype=None,
                           disable_ssl_certificate_validation=False):
    """Return a CatalogWrapper around the appropriate catalog flavour.

    Without an auth config id/type, plain basic-auth is used.  With one,
    QGIS < 2.12 gets a PKI-based catalog built from PEM files, while newer
    QGIS versions use the AuthCatalog backed by QgsNetworkAccessManager.
    """
    use_basic_auth = not authid or not authtype
    if use_basic_auth:
        catalog = GSCatalog(service_url, username, password, disable_ssl_certificate_validation)
    elif QGis.QGIS_VERSION_INT < 21200:
        # Extract client cert, key and CA chain from the auth config
        # and build a PKI-authenticated catalog.
        certfile, keyfile, cafile = pem.getPemPkiPaths(authid, authtype)
        catalog = PKICatalog(service_url, keyfile, certfile, cafile)
        # authid is consumed by uri.py helpers when building URIs
        # with PKI credentials.
        catalog.authid = authid
    else:
        # QGIS >= 2.12: AuthCatalog with a configurable XML cache lifetime.
        qgis_settings = QtCore.QSettings()
        cache_time = int(qgis_settings.value("/GeoServer/Settings/GeoServer/AuthCatalogXMLCacheTime", 180, int))
        catalog = AuthCatalog(service_url, authid, cache_time)

    return CatalogWrapper(catalog)
Beispiel #6
0
 def layer_group(self):
     """
     Returns layer group name from local OWS for this map instance.
     """
     group_name = '%s_%d' % (slugify(self.title), self.id)
     catalog = Catalog(ogc_server_settings.rest, _user, _password)
     return catalog.get_layergroup(group_name)
Beispiel #7
0
def export_layer(db_info, gs_info, data_info, layer_name):
    """\
    This takes the parameters and performs the export.

    This can be run as a module or called from other code.

    Validates and explodes the input layer, provisions a GeoServer
    workspace + datastore, creates the target PostGIS database, exports
    the layer data into it, publishes the PostGIS layer, and finally
    (best-effort) deletes the source features.
    """

    geoValidate(layer_name)
    layer = explodeGeo(layer_name)
    (spaRef, native_crs) = get_srs(layer)

    cat = Catalog(*tuple(gs_info))
    wspace = cat.create_workspace(data_info.workspace, data_info.namespace)
    datastore = create_datastore(cat, wspace, db_info, data_info)

    # Connect to the maintenance DB to create the target database.
    with closing(createConnObj(db_info._replace(database='postgres'))) as cxn:
        createNewDb(cxn, db_info.database)
    quick_export(layer, db_info)
    create_postgis_layer(
            cat, wspace, datastore, layer, spaRef, native_crs, db_info,
            )
    try:
        arcpy.DeleteFeatures_management(layer)
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt. Deletion remains best-effort.
        log(arcpy.GetMessages())
Beispiel #8
0
    def publish_layer_group(self):
        """
        Publishes local map layers as WMS layer group on local OWS.
        """
        # temporary permission workaround:
        # only allow public maps to be published
        if not self.is_public:
            return 'Only public maps can be saved as layer group.'

        # Collect local layers (and their styles, in lockstep).
        local_layers = []
        styles = []
        for map_layer in MapLayer.objects.filter(map=self.id):
            if not map_layer.local:
                continue
            layer = Layer.objects.get(typename=map_layer.name)
            local_layers.append(layer)
            styles.append(map_layer.styles or getattr(layer.default_style, 'name', ''))
        layer_names = [layer.name for layer in local_layers]

        # Group layer bounds and name
        bounds = [str(coord) for coord in self.bbox]
        group_name = '%s_%d' % (slugify(self.title), self.id)

        # Update the existing group layer, or create an unsaved one.
        catalog = Catalog(ogc_server_settings.rest, _user, _password)
        group = self.layer_group
        if group is None:
            group = GsUnsavedLayerGroup(catalog, group_name, layer_names, styles, bounds)
        else:
            group.layers, group.styles, group.bounds = layer_names, styles, bounds
        catalog.save(group)
        return group_name
Beispiel #9
0
def upload_impact_vector(impact_file_path):
    """Upload a shapefile (plus sidecars) as a feature store on GeoServer.

    :param impact_file_path: path to the ``.shp`` file; the matching
        ``.shx``/``.dbf``/``.prj`` files are assumed to sit next to it.
    :return: ``{'return': 'success', 'resource': <name>}`` on success,
        ``{'return': 'fail'}`` on any error (deliberately best-effort).
    """
    try:
        cat = Catalog(GEOSERVER_REST_URL, GS_USERNAME, GS_PASSWORD)
        ws = cat.get_workspace(GS_IMPACT_WS)

        base = str(os.path.splitext(impact_file_path)[0])
        name = str(os.path.splitext(os.path.basename(base))[0])

        data = {
            'shp': impact_file_path,
            'shx': base + '.shx',
            'dbf': base + '.dbf',
            'prj': base + '.prj',
        }

        cat.create_featurestore(name, data, ws, True)
    except Exception:
        # BUG FIX: narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt propagate instead of returning 'fail'.
        return {'return': 'fail'}
    return {'return': 'success', 'resource': name}
class TestCatalogBase(unittest.TestCase):
    """Shared fixture for catalog tests: per-test scratch workspace plus a
    class-level sweep of all non-system PostGIS tables on teardown."""

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        # List every ordinary table except PostGIS's spatial_ref_sys and
        # the pg_/sql_ system catalogs, then drop them all in one statement.
        sql = """SELECT relname FROM pg_class
                    WHERE relkind = 'r' AND relname !~ '^(pg_|sql_)'
                        AND relname != 'spatial_ref_sys';"""
        conn = psycopg2.connect(
            database=_DATABASE,
            user=_USER,
            password=_PASSWORD,
            host=_HOST,
            port=_PORT
        )
        cur = conn.cursor()
        cur.execute(sql)
        tables = [r[0] for r in cur.fetchall()]
        if len(tables) == 0:
            return
        cur.execute("DROP TABLE {};".format(','.join(tables)))
        conn.commit()

    def setUp(self):
        # Fresh catalog connection and scratch workspace for every test.
        self.catalog = Catalog(settings.SERVICE_URL)
        self.workspace = self.catalog.create_workspace("gsconfig_test")

    def tearDown(self):
        self.catalog.delete(self.workspace, recurse=True)
    def _get_layer_style(self, resource_id):
        """Return the pretty-printed SLD body of the current layer's default style.

        NOTE(review): ``resource_id`` is unused; the layer id is read from the
        request context ``c.layer_id`` — confirm against callers.
        """
        # BUG FIX: the original body mixed tabs and spaces for indentation,
        # which is a TabError under Python 3. Re-indented with spaces only;
        # every statement is otherwise unchanged.
        geoserver_url = config['ckanext-vectorstorer.geoserver_url']

        cat = Catalog(geoserver_url + "/rest")
        layer = cat.get_layer(c.layer_id)
        default_style = layer._get_default_style()
        xml = minidom.parseString(default_style.sld_body)
        return xml.toprettyxml()
class UploaderTests(MapStoryTestMixin):

    """
    Basic checks to make sure pages load, etc.

    Provisions a GeoServer 'geonode' workspace and a PostGIS datastore
    backed by the Django 'datastore' connection for each test run.
    """

    def create_datastore(self, connection, catalog):
        """Create (or reuse) a PostGIS datastore named after the Django DB.

        ``connection`` is a Django database connection; its settings dict
        supplies the PostGIS connection parameters.  A FailedRequestError
        on save is treated as "already exists" and ignored.
        """
        settings = connection.settings_dict
        params = {'database': settings['NAME'],
                  'passwd': settings['PASSWORD'],
                  'namespace': 'http://www.geonode.org/',
                  'type': 'PostGIS',
                  'dbtype': 'postgis',
                  'host': settings['HOST'],
                  'user': settings['USER'],
                  'port': settings['PORT'],
                  'enabled': "True"}

        store = catalog.create_datastore(
            settings['NAME'],
            workspace=self.workspace)
        store.connection_parameters.update(params)

        try:
            catalog.save(store)
        except FailedRequestError:
            # assuming this is because it already exists
            pass

        return catalog.get_store(settings['NAME'])

    def setUp(self):
        """Skip when test data is missing; otherwise wire up GeoServer."""

        if not os.path.exists(os.path.join(os.path.split(__file__)[0], '..', 'importer-test-files')):
            self.skipTest('Skipping test due to missing test data.')

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.username, self.password = self.create_user(
            'admin', 'admin', is_superuser=True)
        self.non_admin_username, self.non_admin_password = self.create_user(
            'non_admin', 'non_admin')
        self.cat = Catalog(
            ogc_server_settings.internal_rest,
            *ogc_server_settings.credentials)
        # Ensure the 'geonode' workspace exists before creating the store.
        if self.cat.get_workspace('geonode') is None:
            self.cat.create_workspace('geonode', 'http://geonode.org')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.cat)

    def tearDown(self):
        """
        Clean up geoserver.
        """
        self.cat.delete(self.datastore, recurse=True)
Beispiel #13
0
    def geoserver_upload_shape(self, input_file):
        """Upload a shapefile (and sidecars) to GeoServer as a feature store.

        NOTE(review): ``os.path.split`` returns (directory, filename), so
        ``file_ext`` actually holds the file *name*, which becomes the
        store name — confirm this naming is intentional.
        (Python 2 print statements.)
        """
        print "upload", input_file

        path, file_ext = os.path.split(input_file)

        cat = Catalog(self.GEOSERVER_HOST, self.GEOSERVER_USER, self.GEOSERVER_PASSWORD)
        # Gather .shp/.shx/.dbf/.prj companions for the upload.
        shapeFile = shapefile_and_friends(input_file)
        print shapeFile
        cat.create_featurestore(name=file_ext, data=shapeFile, workspace=cat.get_workspace(self.GEOSERVER_WORKSPACE),
                                overwrite=True, charset=None)
Beispiel #14
0
def layer_exists(layer_name, store, workspace):
    """Return True when *layer_name* resolves to a resource in the given
    store/workspace on the configured OGC server; False when it does not
    resolve or the REST request fails."""
    user, password = ogc_server_settings.credentials
    catalog = Catalog(ogc_server_settings.rest, user, password)
    try:
        resource = catalog.get_resource(layer_name, store=store,
                                        workspace=workspace)
    except FailedRequestError:
        return False
    return resource is not None
Beispiel #15
0
def add_layer(shp):
  """Push one shapefile (basename without extension) into the configured
  datastore via the module-level globals. (Python 2 print statements.)"""
  global ds_name, ws_name, appserver
  global instance
  cat = Catalog(appserver + '/rest')
  ws = cat.get_workspace(ws_name)
  ds = cat.get_store(ds_name, ws_name)
  print 'starting {0}...'.format(shp)
  # Map each sidecar extension to its full path for the upload.
  components =  dict((ext, shp + '.' + ext) for ext in ['shp', 'prj', 'shx', 'dbf'])
  cat.add_data_to_store(ds, shp, components, ws, True)
  print '{0} finished ...'.format(shp)
def raster_to_geoserver(geoserver_url, raster, workspace):
    """Publish *raster* as a coverage store in *workspace*, creating the
    workspace first and skipping files whose layer already exists.
    Uses module-level ``username``/``password``. (Python 2 prints.)"""
    cat = Catalog(geoserver_url, username, password)
    create_workspace(geoserver_url, username, password, workspace)
    file_name = os.path.splitext(os.path.basename(raster))[0]
    workspace = cat.get_workspace(workspace)
    # Only create the store when no layer of this name exists yet.
    if not file_name in [l.name for l in cat.get_layers()]:
        print 'Creating new layer: %s' %file_name
        cat.create_coveragestore(file_name, raster, workspace)
    else:
        print 'Layer %s already exists' %file_name
Beispiel #17
0
def main(options):
    """Provision a GeoServer workspace/datastore for the URB schema and
    register each table as a styled layer in GeoNode.

    ``options`` must provide ``gpw`` (admin password), ``alias``
    (workspace/datastore name), ``urbanUrl`` (DB host), ``database`` and
    ``ropw`` (DB password).
    """
    # connect to geoserver
    cat = Catalog("http://localhost:8080/geoserver/rest", "admin", options.gpw)

    # create datastore for URB schema
    ws = cat.create_workspace(options.alias, 'imio.be')

    ds = cat.create_datastore(options.alias, ws)
    ds.connection_parameters.update(
        host=options.urbanUrl,
        port="5432",
        database=options.database,
        user="******",
        passwd=options.ropw,
        dbtype="postgis")

    cat.save(ds)
    ds = cat.get_store(options.alias)

    # table name -> default style name
    # (re-indented with spaces; original literal mixed tabs and spaces)
    urb = {
        "capa": "Parcelles",
        "toli": "cadastre_ln_toponymiques",
        "canu": "cadastre_pt_num",
        "cabu": "Batiments",
        "gept": "cadastre_points_generaux",
        "gepn": "cadastre_pol_gen",
        "inpt": "point",
        "geli": "cadastre_ln_generales",
        "inli": "cadastre_ln_informations",
        "topt": "point",
    }

    # connect to tables and create layers and correct urban styles
    for table in urb:
        style = urb[table]
        ft = cat.publish_featuretype(table, ds, 'EPSG:31370', srs='EPSG:31370')
        ft.default_style = style
        cat.save(ft)
        resource = ft.resource
        resource.title = options.alias + "_" + table
        resource.save()

        # BUG FIX: ``layerName`` was never defined (NameError at runtime).
        # The GeoNode layer name matches the published resource's name.
        layerName = resource.name
        layer, created = Layer.objects.get_or_create(name=layerName, defaults={
            "workspace": ws.name,
            "store": ds.name,
            "storeType": ds.resource_type,
            "typename": "%s:%s" % (ws.name.encode('utf-8'), resource.name.encode('utf-8')),
            "title": resource.title or 'No title provided',
            "abstract": resource.abstract or 'No abstract provided',
            # "owner": owner,
            "uuid": str(uuid.uuid4()),
            "bbox_x0": Decimal(resource.latlon_bbox[0]),
            "bbox_x1": Decimal(resource.latlon_bbox[1]),
            "bbox_y0": Decimal(resource.latlon_bbox[2]),
            "bbox_y1": Decimal(resource.latlon_bbox[3])
        })
        set_attributes(layer, overwrite=True)
        if created:
            layer.set_default_permissions()
def shp_to_geoserver(geoserver_url, shapefile, workspace):
    """Publish *shapefile* (with its sidecar files) as a feature store in
    *workspace*, creating the workspace first and skipping files whose
    layer already exists. Uses module-level ``username``/``password``.
    (Python 2 prints.)"""
    cat = Catalog(geoserver_url, username, password)
    create_workspace(geoserver_url, username, password, workspace)
    # Bundle .shp/.shx/.dbf/.prj companions for the upload.
    shapefile_plus_sidecars = geoserver.util.shapefile_and_friends(shapefile)
    file_name = os.path.splitext(os.path.basename(shapefile))[0]
    workspace = cat.get_workspace(workspace)
    # Only create the store when no layer of this name exists yet.
    if not file_name in [l.name for l in cat.get_layers()]:
        print 'Creating new layer: %s' %file_name
        cat.create_featurestore(file_name, shapefile_plus_sidecars, workspace)
    else:
        print 'Layer %s already exists' %file_name
Beispiel #19
0
def upload_raster(file_path, workspace):
    """Create a GeoServer coverage store (overwriting any existing one)
    from the raster at *file_path* in *workspace*.

    The store name is the file's basename without extension.
    Any gsconfig exception propagates to the caller.
    """
    # Cleaned up: removed an unused ``data = dict()`` local and a
    # ``try/except: raise`` wrapper that re-raised unconditionally —
    # both were no-ops; behavior is unchanged.
    cat = Catalog(GEOSERVER_REST_URL, GS_USERNAME, GS_PASSWORD)

    base = str(os.path.splitext(file_path)[0])
    name = str(os.path.splitext(os.path.basename(base))[0])

    cat.create_coveragestore(name, file_path, workspace, True)
Beispiel #20
0
def upload_style(style_name):
    """Upload ``<style_name>.sld`` from ``DATA_PATH/styles`` to GeoServer,
    overwriting any existing style of the same name.

    Any I/O or gsconfig exception propagates to the caller.
    """
    # Cleaned up: removed a redundant ``style.close()`` inside the ``with``
    # block (the context manager already closes the file) and a
    # ``try/except: raise`` wrapper that re-raised unconditionally.
    cat = Catalog(GEOSERVER_REST_URL, GS_USERNAME, GS_PASSWORD)

    style_filename = style_name + '.sld'
    style_file_path = str(os.path.join(DATA_PATH, 'styles', style_filename))

    with open(style_file_path, 'r') as style:
        cat.create_style(style_name, style.read(), overwrite=True)
Beispiel #21
0
def get_stores(store_type = None):
    """List stores on the configured OGC server as dicts with 'name' and
    'type' keys. When *store_type* is given, only stores of that type
    (case-insensitive) are returned; when it is None, all stores are."""
    cat = Catalog(ogc_server_settings.rest, _user, _password)
    matching = []
    for store in cat.get_stores():
        store.fetch()
        kind = store.dom.find('type').text.lower()
        if (store_type and store_type.lower() == kind) or store_type is None:
            matching.append({'name': store.name, 'type': kind})
    return matching
Beispiel #22
0
def upload_raster(impact_file_path):
    """Create a coverage store (overwriting any existing one) in the
    impact workspace from the raster at *impact_file_path*.

    The store name is the file's basename without extension.
    Any gsconfig exception propagates to the caller.
    """
    # Cleaned up: removed an unused ``data = dict()`` local and a
    # ``try/except: raise`` wrapper that re-raised unconditionally —
    # both were no-ops; behavior is unchanged.
    cat = Catalog(GEOSERVER_REST_URL, GS_USERNAME, GS_PASSWORD)
    ws = cat.get_workspace(GS_IMPACT_WS)

    base = str(os.path.splitext(impact_file_path)[0])
    name = str(os.path.splitext(os.path.basename(base))[0])

    cat.create_coveragestore(name, impact_file_path, ws, True)
Beispiel #23
0
def get_stores(store_type = None):
    """List stores from the GeoServer at GEOSERVER_BASE_URL as dicts with
    'name' and 'type' keys. When *store_type* is given, only stores of
    that type (case-insensitive) are returned; when None, all stores."""
    url = "%srest" % settings.GEOSERVER_BASE_URL
    cat = Catalog(url, _user, _password)
    matching = []
    for store in cat.get_stores():
        store.fetch()
        kind = store.dom.find('type').text.lower()
        if (store_type and store_type.lower() == kind) or store_type is None:
            matching.append({'name': store.name, 'type': kind})
    return matching
    def updateBbox(self):
        """Recompute the bounding box of the ``uos_fast`` table and write it
        back to the matching GeoServer coverage as its native bbox.

        Uses ``self.cur`` (an open DB cursor). The CRS is hard-coded to
        EPSG:4326 — presumably matching the table's data; verify.
        """
        # Envelope of the union of all geometries, split into min/max x/y.
        sql = u"with geom_union as (select st_envelope(st_union(the_geom)) as geom from uos_fast) " \
              u"select st_xmin(geom), st_xmax(geom), st_ymin(geom), st_ymax(geom) from geom_union"
        self.cur.execute(sql)

        result = self.cur.fetchone()

        cat = Catalog("http://localhost:8081/geoserver/rest",username ="******",password="******")
        coverage = cat.get_resource_by_url("http://localhost:8081/geoserver/rest/workspaces/uos/coveragestores/"
                                           "uos_image/coverages/uos_fast.xml")

        # gsconfig expects (minx, maxx, miny, maxy, crs) as strings.
        coverage.native_bbox = (str(result[0]),str(result[1]),str(result[2]),str(result[3]),"EPSG:4326")
        cat.save(coverage)
Beispiel #25
0
def pre_save_maplayer(instance, sender, **kwargs):
    """Django pre-save signal handler: mark a MapLayer as local when the
    catalog resolves its name to a GsLayer.

    A refused connection is only logged (the save proceeds); other
    environment errors re-raise. (Python 2 ``except ..., e`` syntax.)
    """
    # If this object was saved via fixtures,
    # do not do post processing.
    if kwargs.get('raw', False):
        return

    try:
        c = Catalog(ogc_server_settings.rest, _user, _password)
        instance.local = isinstance(c.get_layer(instance.name),GsLayer)
    except EnvironmentError, e:
        if e.errno == errno.ECONNREFUSED:
            msg = 'Could not connect to catalog to verify if layer %s was local' % instance.name
            logger.warn(msg, e)
        else:
            raise e
    def updateBbox(self):
        """Recompute the bbox of ``self.DATABASE_TABLE_NAME`` and write it
        to the matching GeoServer coverage as its native bbox.

        Uses ``self.cur`` (open DB cursor) and the instance's GeoServer
        connection attributes. CRS hard-coded to EPSG:4326.
        (Python 2 print statement.)
        NOTE(review): the printed URL uses DATABASE_TABLE_NAME but the
        fetched one uses GEOSERVER_LAYER_NAME — confirm they match.
        """
        # Envelope of the union of all geometries, split into min/max x/y.
        sql = u"with geom_union as (select st_envelope(st_union(the_geom)) as geom from " + self.DATABASE_TABLE_NAME + u") " \
              u"select st_xmin(geom), st_xmax(geom), st_ymin(geom), st_ymax(geom) from geom_union"
        self.cur.execute(sql)

        result = self.cur.fetchone()

        #self.cur.close()

        cat = Catalog(self.HTTP + self.GEOSERVER_URL + "/geoserver/rest",username = self.GEOSERVER_USER, password=self.GEOSERVER_PASSWORD)
        print self.HTTP + self.GEOSERVER_URL + "/geoserver/rest/workspaces/uos/coveragestores/" + "uos_upload_image/coverages/" + self.DATABASE_TABLE_NAME + ".xml"
        coverage = cat.get_resource_by_url(self.HTTP + self.GEOSERVER_URL + "/geoserver/rest/workspaces/uos/coveragestores/" + "uos_upload_image/coverages/" + self.GEOSERVER_LAYER_NAME + ".xml")

        # gsconfig expects (minx, maxx, miny, maxy, crs) as strings.
        coverage.native_bbox = (str(result[0]),str(result[1]),str(result[2]),str(result[3]),"EPSG:4326")
        cat.save(coverage)
Beispiel #27
0
    def __init__(self, gs_url, dico_gs, tipo, txt=''):
        u"""Connect to a GeoServer REST endpoint and print its layer names.

        :param gs_url: URL of the GeoServer REST API.
        :param dico_gs: dictionary for global informations.
        :param tipo: format.
        :param txt: dictionary of text in the selected language.

        NOTE(review): the visible body only lists layers with placeholder
        "####" credentials; ``dico_gs``, ``tipo`` and ``txt`` are unused
        here — presumably populated elsewhere or by a fuller version of
        this constructor. The original docstring described an OGR/WFS
        workflow this code does not perform.
        """
        cat = Catalog(gs_url, "####", "####")
        layers = cat.get_layers()
        for layer in layers:
            print(layer.name)
Beispiel #28
0
def make_style(style_name, style_info):
    """Build a class-break polygon SLD and publish it to GeoServer.

    For every class in ``style_info['style_classes']`` (each with keys
    ``label``, ``min``, ``max``, ``colour``) a rule is generated that
    matches ``style_info['target_field']`` in ``[min, max)`` and fills the
    polygon with the class colour at 0.5 opacity. The SLD is written to
    ``DATA_PATH/styles/<style_name>.sld`` and uploaded with overwrite.
    """
    # Cleaned up: removed two redundant ``style.close()`` calls inside the
    # ``with`` blocks (the context manager already closes the file).
    cat = Catalog(GEOSERVER_REST_URL, GS_USERNAME, GS_PASSWORD)
    style_dir = path('data/styles')
    if not style_dir.exists():
        style_dir.makedirs()

    style_filename = style_name + '.sld'
    style_file_path = str(os.path.join(DATA_PATH, 'styles', style_filename))

    target_field = style_info['target_field']

    sld_doc = StyledLayerDescriptor()
    nl = sld_doc.create_namedlayer(style_name)
    us = nl.create_userstyle()
    feature_style = us.create_featuretypestyle()

    for x in style_info['style_classes']:
        # python-sld has no create_rule helper here, so append the
        # <sld:Rule> element manually and wrap it afterwards.
        elem = feature_style._node.makeelement('{%s}Rule' % SLDNode._nsmap['sld'], nsmap=SLDNode._nsmap)
        feature_style._node.append(elem)

        r = Rule(feature_style, len(feature_style._node)-1)
        r.Title = x['label']

        # Lower bound: target_field >= min
        f1 = Filter(r)
        f1.PropertyIsGreaterThanOrEqualTo = PropertyCriterion(f1, 'PropertyIsGreaterThanOrEqualTo')
        f1.PropertyIsGreaterThanOrEqualTo.PropertyName = target_field
        f1.PropertyIsGreaterThanOrEqualTo.Literal = str(round(x['min']))

        # Upper bound: target_field < max
        f2 = Filter(r)
        f2.PropertyIsLessThan = PropertyCriterion(f2, 'PropertyIsLessThan')
        f2.PropertyIsLessThan.PropertyName = target_field
        f2.PropertyIsLessThan.Literal = str(round(x['max']))

        # '+' combines the two filters with a logical AND.
        r.Filter = f1 + f2

        sym = PolygonSymbolizer(r)
        fill = Fill(sym)
        fill.create_cssparameter('fill', x['colour'])
        fill.create_cssparameter('fill-opacity', '0.5')

    with open(style_file_path, 'w') as style:
        style.write(sld_doc.as_sld(True))

    with open(style_file_path, 'r') as style:
        cat.create_style(style_name, style.read(), overwrite=True)
Beispiel #29
0
def pre_delete_service(instance, sender, **kwargs):
    """Django pre-delete signal handler for a Service: remove its layers,
    and for cascaded ('C') services also drop the cascading WMS store from
    GeoServer (a failed REST request is only logged)."""
    for layer in instance.layer_set.all():
        layer.delete()
    # if instance.method == 'H':
    #     gn = Layer.objects.gn_catalog
    #     gn.control_harvesting_task('stop', [instance.external_id])
    #     gn.control_harvesting_task('remove', [instance.external_id])
    if instance.method != 'C':
        return
    try:
        user = settings.OGC_SERVER['default']['USER']
        password = settings.OGC_SERVER['default']['PASSWORD']
        gs = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                     user, password)
        cascade_store = gs.get_store(instance.name, settings.CASCADE_WORKSPACE)
        gs.delete(cascade_store, recurse=True)
    except FailedRequestError:
        logger.error("Could not delete cascading WMS Store for %s - maybe already gone" % instance.name)
Beispiel #30
0
            def wrapper(self, *args, **kwargs):
                """Test decorator body: provision a GeoServer 'geonode'
                workspace and datastore around the wrapped test, tearing
                down whichever of the two this call created.

                Relies on closure variables (``wrapped_func``,
                ``create_datastore``) defined in the enclosing scope.
                """
                workspace_name = 'geonode'
                django_datastore = db.connections['datastore']

                catalog = Catalog(
                    ogc_server_settings.internal_rest,
                    *ogc_server_settings.credentials
                )
                # Set up workspace/datastore as appropriate
                ws = catalog.get_workspace(workspace_name)
                delete_ws = False
                if ws is None:
                    # We created the workspace, so we own its teardown.
                    ws = catalog.create_workspace(workspace_name, 'http://www.geonode.org/')
                    delete_ws = True

                datastore = create_datastore(workspace_name, django_datastore, catalog)

                # test method called here
                try:
                    ret = wrapped_func(self, *args, **kwargs)
                finally:
                    # Tear down workspace/datastore as appropriate:
                    # deleting the workspace recursively removes the store.
                    if delete_ws:
                        catalog.delete(ws, recurse=True)
                    else:
                        catalog.delete(datastore, recurse=True)

                return ret
Beispiel #31
0
    def synchronizeLayers(self):
        '''
        Synchronizes layers/rules in the postgresql database, Geoserver, and Geofence. All three will match postgresql database.
        '''

        to_return = True

        permanent_gs_layers = [
            'rapid_geoview_layers', 'rapid_feature', 'rapid_geoview'
        ]
        rapid_layers = {
            lyr.uid
            for lyr in DataLayer.objects.all() if lyr.feature_set.count() > 0
        }

        cat = Catalog(GEOSERVER_REST_ENDPOINT)
        store = cat.get_store('RapidData')
        gs_layers = {
            lyr.name
            for lyr in store.get_resources()
            if lyr.name not in permanent_gs_layers
        }

        gf_rules_ids = {
            (rule.find('layer').text, int(rule.find('id').text))
            for rule in ET.fromstring(getGeofenceRules('admin').content)[:-1]
        }
        gf_rules = {rule[0] for rule in gf_rules_ids}

        gs_layers_to_remove = gs_layers.difference(rapid_layers)
        gs_layers_to_add = rapid_layers.difference(gs_layers)

        gf_rules_to_remove = gf_rules.difference(rapid_layers)
        gf_rules_to_add = rapid_layers.difference(gf_rules)

        if len(gs_layers_to_add) == 0 and len(gs_layers_to_remove) == 0:
            if len(gf_rules_to_add) == 0 and len(gf_rules_to_remove) == 0:
                return to_return

        elif len(gs_layers_to_remove) > 0:

            for lyr_uid in gs_layers_to_remove:
                print "Removing Geoserver layer {0}. . .".format(lyr_uid)
                response = self.removeFeatureType(lyr_uid)
                if response.status_code == 200:
                    print "Success!"
                else:
                    print "Failed to remove layer {0}".format(lyr_uid)
                    to_return = False
                    print response.content

        elif len(gs_layers_to_add) > 0:

            for uid in gs_layers_to_add:
                print "Adding FeatureType to Geoserver: {0}".format(uid)
                xml = self.createFeatureTypeFromUid(uid)
                response = self.sendFeatureType(xml)
                if response.status_code == 200:
                    print "Success!"
                else:
                    print "Failed to add FeatureType {0}".format(uid)
                    to_return = False
                    print response.content

        if len(gf_rules_to_add) == 0 and len(gf_rules_to_remove) == 0:
            return to_return

        elif len(gf_rules_to_remove) > 0:

            gf_ids_to_remove = [
                rule for rule in gf_rules_ids if rule[0] in gf_rules_to_remove
            ]

            for rule in gf_ids_to_remove:
                print "Removing Geofence rule for layer {0}".format(rule[0])
                response = removeGeofenceRule(rule[1])
                if response.status_code == 200:
                    print "Success!"
                else:
                    print "Failed to remove layer {0}".format(lyr_uid)
                    to_return = false
                    print response.content

        elif len(gf_rules_to_add) > 0:
            for rule in gf_rules_to_add:
                response = addGeofenceRule(layer=rule)
                if response.status_code == 200:
                    print "Success!"
                else:
                    print "Failed to add Geofence rule for layer {0}".format(
                        rule)
                    to_return = False
                    print response.content

        return to_return
class ArchiveAndCatalogueI(geocloud.ArchiveAndCatalogue):
    """Ice servant that catalogues raster outputs into a local GeoServer.

    Holds one gsconfig ``Catalog`` pointed at the local REST endpoint and
    an RLock guarding catalog mutations; log lines are forwarded to the
    broker when one is available.
    """

    def __init__(self, com):
        if not com:
            raise RuntimeError("Not communicator")
        self.com = com
        q = com.stringToProxy('IceGrid/Query')
        self.query = IceGrid.QueryPrx.checkedCast(q)
        if not self.query:
            raise RuntimeError("Invalid proxy")
        self.broker = geocloud.BrokerPrx.checkedCast(
            com.stringToProxy("broker"))
        # Lock for managing the data structures
        self.sem = Ice.threading.RLock()
        geoserver_path = "http://localhost:80/geoserver/rest"
        self.catalog = Catalog(geoserver_path)
        self.workspace = None

    def setBroker(self, broker, current=None):
        self.broker = broker

    def log(self, log):
        # Forward to the broker when present, otherwise print locally.
        if self.broker:
            self.broker.begin_appendLog("<ArchiveAndCatalogue> " + str(log))
        else:
            print(log)

    def createScenario(self, scenario, current=None):
        """Create a GeoServer workspace named after *scenario*."""
        self.sem.acquire()
        try:
            if self.catalog is None:
                self.log("Catalog")
                raise geocloud.CreationScenarioException()
            try:
                self.workspace = self.catalog.create_workspace(
                    scenario, scenario)
                self.workspace.enabled = True
                self.log("Created Scenario %s" % (scenario))
            except Exception:
                print("workspace does not created")
                self.log("Workspace does not created")
        except FailedRequestError:
            self.log("Request failed")
            raise geocloud.CreationScenarioException()
        finally:
            self.sem.release()

    def catalogue(self, path, store, scenario, current=None):
        """Upload the coverage at *path* into *store* in the current workspace."""
        if self.workspace is None:
            raise geocloud.CataloguingException()
        # BUG FIX: the lock was acquired *inside* the try block, after a
        # statement that can raise, so the finally-release could fire on an
        # unacquired lock (RuntimeError). Acquire before entering the try.
        self.sem.acquire()
        try:
            if self.catalog is None:
                raise geocloud.CataloguingException()
            # store = path.split('/')[-1]
            cv = self.catalog.create_coveragestore(store, path,
                                                   self.workspace)
            self.log("%s Catalogued!" % (path))
        except ConflictingDataError as e:
            self.log(e)
        except UploadError as e:
            self.log(e)
        except FailedRequestError as e:
            # Deliberately ignored (best-effort upload).
            pass
        except Exception as e:
            self.log(e)
        finally:
            self.sem.release()

    def deleteScenario(self, scenario, current=None):
        """Best-effort removal of every layer, store and workspace."""
        self.sem.acquire()
        try:
            for layer in self.catalog.get_layers():
                self.catalog.delete(layer)
            # BUG FIX: these loops referenced the undefined global
            # ``catalog`` and the undefined name ``wk`` (the loop variable
            # was ``vk``) -- NameErrors at runtime. Use self.catalog and a
            # consistent loop variable.
            for coverage in self.catalog.get_stores():
                for resource in coverage.get_resources():
                    self.catalog.delete(resource)
                self.catalog.delete(coverage)
            for wk in self.catalog.get_workspaces():
                self.catalog.delete(wk)
        except Exception:
            self.log("Exception while cleanning scenario")
        finally:
            # BUG FIX: the lock was never released here, so any later call
            # from another thread would deadlock.
            self.sem.release()
Beispiel #33
0
 def setUp(self):
     """Open a fresh catalog connection and create the scratch workspace."""
     service_url = settings.SERVICE_URL
     self.catalog = Catalog(service_url)
     self.workspace = self.catalog.create_workspace("gsconfig_test")
Beispiel #34
0
class GeoServer(MapServer):
    """MapServer backend that publishes process outputs to a GeoServer
    instance through its REST API and returns WCS/WFS request URLs.

    Configuration (address, credentials, workspace) is read from the
    ``geoserver`` section of the application config file.
    """

    def __init__(self, process, sessId):
        """Connect to the configured GeoServer and pick a target workspace.

        :param process: the process whose outputs will be published
        :param sessId: session identifier used to build unique store names
        """
        MapServer.__init__(self, process, sessId)

        # REST catalog client for the GeoServer instance named in the config.
        self.cat = Catalog(
            '/'.join((config.getConfigValue('geoserver',
                                            'geoserveraddress'), 'rest')),
            config.getConfigValue('geoserver', 'admin'),
            config.getConfigValue('geoserver', 'adminpassword'))

        # Workspace where data will be saved; if it is not set in the config
        # file (or does not exist) fall back to the default workspace.
        self.workspace = self.cat.get_workspace(
            config.getConfigValue('geoserver', 'workspace'))

        if not self.workspace:
            self.workspace = self.cat.get_default_workspace()

    def save(self, output):
        """Upload *output* to GeoServer and return a URL for retrieving it.

        Rasters become coverage stores (WCS GetCoverage URL); vectors become
        feature stores (WFS GetFeature URL). Returns ``None`` for any other
        datatype.

        :param output: process output object (identifier, value, bbox, ...)
        :return: URL-quoted WCS/WFS request string, or None
        """
        MapServer.save(self, output)
        # Store/layer name is unique per output, process and session.
        name = "%s-%s-%s" % (output.identifier, self.process.identifier,
                             self.sessId)
        service_url = '/'.join([
            config.getConfigValue('geoserver', 'geoserveraddress'),
            self.workspace.name
        ])

        if self.datatype == "raster":
            # Upload the raster file as a coverage store (overwrite=False).
            logging.info("upload raster file with name: {0} ".format(name))
            self.cat.create_coveragestore(name, output.value, self.workspace,
                                          False)

            # Construct the WCS GetCoverage URL for the uploaded coverage.
            return urllib2.quote('/'.join((service_url, 'wcs')) +
                                 "?SERVICE=WCS" + "&REQUEST=GetCoverage" +
                                 "&VERSION=1.0.0" + "&COVERAGE=" + name +
                                 "&CRS=" + output.projection +
                                 "&BBOX=%s,%s,%s,%s" %
                                 (output.bbox[0], output.bbox[1],
                                  output.bbox[2], output.bbox[3]) +
                                 "&HEIGHT=%s" % (output.height) + "&WIDTH=%s" %
                                 (output.width) +
                                 "&FORMAT=%s" % output.format["mimetype"])
        elif self.datatype == "vector":
            # Zip the vector data as a shapefile and upload it.
            logging.info("upload vector file with name: {0} ".format(name))
            zip_shpf, lyname = self.compressFeatureData(output.value)
            self.cat.create_featurestore(name, zip_shpf, self.workspace, False)

            # Construct the WFS GetFeature URL for the uploaded layer.
            return urllib2.quote('/'.join((service_url, 'wfs')) +
                                 "?SERVICE=WFS" + "&REQUEST=GetFeature" +
                                 "&VERSION=1.0.0" + "&TYPENAME=" + lyname)
        else:
            return None

    def compressFeatureData(self, s_ftfile):
        """Package the vector dataset *s_ftfile* as a zipped shapefile.

        Non-shapefile formats are converted via OGR first; a default WGS84
        .prj file is created when projection information is missing.

        @return:  1. the zip file path; 2. layer name at geoserver 
        """
        from osgeo import ogr

        def zipFiles(zipname, files, arcnames):
            # Write each existing file into the archive under its arcname.
            assert len(files) == len(
                arcnames), "size of file names and rename container not equal"
            zipf = zipfile.ZipFile(zipname, 'w')
            for i in range(len(files)):
                if os.path.exists(files[i]):
                    zipf.write(files[i], arcnames[i])
            # NOTE(review): relies on garbage collection to flush/close the
            # archive; an explicit zipf.close() (or a with-block) would be safer.
            zipf = None

        ft = ogr.Open(s_ftfile)
        ftDrv = ft.GetDriver()

        # The sidecar files that make up a complete shapefile.
        sft = os.path.splitext(s_ftfile)[0]
        archive_files = [
            sft + '.shp', sft + '.shx', sft + '.prj', sft + '.dbf'
        ]
        filename = os.path.split(sft)[1]
        arcnames = [
            filename + '.shp', filename + '.shx', filename + '.prj',
            filename + '.dbf'
        ]

        logging.info("the driver of vector data {0} is {1}".format(
            s_ftfile, ftDrv.name))
        if (ftDrv.name != "ESRI Shapefile"):
            # Convert non-shapefile input to a temporary shapefile via OGR.
            tempshpf = os.path.join(
                config.getConfigValue('server', 'tempPath'), str(uuid.uuid4()))
            shpDrv = ogr.GetDriverByName("ESRI Shapefile")

            shpft = shpDrv.CopyDataSource(ft, tempshpf + '.shp')
            if not shpft:
                raise exceptions.IOError(
                    "{0} format vector data to shapefile format fault".format(
                        s_ftfile))

            # Close the OGR datasources so the files are flushed to disk.
            ft = None
            shpft = None

            # Create a default WGS84 .prj file for this shapefile if there is
            # no projection information.
            if not os.path.exists(tempshpf + '.prj'):
                f = open(tempshpf + '.prj', 'w')
                f.write(
                    'GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]'
                )
                f.close()
            archive_files = [
                tempshpf + '.shp', tempshpf + '.shx', tempshpf + '.prj',
                tempshpf + '.dbf'
            ]
            zipFiles(tempshpf + '.zip', archive_files, arcnames)

            # Zip is built; delete the intermediate shapefile parts.
            for f in archive_files:
                if os.path.exists(f):
                    os.remove(f)

            return tempshpf + '.zip', filename
        else:
            # Already a shapefile: just zip the existing sidecar files.
            zipFiles(sft + '.zip', archive_files, arcnames)
            return sft + '.zip', filename
Beispiel #35
0
def main(argv):
    """Parse command-line options, download/process the requested imagery
    layers, and upload the resulting GeoTIFFs to a local GeoServer.

    :param argv: command-line arguments (typically sys.argv[1:])
    """
    #  --------------------- Handler --------------------- #
    try:
        opts, args = getopt.getopt(argv, "hd:g:t:o:", [
            "help", "test", "date=", "grid=", "target=", "only=", "overwrite"
        ])
    except getopt.GetoptError:
        print('Invalid arguments. Add "-h" or "--help" for help.')
        sys.exit(1)

    date, grid, only, target, overwrite = None, None, None, None, False
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            print('Usage: ./main.py [OPTIONS...]\n\nHelp options:\n\
-h, --help       Show help\n\
--test           Test that all packages and system variables work properly\n\
-d, --date       Set date for all imagery capture (format: yyyy-mm-dd / yyyymmdd)\n\
-g, --grid       Set center grid for sentinel imagery capture (format: east-decimal,north-decimal)\n\
-t, --target     Set path to target directory (format: path/to/target)\n\
-o, --only       Only create specified layer (format: layername)\n\
--overwrite      Overwrite layers in geoserver')
            sys.exit()
        elif opt == ("--test"):
            print(
                "Success! All packages and sytem variables seems to work properly"
            )
            sys.exit()
        elif opt in ("-d", "--date"):
            # Accept both ISO (yyyy-mm-dd) and compact (yyyymmdd) dates.
            if '-' in arg:
                date = datetime.strptime(arg, '%Y-%m-%d').date()
            else:
                date = datetime.strptime(arg, '%Y%m%d').date()
        elif opt in ("-g", "--grid"):
            grid = arg
        elif opt in ("-t", "--target"):
            target = arg
        elif opt in ("-o", "--only"):
            only = arg
        elif opt in ("--overwrite"):
            overwrite = True
    if not opts:
        print('WARNING: No arguments provided. Using default values')
    #  ------------------- Handler end ------------------- #

    d = DownloadManager(date=date, grid=grid,
                        target=target)  # Making download instance

    # Map layer keys to their download/processing functions.
    functions = {
        's2c': d.getS2,
        'terramos': d.getTerra,
        's1c': d.getS1,
        's1mos': d.getS1Mos,
        'seaice': d.getSeaIce,
        'icedrift': d.getIceDrift
    }

    #  ------------------ Getting/processing data ------------------ #
    outfiles = []
    if only:
        outfiles.append(functions[only](d.OUTDIR + only + '.tif'))
    else:
        # BUGFIX: dict.iteritems() does not exist on Python 3; dict.items()
        # behaves the same here on both Python 2 and 3.
        for key, fetch in functions.items():
            try:
                funcs.printLayerStatus(str(key))
                outfiles.append(fetch(d.OUTDIR + key + '.tif'))
            except Exception:
                print("Could not generate " + key + " layer")
                # BUGFIX: print_exc() writes the traceback itself and returns
                # None; wrapping it in print() emitted a spurious "None" line.
                traceback.print_exc()
            finally:
                print("")

    print('Created files: ' + str(outfiles) + '\n')
    # Layer functions return False on failure; abort if nothing succeeded.
    # (Loop variable renamed so it no longer shadows the builtin `file`.)
    if all(result == False for result in outfiles):
        print("No images found")
        sys.exit(1)
    #  ---------------- Getting/processing data end ---------------- #

    # --------------------- Upload to Geoserver -------------------- #
    cat = Catalog("http://localhost:8080/geoserver/rest/", "admin",
                  os.environ["GEOSERVER_SECRET_KEY"])
    print(cat)

    # BUG: ws is sometime cite instead of d.date (fixed?)
    ws = cat.get_workspace(str(d.DATE))
    if ws is None:
        ws = cat.create_workspace(str(d.DATE), str(d.DATE) + 'uri')

    for layerdata in outfiles:
        if layerdata:
            # Derive the layer name from the file's basename without extension.
            layername = layerdata.split('/')[-1].split('.')[0]
            print('Adding ' + layername + ' to geoserver...')
            cat.create_coveragestore(name=layername,
                                     data=layerdata,
                                     workspace=ws,
                                     overwrite=overwrite)

    print('Sensor data for ' + str(d.DATE) +
          ' has been successfully uploaded!')
Beispiel #36
0
def main(options):
    """Create a GeoServer workspace and PostGIS datastore for an 'urban'
    database, publish its cadastral tables with their styles, and register
    each one as a GeoNode Layer.

    :param options: parsed options providing alias, gpw, ropw, urbanUrl,
                    database attributes
    """
    # connect to geoserver
    cat = Catalog("http://localhost:8080/geoserver/rest", "admin", options.gpw)

    # create datastore for URB schema
    ws = cat.create_workspace(options.alias, 'imio.be')

    ds = cat.create_datastore(options.alias, ws)
    ds.connection_parameters.update(host=options.urbanUrl,
                                    port="5432",
                                    database=options.database,
                                    user="******",
                                    passwd=options.ropw,
                                    dbtype="postgis")

    cat.save(ds)
    ds = cat.get_store(options.alias)

    # config object: table name -> style name
    urb = {
        "capa": "Parcelles",
        "toli": "cadastre_ln_toponymiques",
        "canu": "cadastre_pt_num",
        "cabu": "Batiments",
        "gept": "cadastre_points_generaux",
        "gepn": "cadastre_pol_gen",
        "inpt": "point",
        "geli": "cadastre_ln_generales",
        "inli": "cadastre_ln_informations",
        "topt": "point",
    }

    # connect to tables and create layers and correct urban styles
    for table, style in urb.items():
        ft = cat.publish_featuretype(table, ds, 'EPSG:31370', srs='EPSG:31370')
        ft.default_style = style
        cat.save(ft)
        resource = ft.resource
        resource.title = options.alias + "_" + table
        resource.save()

        # BUGFIX: `layerName` was undefined (NameError on the first
        # iteration). The feature type is published under the table name, so
        # use the resource's own name as the GeoNode layer name.
        layer, created = Layer.objects.get_or_create(
            name=resource.name,
            defaults={
                "workspace": ws.name,
                "store": ds.name,
                "storeType": ds.resource_type,
                "typename": "%s:%s" %
                (ws.name.encode('utf-8'), resource.name.encode('utf-8')),
                "title": resource.title or 'No title provided',
                "abstract": resource.abstract or 'No abstract provided',
                #"owner": owner,
                "uuid": str(uuid.uuid4()),
                "bbox_x0": Decimal(resource.latlon_bbox[0]),
                "bbox_x1": Decimal(resource.latlon_bbox[1]),
                "bbox_y0": Decimal(resource.latlon_bbox[2]),
                "bbox_y1": Decimal(resource.latlon_bbox[3])
            })
        set_attributes(layer, overwrite=True)
        if created:
            layer.set_default_permissions()
Beispiel #37
0
 def __init__(self):
     """Initialise the manager and open a GeoServer catalog connection
     against the configured base URL."""
     models.Manager.__init__(self)
     rest_url = "%srest" % settings.GEOSERVER_BASE_URL
     self.gs_catalog = Catalog(rest_url, _user, _password)
Beispiel #38
0
class GeoGigUploaderBase(ImportHelper):
    """Shared setup/teardown and import helpers for GeoGig upload tests.

    Manages a GeoServer catalog connection, a scratch ``geonode`` workspace,
    and cleanup of the datastores / GeoGig repositories created per test.
    """

    def __init__(self, *args, **kwargs):
        super(GeoGigUploaderBase, self).__init__(*args, **kwargs)
        setUpModule()  # this isn't available when being used in other module

    def setUp(self):
        """Create test users, connect to GeoServer, ensure the workspace."""
        self.admin_user = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_user = self.create_user('non_admin', 'non_admin')
        self.catalog = Catalog(ogc_server_settings.internal_rest,
                               *ogc_server_settings.credentials)
        # Ensure the shared 'geonode' workspace exists before any import.
        if self.catalog.get_workspace('geonode') is None:
            self.catalog.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        # Names of stores created by tests; tearDown deletes each of them.
        self.datastoreNames = []

    def tearDown(self):
        """Clean up geoserver/geogig catalog.
        """
        # delete stores (will cascade to delete layers)
        for store_name in self.datastoreNames:
            self.catalog.delete(self.catalog.get_store(store_name),
                                recurse=True)

        # delete repository reference in geoserver
        for store_name in self.datastoreNames:
            self.remove_geogig_repo(store_name)

        # geoserver can leave connections open - HACK HACK HACK
        self.free_geogig_connections()

    # HACK HACK HACK -- sometimes connections from geoserver to geogig are left open.  This kills the postgresql backend!
    #  this is a major hammer.  Once geoserver/geogig are better at cleaning up, remove this.
    def free_geogig_connections(self):
        """Forcefully terminate leftover geoserver->geogig DB connections."""
        with db.connections["geogig"].cursor() as c:
            c.execute(
                "select pg_terminate_backend(pid) from pg_stat_activity where application_name = 'PostgreSQL JDBC Driver' or application_name='geogig'"
            )

    # aggressive delete of the repo (mostly cleans up after itself)
    #  call the geogig rest API DELETE
    def remove_geogig_repo(self, ref_name):
        """Delete the GeoGig repository *ref_name* via the two-step
        (token request, then DELETE) GeoGig REST API."""
        username = ogc_server_settings.credentials.username
        password = ogc_server_settings.credentials.password
        url = ogc_server_settings.rest
        http = httplib2.Http(disable_ssl_certificate_validation=False)
        http.add_credentials(username, password)
        netloc = urlparse(url).netloc
        http.authorizations.append(
            httplib2.BasicAuthentication((username, password), netloc, url, {},
                                         None, None, http))
        # First request returns a one-time token authorizing the deletion.
        rest_url = ogc_server_settings.LOCATION + "geogig/repos/" + ref_name + "/delete.json"
        resp, content = http.request(rest_url, 'GET')
        response = json.loads(content)
        token = response["response"]["token"]
        rest_url = ogc_server_settings.LOCATION + "geogig/repos/" + ref_name + "?token=" + token
        resp, content = http.request(rest_url, 'DELETE')

    # convenience method to load in the test dataset
    # return a (geonode) layer
    # the layer will be in Geoserver and Geonode
    # self.catalog.get_layer(layer.name) -- to get the Geoserver Layer
    def fully_import_file(self,
                          path,
                          fname,
                          start_time_column,
                          end_time_column=None):
        """Import *fname* from *path* into a fresh GeoGig-backed store with
        time configured on the given column(s); return the GeoNode layer."""
        # setup time
        if end_time_column is None:
            time_config = {
                'convert_to_date': [start_time_column],
                'start_date': start_time_column,
                'configureTime': True
            }
        else:
            time_config = {
                'convert_to_date': [start_time_column, end_time_column],
                'start_date': start_time_column,
                'end_date': end_time_column,
                'configureTime': True
            }

        # Unique store/layer name so repeated runs don't collide.
        name = os.path.splitext(fname)[0] + "_" + str(uuid.uuid1())[:8]
        self.datastoreNames.append(name)  # remember for future deletion

        full_fname = os.path.join(path, fname)
        configs = self.prepare_file_for_import(full_fname)
        configs[0].update({'name': name})
        configs[0].update({'layer_name': name})
        configs[0].update(time_config)

        # configure the datastore/repo
        configs[0]['geoserver_store'] = {}
        configs[0]['geoserver_store']['type'] = 'geogig'
        configs[0]['geoserver_store']['name'] = name
        configs[0]['geoserver_store']['create'] = 'true'
        configs[0]['geoserver_store']['branch'] = 'master'
        configs[0]['geoserver_store'][
            'geogig_repository'] = "geoserver://" + name

        result = self.generic_import(fname, path=path, configs=configs)
        return result

    def import_file(self, path, configs=None):
        """Imports the file.
        """
        if configs is None:
            configs = []
        self.assertTrue(os.path.exists(path), path)

        # run ogr2ogr
        ogr = OGRImport(path)
        layers = ogr.handle(configuration_options=configs)

        return layers

    def generic_import(self, filename, path, configs=None):
        """Import *filename* and assert basic invariants (srid, store,
        storeType, attribute count); return the first resulting layer."""
        if configs is None:
            configs = [{'index': 0}]

        path = os.path.join(path, filename)
        results = self.import_file(path, configs=configs)
        layer_results = []
        for result in results:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertTrue(layer.store in self.datastoreNames)
            self.assertEqual(layer.storeType, 'dataStore')

            # Zip archives can't be opened directly by DataSource here.
            if not path.endswith('zip'):
                self.assertGreaterEqual(layer.attributes.count(),
                                        DataSource(path)[0].num_fields)

            layer_results.append(layer)

        return layer_results[0]

    def prepare_file_for_import(self, filepath):
        """ Prepares the file path provided for import; performs some housekeeping, uploads & configures the file.
            Returns a list of dicts of the form {'index': <layer_index>, 'upload_layer_id': <upload_layer_id>}
                these may be used as configuration options for importing all of the layers in the file.
        """
        # Make a copy of the test file, as it's removed in configure_upload()
        filename = os.path.basename(filepath)
        tmppath = os.path.join('/tmp', filename)
        shutil.copy(filepath, tmppath)

        # upload & configure_upload expect closed file objects
        #    This is heritage from originally being closely tied to a view passing request.Files
        of = open(tmppath, 'rb')
        of.close()
        files = [of]
        uploaded_data = self.upload(files, self.admin_user)
        self.configure_upload(uploaded_data, files)
        configs = [{
            'index': l.index,
            'upload_layer_id': l.id
        } for l in uploaded_data.uploadlayer_set.all()]
        return configs

    def create_user(self, username, password, **kwargs):
        """Convenience method for creating users.
        """
        user, created = User.objects.get_or_create(username=username, **kwargs)

        # Only set the password when the user was just created; an existing
        # user keeps its current credentials.
        if created:
            user.set_password(password)
            user.save()

        return user
Beispiel #39
0
def geoserver_post_save(instance, sender, **kwargs):
    """Save keywords to GeoServer

       The way keywords are implemented requires the layer
       to be saved to the database before accessing them.

       Also (re)creates the layer's download/view Link objects (WMS, WFS or
       WCS, KML, tiles, HTML) and refreshes attributes and styles.
    """
    url = ogc_server_settings.rest

    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (FailedRequestError, EnvironmentError) as e:
        # BUGFIX: the two adjacent string literals were missing a separating
        # space ('..."%s"to save...'), garbling the log message.
        msg = ('Could not connect to geoserver at "%s" '
               'to save information for layer "%s"' %
               (ogc_server_settings.LOCATION, instance.name.encode('utf-8')))
        # BUGFIX: logger.warn(msg, e) passed `e` as a %-format argument to a
        # message with no remaining placeholders, which makes the logging
        # module raise a formatting error instead of logging the warning.
        logger.warn("%s: %s", msg, e)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return

    gs_resource.keywords = instance.keyword_list()
    #gs_resource should only be called if ogc_server_settings.BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)

    # Derive a rendering aspect ratio from the lat/lon bounding box.
    bbox = gs_resource.latlon_bbox
    dx = float(bbox[1]) - float(bbox[0])
    dy = float(bbox[3]) - float(bbox[2])

    dataAspect = 1 if dy == 0 else dx / dy

    height = 550
    width = int(height * dataAspect)

    # Set download links for WMS, WCS or WFS and KML

    links = wms_links(ogc_server_settings.public_url + 'wms?',
                      instance.typename.encode('utf-8'), instance.bbox_string,
                      instance.srid, height, width)

    for ext, name, mime, wms_url in links:
        Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                   url=wms_url,
                                   defaults=dict(
                                       extension=ext,
                                       name=name,
                                       mime=mime,
                                       link_type='image',
                                   ))

    if instance.storeType == "dataStore":
        links = wfs_links(ogc_server_settings.public_url + 'wfs?',
                          instance.typename.encode('utf-8'))
        for ext, name, mime, wfs_url in links:
            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=wfs_url,
                                       defaults=dict(
                                           extension=ext,
                                           name=name,
                                           mime=mime,
                                           url=wfs_url,
                                           link_type='data',
                                       ))

    elif instance.storeType == 'coverageStore':
        #FIXME(Ariel): This works for public layers, does it work for restricted too?
        # would those end up with no geotiff links, like, forever?
        # Temporarily open up read access so the WCS links can be generated,
        # then restore the original permission levels below.
        permissions = {}
        permissions['anonymous'] = instance.get_gen_level(ANONYMOUS_USERS)
        permissions['authenticated'] = instance.get_gen_level(
            AUTHENTICATED_USERS)
        instance.set_gen_level(ANONYMOUS_USERS, 'layer_readonly')

        links = wcs_links(ogc_server_settings.public_url + 'wcs?',
                          instance.typename.encode('utf-8'),
                          bbox=instance.bbox[:-1],
                          crs=instance.bbox[-1],
                          height=height,
                          width=width)
        for ext, name, mime, wcs_url in links:
            Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                                       url=wcs_url,
                                       defaults=dict(
                                           extension=ext,
                                           name=name,
                                           mime=mime,
                                           link_type='data',
                                       ))

        instance.set_gen_level(ANONYMOUS_USERS, permissions['anonymous'])
        instance.set_gen_level(AUTHENTICATED_USERS,
                               permissions['authenticated'])

    kml_reflector_link_download = ogc_server_settings.public_url + "wms/kml?" + urllib.urlencode(
        {
            'layers': instance.typename.encode('utf-8'),
            'mode': "download"
        })

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=kml_reflector_link_download,
                               defaults=dict(
                                   extension='kml',
                                   name=_("KML"),
                                   mime='text/xml',
                                   link_type='data',
                               ))

    kml_reflector_link_view = ogc_server_settings.public_url + "wms/kml?" + urllib.urlencode(
        {
            'layers': instance.typename.encode('utf-8'),
            'mode': "refresh"
        })

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=kml_reflector_link_view,
                               defaults=dict(
                                   extension='kml',
                                   name=_("View in Google Earth"),
                                   mime='text/xml',
                                   link_type='data',
                               ))

    tile_url = ('%sgwc/service/gmaps?' % ogc_server_settings.public_url +
                'layers=%s' % instance.typename.encode('utf-8') +
                '&zoom={z}&x={x}&y={y}' + '&format=image/png8')

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=tile_url,
                               defaults=dict(
                                   extension='tiles',
                                   name=_("Tiles"),
                                   mime='image/png',
                                   link_type='image',
                               ))

    html_link_url = '%s%s' % (settings.SITEURL[:-1],
                              instance.get_absolute_url())

    Link.objects.get_or_create(resource=instance.resourcebase_ptr,
                               url=html_link_url,
                               defaults=dict(
                                   extension='html',
                                   name=instance.typename,
                                   mime='text/html',
                                   link_type='html',
                               ))

    #remove links that belong to and old address

    for link in instance.link_set.all():
        if not urlparse(ogc_server_settings.public_url).hostname == urlparse(
                link.url).hostname:
            link.delete()

    #Save layer attributes
    set_attributes(instance)

    #Save layer styles
    set_styles(instance, gs_catalog)
Beispiel #40
0
#!/usr/bin/env python
'''
gsconfig is a python library for manipulating a GeoServer instance via the GeoServer RESTConfig API.

The project is distributed under a MIT License .
'''

__author__ = "David Winslow"
__copyright__ = "Copyright 2012-2015 Boundless, Copyright 2010-2012 OpenPlans"
__license__ = "MIT"

from geoserver.catalog import Catalog

style_to_check = "point"

cat = Catalog("http://localhost:8080/geoserver/rest", "admin", "geoserver")


def has_the_style(l):
    """Return True when the layer *l* references ``style_to_check`` either as
    its default style or among its additional styles."""
    if l.default_style.name == style_to_check:
        return True
    return any(extra.name == style_to_check for extra in l.styles)


# List the names of all catalog layers that use the style (Python 2 print
# statement applied to a list comprehension).
print[l.name for l in cat.get_layers() if has_the_style(l)]
Beispiel #41
0
def connectCatalog(ip, user, password):
    """Return a catalog from geoserver REST API.

    Example:
        connectCatalog("http://192.168.1.41:8080/geoserver/rest",
                       'user', 'password')
    """
    return Catalog(ip, user, password)
Beispiel #42
0
        gsconfig_url = open(os.path.join(secure_path, "private_gsconfig_local_url.txt")).read()
        gsconfig_username = open(os.path.join(secure_path, "private_gsconfig_username.txt")).read()
        gsconfig_password = open(os.path.join(secure_path, "private_gsconfig_key.txt")).read()

    except FileNotFoundError as e:
        log.error("Missing default credentials")
        log.error(str(e))
        raise e

    log.info("Connecting to PostgreSQL")        
    conn = psycopg2.connect(pyscopg2_connection_string)
    conn.autocommit = True
    cur = conn.cursor()

    log.info("Connecting to GeoServer")
    cat = Catalog(gsconfig_url, gsconfig_username, gsconfig_password)
    

    if args.droptable:
        log.info("Dropping table %s if it exists" % table_name)
        dropTable(cur, table_name)

    log.info("Pushing data to PostgreSQL")
    insertVector(cur, in_path, table_name)

    log.info("Pushing data to GeoServer")
    publishVector(cur, table_name, pyscopg2_connection_string, cat)

    log.debug("Commiting changes to database and closing connection")
    cur.close()
    conn.commit()
Beispiel #43
0
import csv
import getpass

# CSV report of layers, timestamped filename; ';' field separator.
c = csv.writer(open(
    "TLayers " + strftime("%Y-%m-%d %H%M%S", gmtime()) + ".csv", "wb"),
               delimiter=';',
               quotechar='|')

print "Usuario y contrasenia de Geoserver"

# NOTE(review): the following line was mangled by paste redaction — the
# password prompt (presumably getpass.getpass) and the `geoserver` URL
# assignment were fused together; restore before running.
user = raw_input("User: "******"http://geo.gob.bo/geoserver/rest"

print "Conectandose a " + geoserver
cat = Catalog(geoserver, user, password)

# Fetch the layer list
all_layers = cat.get_layers()

# Gather statistics about the layers
sin_metadata = 0
nb_capas = len(all_layers)
print "Capas sin metadatos:"
print ""

#that_layer = cat.get_layer("PassifloraInsignis")
# that_layer.enabled = True

# print that_layer.resource.workspace,that_layer.name,that_layer.resource.metadata_links,that_layer.resource.native_bbox,that_layer.default_style
Beispiel #44
0
class UploaderTests(TestCase):
    """Basic checks to make sure pages load, etc.
    """

    def create_datastore(self, connection, catalog):
        """Create (or fetch) a PostGIS datastore for the given DB connection.

        Builds the GeoServer connection parameters from the Django settings
        dict, saves the store, and tolerates the store already existing.
        """
        cfg = connection.settings_dict
        name = cfg['NAME']

        store = catalog.create_datastore(name, workspace=self.workspace)
        store.connection_parameters.update({
            'database': name,
            'passwd': cfg['PASSWORD'],
            'namespace': 'http://www.geonode.org/',
            'type': 'PostGIS',
            'dbtype': 'postgis',
            'host': cfg['HOST'],
            'user': cfg['USER'],
            'port': cfg['PORT'],
            'enabled': 'True',
        })

        try:
            catalog.save(store)
        except FailedRequestError:
            # GeoServer rejects duplicate stores; assume it already exists.
            pass

        return catalog.get_store(name)

    def create_user(self, username, password, **kwargs):
        """Get or create a user, setting *password* only on fresh creation."""
        user, was_created = User.objects.get_or_create(username=username, **kwargs)

        if not was_created:
            return user

        user.set_password(password)
        user.save()
        return user

    def setUp(self):
        """Verify test data exists and provision the shared fixtures: the
        PostGIS connection, two users, the GeoServer catalog, a 'geonode'
        workspace, and a datastore inside it.
        """
        self.assertTrue(
            os.path.exists(_TEST_FILES_DIR),
            'Test could not run due to missing test data at {0!r}'
            .format(_TEST_FILES_DIR)
        )

        # These tests require geonode to be running on :80!
        self.postgis = db.connections['datastore']
        self.postgis_settings = self.postgis.settings_dict

        self.admin_user = self.create_user('admin', 'admin', is_superuser=True)
        self.non_admin_user = self.create_user('non_admin', 'non_admin')
        # Connect to GeoServer's internal REST endpoint with configured credentials.
        self.catalog = Catalog(
            ogc_server_settings.internal_rest,
            *ogc_server_settings.credentials
        )
        # Ensure the workspace exists before creating the datastore inside it.
        if self.catalog.get_workspace('geonode') is None:
            self.catalog.create_workspace('geonode', 'http://www.geonode.org/')
        self.workspace = 'geonode'
        self.datastore = self.create_datastore(self.postgis, self.catalog)

    def tearDown(self):
        """Clean up geoserver.
        """
        # Recursive delete removes the datastore plus everything published from it.
        self.catalog.delete(self.datastore, recurse=True)

    def import_file(self, path, configs=None):
        """Run the OGR importer against *path* and return the produced layers.

        :param path: filesystem path to the dataset (must exist)
        :param configs: optional list of per-layer configuration dicts
        """
        configs = [] if configs is None else configs
        self.assertTrue(os.path.exists(path))

        importer = OGRImport(path)
        return importer.handle(configuration_options=configs)

    def generic_import(self, filename, configs=None):
        """Import *filename* and run generic checks on every resulting layer.

        Raster results must be GeoTIFF files on disk; vector results are
        checked for SRID, owning datastore, store type and attribute count.

        :returns: the first imported Layer
        """
        if configs is None:
            configs = [{'index': 0}]

        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_results = []
        for result in results:
            if result[1].get('raster'):
                layer_path = result[0]
                layer_name = os.path.splitext(os.path.basename(layer_path))[0]
                layer = Layer.objects.get(name=layer_name)
                self.assertTrue(layer_path.endswith('.tif'))
                self.assertTrue(os.path.exists(layer_path))
                gdal_layer = gdal.OpenEx(layer_path)
                # BUG FIX: assertTrue(x, 'GTiff') treated 'GTiff' as the failure
                # message and always passed; assert the driver name for real.
                self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
                layer_results.append(layer)
            else:
                layer = Layer.objects.get(name=result[0])
                self.assertEqual(layer.srid, 'EPSG:4326')
                self.assertEqual(layer.store, self.datastore.name)
                self.assertEqual(layer.storeType, 'dataStore')

                # Zips are unpacked elsewhere, so the attribute comparison
                # against the source only applies to non-zip inputs.
                if not path.endswith('zip'):
                    self.assertGreaterEqual(
                        layer.attributes.count(),
                        DataSource(path)[0].num_fields
                    )

                layer_results.append(layer)

        return layer_results[0]

    def generic_api_upload(self, filenames, configs=None):
        """Upload *filenames* through the import API and configure each layer.

        :param filenames: list of test-file names (a bare string is rejected)
        :param configs: per-file configuration dicts keyed by 'upload_file_name'
        :returns: parsed JSON content of the upload response
        """
        client = AdminClient()
        client.login_as_non_admin()

        # Don't accidentally iterate over given 'foo' as ['f', 'o', 'o'].
        self.assertNotIsInstance(filenames, str)

        # Upload Files
        outfiles = []
        for filename in filenames:
            path = test_file(filename)
            with open(path) as stream:
                data = stream.read()
            upload = SimpleUploadedFile(filename, data)
            outfiles.append(upload)
        response = client.post(
            reverse('uploads-new-json'),
            {'file': outfiles,
             'json': json.dumps(configs)},
            follow=True)
        content = json.loads(response.content)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(content['id'], 1)

        # Configure Uploaded Files
        upload_id = content['id']
        upload_layers = UploadLayer.objects.filter(upload_id=upload_id)

        for upload_layer in upload_layers:
            for config in configs:
                if config['upload_file_name'] == upload_layer.name:
                    payload = config['config']
                    url = '/importer-api/data-layers/{0}/configure/'.format(upload_layer.id)
                    response = client.post(
                        url, data=json.dumps(payload),
                        content_type='application/json'
                    )
                    # BUG FIX: assertTrue(status_code, 200) treated 200 as the
                    # failure message and always passed; assert equality.
                    self.assertEqual(response.status_code, 200)
                    url = '/importer-api/data-layers/{0}/'.format(upload_layer.id)
                    response = client.get(url, content_type='application/json')
                    self.assertEqual(response.status_code, 200)

        return content

    def generic_raster_import(self, filename, configs=None):
        """Import a raster file and verify it landed on disk as a GeoTIFF.

        :returns: the imported Layer
        """
        if configs is None:
            configs = [{'index': 0}]

        path = test_file(filename)
        results = self.import_file(path, configs=configs)
        layer_path = results[0][0]
        layer_name = os.path.splitext(os.path.basename(layer_path))[0]
        layer = Layer.objects.get(name=layer_name)
        self.assertTrue(layer_path.endswith('.tif'))
        self.assertTrue(os.path.exists(layer_path))
        gdal_layer = gdal.OpenEx(layer_path)
        # BUG FIX: assertTrue(x, 'GTiff') treated 'GTiff' as the failure
        # message and always passed; assert the driver name for real.
        self.assertEqual(gdal_layer.GetDriver().ShortName, 'GTiff')
        return layer

    def test_multi_upload(self):
        """Tests Uploading Multiple Files
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_year_field.zip',
                'boxes_with_date.zip',
                'point_with_date.geojson'
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_year_field.shp',
                    'config': [{'index': 0}]
                },
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [{'index': 0}]
                },
                {
                    'upload_file_name': 'point_with_date.geojson',
                    'config': [{'index': 0}]
                }
            ]
        )
        # 9 = total registered files across the two zips and the geojson
        # (presumably the zips expand to multiple members) — TODO confirm.
        self.assertEqual(9, upload['count'])

    def test_upload_with_slds(self):
        """Tests Uploading sld
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_date.zip',
                'boxes.sld',
                'boxes1.sld'
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [
                        {
                            'index': 0,
                            'default_style': 'boxes.sld',
                            'styles': ['boxes.sld', 'boxes1.sld']
                        }
                    ]
                }
            ]
        )
        self.assertEqual(6, upload['count'])
        upload_id = upload['id']
        # Not used below, but .get() raises if the upload record is missing,
        # so this doubles as an existence check.
        upload_obj = UploadedData.objects.get(pk=upload_id)
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk

        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(6, upfiles_count)

        # Warning: this assumes that Layer pks equal UploadLayer pks
        layer = Layer.objects.get(pk=layer_id)
        gslayer = self.catalog.get_layer(layer.name)
        default_style = gslayer.default_style
        # TODO: can we use public API or omit this?
        self.catalog._cache.clear()
        self.assertEqual('boxes.sld', default_style.filename)

    def test_upload_with_metadata(self):
        """Tests Uploading metadata
        """
        upload = self.generic_api_upload(
            filenames=[
                'boxes_with_date.zip',
                'samplemetadata.xml',
            ],
            configs=[
                {
                    'upload_file_name': 'boxes_with_date.shp',
                    'config': [
                        {
                            'index': 0,
                            'metadata': 'samplemetadata.xml'
                        }
                    ]
                }
            ]
        )
        self.assertEqual(5, upload['count'])
        upload_id = upload['id']
        upload_obj = UploadedData.objects.get(pk=upload_id)
        # TODO: why did we get upload_obj?
        # (Unused, but .get() raises if the record is missing — acts as a check.)
        uplayers = UploadLayer.objects.filter(upload=upload_id)
        layer_id = uplayers[0].pk

        upfiles_count = UploadFile.objects.filter(upload=upload_id).count()
        self.assertEqual(5, upfiles_count)

        # Language/title must have been read from samplemetadata.xml.
        layer = Layer.objects.get(pk=layer_id)
        self.assertEqual(layer.language, 'eng')
        self.assertEqual(layer.title, 'Old_Americas_LSIB_Polygons_Detailed_2013Mar')

    def test_raster(self):
        """Exercise raster import.
        """
        layer = self.generic_raster_import(
            'test_grid.tif',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        # Name may carry a disambiguating suffix, so only check the prefix.
        self.assertTrue(layer.name.startswith('test_grid'))

    def test_box_with_year_field(self):
        """Tests the import of test_box_with_year_field.
        """

        layer = self.generic_import(
            'boxes_with_year_field.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date(self):
        """Tests the import of test_boxes_with_date.
        """

        layer = self.generic_import(
            'boxes_with_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )

        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_gpkg(self):
        """Tests the import of test_boxes_with_date.gpkg.
        """

        layer = self.generic_import(
            'boxes_with_date.gpkg',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )

        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_plus_raster_gpkg_by_index(self):
        """Tests the import of multilayer vector + raster geopackage using index
        """

        # Only layer 0 gets time configuration; layers 1-7 use defaults.
        layer = self.generic_import(
            'boxes_plus_raster.gpkg',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                },
                {'index': 1},
                {'index': 2},
                {'index': 3},
                {'index': 4},
                {'index': 5},
                {'index': 6},
                {'index': 7},
            ]
        )

        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,)
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_date_csv(self):
        """Tests a CSV with WKT polygon.
        """

        layer = self.generic_import(
            'boxes_with_date.csv',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_csv_missing_features(self):
        """Test csv that has some rows without geoms and uses ISO-8859 (Latin 4) encoding.
        """

        # Success means the import completes; generic_import's internal
        # assertions cover SRID/store checks.
        self.generic_import(
            'missing-features.csv',
            configs=[
                {'index': 0}
            ]
        )

    def test_boxes_with_iso_date(self):
        """Tests the import of test_boxes_with_iso_date.
        """

        layer = self.generic_import(
            'boxes_with_date_iso_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_duplicate_imports(self):
        """Import the same layer twice to ensure file names increment properly.
        """
        path = test_file('boxes_with_date_iso_date.zip')
        ogr = OGRImport(path)
        layers1 = ogr.handle({'index': 0, 'name': 'test'})
        layers2 = ogr.handle({'index': 0, 'name': 'test'})

        # Second import of the same name must be deduplicated to 'test0'.
        self.assertEqual(layers1[0][0], 'test')
        self.assertEqual(layers2[0][0], 'test0')

    def test_launder(self):
        """Ensure the launder function works as expected."""
        cases = [
            ('tm_world_borders_simpl_0.3', 'tm_world_borders_simpl_0_3'),
            ('Testing#', 'testing_'),
            ('   ', '_'),
        ]
        for raw, expected in cases:
            self.assertEqual(launder(raw), expected)

    def test_boxes_with_date_iso_date_zip(self):
        """Tests the import of boxes_with_date_iso_date.zip.
        """

        layer = self.generic_import(
            'boxes_with_date_iso_date.zip',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )
        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_dates_bc(self):
        """Tests the import of test_boxes_with_dates_bc.
        """

        layer = self.generic_import(
            'boxes_with_dates_bc.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )

        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_point_with_date(self):
        """Tests the import of point_with_date.geojson
        """

        layer = self.generic_import(
            'point_with_date.geojson',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )

        # OGR will name geojson layers 'ogrgeojson' we rename to the path basename
        self.assertTrue(layer.name.startswith('point_with_date'))
        # convert_to_date produces a 'date_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'date_as_date')
        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def test_boxes_with_end_date(self):
        """Tests the import of test_boxes_with_end_date.

        This layer has a date and an end date field that are typed correctly.
        """

        layer = self.generic_import(
            'boxes_with_end_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date', 'enddate'],
                    'start_date': 'date',
                    'end_date': 'enddate',
                    'configureTime': True
                }
            ]
        )

        # Both converted fields become xsd:dateTime attributes.
        date_attr = get_layer_attr(layer, 'date_as_date')
        end_date_attr = get_layer_attr(layer, 'enddate_as_date')

        self.assertEqual(date_attr.attribute_type, 'xsd:dateTime')
        self.assertEqual(end_date_attr.attribute_type, 'xsd:dateTime')

        # Configure a time range (start + end attributes), then verify both
        # ends made it into the resource's time metadata.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute,
            end_attribute=end_date_attr.attribute
        )
        self.generic_time_check(
            layer,
            attribute=date_attr.attribute,
            end_attribute=end_date_attr.attribute
        )

    def test_us_states_kml(self):
        """Tests the import of us_states.kml.

        Only checks the resulting layer name; no time configuration here.
        """
        # TODO: Support time in kmls.
        layer = self.generic_import(
            'us_states.kml',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        self.assertEqual(layer.name.lower(), "us_states")

    def test_mojstrovka_gpx(self):
        """Tests the import of mojstrovka.gpx.

        The GPX 'time' field is converted to a date attribute and used to
        configure the layer's time dimension.
        """
        layer = self.generic_import(
            'mojstrovka.gpx',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['time'],
                    'configureTime': True
                }
            ]
        )
        # convert_to_date produces a 'time_as_date' attribute typed xsd:dateTime.
        date_attr = get_layer_attr(layer, 'time_as_date')
        self.assertEqual(date_attr.attribute_type, u'xsd:dateTime')

        # Enable the time dimension on the GeoServer resource, then verify it.
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_attr.attribute
        )
        self.generic_time_check(layer, attribute=date_attr.attribute)

    def generic_time_check(self, layer, attribute=None, end_attribute=None):
        """Convenience method to run generic tests on time layers.

        Fetches the resource fresh from GeoServer and asserts its time
        dimension is enabled, uses LIST presentation, and points at the
        expected start/end attributes.
        """
        # TODO: can we use public API or omit this?
        # Clearing the cache forces a fresh fetch of the resource below.
        self.catalog._cache.clear()
        resource = self.catalog.get_resource(
            layer.name, store=layer.store, workspace=self.workspace
        )

        time_info = resource.metadata['time']
        self.assertEqual('LIST', time_info.presentation)
        self.assertEqual(True, time_info.enabled)
        self.assertEqual(attribute, time_info.attribute)
        self.assertEqual(end_attribute, time_info.end_attribute)

    def test_us_shootings_csv(self):
        """Tests the import of US_Shootings.csv.
        """
        # CSV open options require GDAL >= 2.0.
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')

        # NOTE(review): generic_import calls test_file() on its argument, so
        # passing an already-resolved path means test_file runs twice —
        # presumably idempotent for absolute paths; confirm.
        path = test_file('US_Shootings.csv')
        layer = self.generic_import(
            path,
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['Date']
                }
            ]
        )
        self.assertTrue(layer.name.startswith('us_shootings'))

        date_field = 'date'
        configure_time(
            self.catalog.get_layer(layer.name).resource,
            attribute=date_field
        )
        self.generic_time_check(layer, attribute=date_field)

    def test_sitins(self):
        """Tests the import of US_Civil_Rights_Sitins0.csv
        """
        # CSV open options require GDAL >= 2.0.
        if osgeo.gdal.__version__ < '2.0.0':
            self.skipTest('GDAL Version does not support open options')
        layer = self.generic_import(
            'US_Civil_Rights_Sitins0.csv',
            configs=[
                {
                    'index': 0, 'convert_to_date': ['Date']
                }
            ]
        )
        self.assertEqual(layer.name.lower(), 'us_civil_rights_sitins0')

    def get_layer_names(self, path):
        """Return the names of all layers in the data source at *path*."""
        return [lyr.name for lyr in DataSource(path)]

    def test_gdal_import(self):
        """Import a GeoJSON file by pre-resolved path with date conversion."""
        # NOTE(review): generic_import calls test_file() on its argument, so
        # this resolves the path twice — presumably idempotent; confirm.
        path = test_file('point_with_date.geojson')
        self.generic_import(
            path,
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date']
                }
            ]
        )

    def test_wfs(self):
        """Tests the import from a WFS Endpoint
        """
        # Requires network access to the public Boundless demo server.
        wfs = 'WFS:http://demo.boundlessgeo.com/geoserver/wfs'
        ogr = OGRImport(wfs)
        # NOTE(review): 'og:bugsites' looks like it may be missing a letter in
        # the workspace prefix — verify against the demo server's layer list.
        configs = [
            {'layer_name': 'og:bugsites'},
            {'layer_name': 'topp:states'}
        ]
        layers = ogr.handle(configuration_options=configs)
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_arcgisjson(self):
        """Tests the import from an ArcGIS FeatureServer JSON query endpoint.
        """
        # Requires network access to the public ArcGIS sample server.
        endpoint = 'http://sampleserver3.arcgisonline.com/ArcGIS/rest/services/Hydrography/Watershed173811/FeatureServer/0/query?where=objectid+%3D+objectid&outfields=*&f=json'
        ogr = OGRImport(endpoint)
        configs = [{'index':0}]
        layers = ogr.handle(configuration_options=configs)
        for result in layers:
            layer = Layer.objects.get(name=result[0])
            self.assertEqual(layer.srid, 'EPSG:4326')
            self.assertEqual(layer.store, self.datastore.name)
            self.assertEqual(layer.storeType, 'dataStore')

    def test_file_add_view(self):
        """Tests the file_add_view.
        """
        client = AdminClient()

        # test login required for this view
        request = client.get(reverse('uploads-new'))
        self.assertEqual(request.status_code, 302)

        client.login_as_non_admin()

        # A valid GeoJSON upload should succeed and redirect to the list view.
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['request'].path, reverse('uploads-list'))
        self.assertEqual(len(response.context['object_list']), 1)
        upload = response.context['object_list'][0]
        self.assertEqual(upload.user.username, 'non_admin')
        self.assertEqual(upload.file_type, 'GeoJSON')
        self.assertTrue(upload.uploadlayer_set.all())
        self.assertEqual(upload.state, 'UPLOADED')
        self.assertIsNotNone(upload.name)

        uploaded_file = upload.uploadfile_set.first()
        self.assertTrue(os.path.exists(uploaded_file.file.path))

        # An empty file should be rejected with a form error on 'file'.
        with open(test_file('empty_file.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(response.status_code, 200)
        self.assertIn('file', response.context_data['form'].errors)

    def test_file_add_view_as_json(self):
        """Tests the file_add_view with a JSON response.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new-json'),
                {'file': stream},
                follow=True
            )

        # The JSON endpoint must return JSON carrying the upload state and id.
        self.assertEqual(response.status_code, 200)
        self.assertIn('application/json', response.get('Content-Type', ''))
        content = json.loads(response.content)
        self.assertIn('state', content)
        self.assertIn('id', content)

    def test_describe_fields(self):
        """Tests the describe fields functionality.

        Inspects US_Shootings.csv and checks the reported layer name,
        field names, and feature count.
        """
        path = test_file('US_Shootings.csv')
        with GDALInspector(path) as inspector:
            layers = inspector.describe_fields()

        # BUG FIX: assertTrue(x, 'us_shootings') treated the expected name as
        # a failure message and always passed; assert equality for real.
        self.assertEqual(layers[0]['layer_name'], 'us_shootings')
        self.assertEqual([n['name'] for n in layers[0]['fields']], ['Date', 'Shooter', 'Killed',
                                                                    'Wounded', 'Location', 'City',
                                                                    'Longitude', 'Latitude'])
        self.assertEqual(layers[0]['feature_count'], 203)

    def test_gdal_file_type(self):
        """Tests the describe fields functionality.
        """
        # Map of test file -> expected GDAL driver/file type.
        filenames = {
            'US_Shootings.csv': 'CSV',
            'point_with_date.geojson': 'GeoJSON',
            'mojstrovka.gpx': 'GPX',
            'us_states.kml': 'KML',
            'boxes_with_year_field.shp': 'ESRI Shapefile',
            'boxes_with_date_iso_date.zip': 'ESRI Shapefile'
        }
        from .models import NoDataSourceFound
        try:
            # Sorted for a deterministic iteration order across runs.
            for filename, file_type in sorted(filenames.items()):
                path = test_file(filename)
                with GDALInspector(path) as inspector:
                    self.assertEqual(inspector.file_type(), file_type)
        except NoDataSourceFound:
            logging.exception('No data source found in: {0}'.format(path))
            raise

    def test_configure_view(self):
        """Tests the configuration view.

        Uploads a GeoJSON file as the non-admin user, configures it with time
        and per-user permissions, then verifies layer properties, ownership,
        and the resulting permission sets.
        """
        path = test_file('point_with_date.geojson')
        # BUG FIX: the username here was corrupted in the paste ('******');
        # reconstructed as 'test' to match the 'users' key in the payload below.
        new_user = User.objects.create(username='test')
        new_user_perms = ['change_resourcebase_permissions']
        client = AdminClient()
        client.login_as_non_admin()

        with open(path) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        upload = response.context['object_list'][0]

        payload = [
            {
                'index': 0,
                'convert_to_date': ['date'],
                'start_date': 'date',
                'configureTime': True,
                'editable': True,
                'permissions': {
                    'users': {
                        'test': new_user_perms,
                        'AnonymousUser': [
                            'change_layer_data',
                            'download_resourcebase',
                            'view_resourcebase'
                        ]
                    }
                }
            }
        ]
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json'
        )

        # BUG FIX: assertTrue(status_code, 200) always passed; assert equality.
        self.assertEqual(response.status_code, 200)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        # BUG FIX: assertTrue(x, 'xsd:dateTime') always passed; assert equality.
        self.assertEqual(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
        self.assertEqual(
            Layer.objects.all()[0].owner.username,
            self.non_admin_user.username
        )

        perms = first_layer.get_all_level_info()
        user = User.objects.get(username=self.non_admin_user.username)

        # check user permissions
        expected_perms = [
            u'publish_resourcebase',
            u'change_resourcebase_permissions',
            u'delete_resourcebase',
            u'change_resourcebase',
            u'change_resourcebase_metadata',
            u'download_resourcebase',
            u'view_resourcebase',
            u'change_layer_style',
            u'change_layer_data'
        ]
        for perm in expected_perms:
            self.assertIn(perm, perms['users'][user])

        self.assertTrue(perms['users'][new_user])
        self.assertIn(
            'change_resourcebase_permissions',
            perms['users'][new_user]
        )
        # BUG FIX: corrupted username ('******') reconstructed as
        # 'AnonymousUser' — the only user granted change_layer_data above.
        self.assertIn(
            'change_layer_data',
            perms['users'][User.objects.get(username='AnonymousUser')]
        )

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, first_layer)

    def test_configure_view_convert_date(self):
        """Tests the configure view with a dataset that needs to be converted to a date.

        Also checks method/payload validation on the configure endpoint and
        that a non-owner cannot configure someone else's upload.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('US_Shootings.csv')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        upload = response.context['object_list'][0]

        payload = [
            {
                'index': 0,
                'convert_to_date': ['Date'],
                'start_date': 'Date',
                'configureTime': True,
                'editable': True
            }
        ]

        # GET is not allowed on the configure endpoint.
        response = client.get(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 405)

        # POST without a payload is a bad request.
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 400)

        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id),
            data=json.dumps(payload),
            content_type='application/json'
        )
        # BUG FIX: assertTrue(status_code, 200) always passed; assert equality.
        self.assertEqual(response.status_code, 200)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        # BUG FIX: assertTrue(x, 'xsd:dateTime') always passed; assert equality.
        self.assertEqual(first_layer.attributes[1].attribute_type, 'xsd:dateTime')
        # BUG FIX: the second argument was only a (misleading) failure message;
        # make the non-empty queryset check explicit.
        self.assertTrue(first_layer.attributes.filter(attribute='date').exists())

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)

        # ensure a user who does not own the upload cannot configure an import from it.
        client.logout()
        client.login_as_admin()
        response = client.post(
            '/importer-api/data-layers/{0}/configure/'.format(upload.id)
        )
        self.assertEqual(response.status_code, 404)

    def test_list_api(self):
        """Exercise the /importer-api/data/ list endpoint.

        Anonymous requests are rejected (401), authenticated requests
        succeed, and ``user__username`` filtering narrows the results.
        """
        client = AdminClient()

        # Anonymous access is denied.
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 401)

        client.login_as_non_admin()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)

        admin = User.objects.get(username=self.admin_user.username)
        non_admin = User.objects.get(username=self.non_admin_user.username)

        # Seed one upload per user so the filter below has something to do.
        with open(test_file('US_Shootings.csv'), 'rb') as stream:
            uploaded_file = SimpleUploadedFile('test_data', stream.read())
            for state, owner in (('Admin test', admin),
                                 ('Non admin test', non_admin)):
                seeded = UploadedData.objects.create(state=state, user=owner)
                seeded.uploadfile_set.add(
                    UploadFile.objects.create(file=uploaded_file))

        client.login_as_admin()
        response = client.get('/importer-api/data/')
        self.assertEqual(response.status_code, 200)
        listing = json.loads(response.content)
        self.assertEqual(len(listing['objects']), 2)

        # Admins can filter uploads by owner.
        response = client.get('/importer-api/data/?user__username=admin')
        self.assertEqual(response.status_code, 200)
        listing = json.loads(response.content)
        self.assertEqual(len(listing['objects']), 1)

    def test_layer_list_api(self):
        """Anonymous layer-list requests are rejected; authenticated
        requests succeed."""
        api_url = '/importer-api/data-layers/'

        client = AdminClient()
        self.assertEqual(client.get(api_url).status_code, 401)

        client.login_as_non_admin()
        self.assertEqual(client.get(api_url).status_code, 200)

    def test_delete_from_non_admin_api(self):
        """Ensure users can delete their data.

        A non-admin uploads a file, then deletes it through the API; the
        upload record must be gone afterwards.
        """
        # BUG FIX: the client was instantiated twice in a row; one
        # instance is sufficient.
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(UploadedData.objects.all().count(), 1)
        upload_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(upload_id))
        self.assertEqual(response.status_code, 204)

        self.assertEqual(UploadedData.objects.all().count(), 0)

    def test_delete_from_admin_api(self):
        """Administrators may delete uploads they do not own.

        A non-admin creates an upload; an admin then removes it via the
        API and the record disappears.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        self.assertEqual(UploadedData.objects.all().count(), 1)

        # Switch identities: the admin did not create this upload.
        client.logout()
        client.login_as_admin()

        target_id = UploadedData.objects.first().id
        response = client.delete('/importer-api/data/{0}/'.format(target_id))
        self.assertEqual(response.status_code, 204)
        self.assertEqual(UploadedData.objects.all().count(), 0)

    def naming_an_import(self):
        """Tests providing a name in the configuration options.

        NOTE(review): this method lacks the ``test_`` prefix, so the
        unittest runner never discovers or runs it -- confirm whether it
        was disabled deliberately before renaming it.
        """
        client = AdminClient()
        client.login_as_non_admin()
        name = 'point-with-a-date'
        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        # Configure the upload, supplying an explicit layer name.
        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'name': name,
            'editable': True
        }
        response = client.post(
            '/importer-api/data-layers/1/configure/',
            data=json.dumps(payload),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, 200)

        # Dashes in the requested name end up as underscores in the title.
        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.title, name.replace('-', '_'))

    def test_api_import(self):
        """Tests the import api.

        Uploads a GeoJSON file, configures it through the API with a date
        conversion, and verifies the resulting Django layer, the GeoServer
        layer's time metadata, and the UploadLayer bookkeeping.
        """
        client = AdminClient()
        client.login_as_non_admin()

        with open(test_file('point_with_date.geojson')) as stream:
            response = client.post(
                reverse('uploads-new'),
                {'file': stream},
                follow=True
            )

        payload = {
            'index': 0,
            'convert_to_date': ['date'],
            'start_date': 'date',
            'configureTime': True,
            'editable': True
        }

        # Options are stored as a dict even before configuration.
        self.assertIsInstance(
            UploadLayer.objects.first().configuration_options,
            dict
        )

        response = client.get('/importer-api/data-layers/1/')
        self.assertEqual(response.status_code, 200)

        response = client.post(
            '/importer-api/data-layers/1/configure/',
            data=json.dumps([payload]),
            content_type='application/json'
        )
        self.assertEqual(response.status_code, 200)
        self.assertIn('task', response.content)

        first_layer = Layer.objects.all()[0]
        self.assertEqual(first_layer.srid, 'EPSG:4326')
        self.assertEqual(first_layer.store, self.datastore.name)
        self.assertEqual(first_layer.storeType, 'dataStore')
        # BUG FIX: was assertTrue(value, 'xsd:dateTime') -- the second
        # argument is a failure message, so nothing was being compared.
        self.assertEqual(
            first_layer.attributes[1].attribute_type, 'xsd:dateTime')

        catalog_layer = self.catalog.get_layer(first_layer.name)
        self.assertIn('time', catalog_layer.resource.metadata)
        self.assertEqual(UploadLayer.objects.first().layer, first_layer)
        self.assertTrue(UploadLayer.objects.first().task_id)

    def test_valid_file_extensions(self):
        """Every extension the importer advertises passes validation, and
        an unsupported extension raises ``ValidationError``."""
        handler = load_handler(OSGEO_IMPORTER, 'test.txt')
        for extension in handler.valid_extensions:
            upload = SimpleUploadedFile('test.{0}'.format(extension), '')
            self.assertIsNone(validate_file_extension(upload))

        with self.assertRaises(ValidationError):
            validate_file_extension(SimpleUploadedFile('test.txt', ''))

    def test_no_geom(self):
        """Validate inspector behaviour for CSV geometry columns.

        A CSV whose point data sits in an unrecognized 'loc' column is
        rejected, while one using the conventional 'WKT' column is
        accepted even though the geometry type cannot be determined.
        (The original docstring was copy-pasted from the extension test.)
        """

        with self.assertRaises(ValidationError):
            validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,loc\nyes,POINT(0,0)'))

        # This should pass (geom type is unknown)
        validate_inspector_can_read(SimpleUploadedFile('test.csv', 'test,WKT\nyes,POINT(0,0)'))

    def test_numeric_overflow(self):
        """Regression test for numeric field overflows in shapefiles.

        # https://trac.osgeo.org/gdal/ticket/5241
        """
        config = {
            'configureTime': False,
            'convert_to_date': ['W1_OPENDAT'],
            'editable': True,
            'index': 0,
            'name': 'Walmart',
            'start_date': 'W1_OPENDAT',
        }
        self.generic_import('Walmart.zip', configs=[config])

    def test_multipolygon_shapefile(self):
        """Shapefiles containing multipart polygons import cleanly."""
        config = {'index': 0}
        self.generic_import('PhoenixFirstDues.zip', configs=[config])

    def test_istanbul(self):
        """Multipart polygons in a non-WGS84 spatial reference import and
        keep their native projection."""
        imported = self.generic_import('Istanbul.zip', configs=[{'index': 0}])
        resource = self.catalog.get_resource(imported.name)
        self.assertEqual(resource.projection, 'EPSG:32635')

    def test_gwc_handler(self):
        """Tests the GeoWebCache handler

        A time-enabled layer must expose a regexParameterFilter in its GWC
        configuration; a layer imported without time configuration must not.
        """
        layer = self.generic_import(
            'boxes_with_date.shp',
            configs=[
                {
                    'index': 0,
                    'convert_to_date': ['date'],
                    'start_date': 'date',
                    'configureTime': True
                }
            ]
        )

        gwc = GeoWebCacheHandler(None)
        gs_layer = self.catalog.get_layer(layer.name)

        self.assertTrue(gwc.time_enabled(gs_layer))
        gs_layer.fetch()

        # payload[0]['status'] and payload[1] are read below -- presumably a
        # (response, content) pair from the catalog's HTTP client; confirm.
        payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
        self.assertIn('regexParameterFilter', payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

        # Don't configure time, ensure everything still works
        layer = self.generic_import(
            'boxes_with_date_iso_date.shp',
            configs=[
                {
                    'index': 0
                }
            ]
        )
        gs_layer = self.catalog.get_layer(layer.name)
        # Drop gsconfig's cached responses so fetch() re-reads the server.
        self.catalog._cache.clear()
        gs_layer.fetch()

        payload = self.catalog.http.request(gwc.gwc_url(gs_layer))
        self.assertNotIn('regexParameterFilter', payload[1])
        self.assertEqual(int(payload[0]['status']), 200)

    def test_utf8(self):
        """Attribute values containing UTF-8 characters survive import."""
        path = test_file('china_provinces.shp')
        imported = self.generic_import(path)
        ogr_import = OGRImport(path)
        datastore, _ = ogr_import.open_target_datastore(
            ogr_import.target_store)
        query = (
            "select NAME_CH from {0} where NAME_PY = 'An Zhou'"
            .format(imported.name)
        )
        rows = datastore.ExecuteSQL(query)
        feature = rows.GetFeature(0)
        self.assertEqual(feature.GetField('name_ch'), '安州')

    def test_non_converted_date(self):
        """A field used as start_date without conversion still becomes the
        published layer's time attribute."""
        imported = self.generic_import(
            'TM_WORLD_BORDERS_2005.zip',
            configs=[{'index': 0, 'start_date': 'Year',
                      'configureTime': True}]
        )
        gs_layer = self.catalog.get_layer(imported.typename)
        metadata = gs_layer.resource.metadata
        self.assertIn('time', metadata)
        # Field names are lower-cased on import.
        self.assertEqual('year', metadata['time'].attribute)

    def test_fid_field(self):
        """Regression test: importing succeeds when the target layer
        supports an FID field but the source does not provide one."""
        config = {'index': 0}
        self.generic_import('noaa_paleoclimate.zip', configs=[config])

    def test_csv_with_wkb_geometry(self):
        """Exercise import of CSV files with multiple geometry variants."""
        anonymous_permissions = {
            'users': {
                'AnonymousUser': [
                    'change_layer_data',
                    'download_resourcebase',
                    'view_resourcebase'
                ]
            }
        }

        filenames = (
            'police_csv.csv',
            'police_csv_nOGC.csv',
            'police_csv_noLatLon.csv',
            'police_csv_WKR.csv',
            'police_csv_nFID.csv',
            'police_csv_nOGFID.csv',
            'police_csv_noWKB.csv',
        )

        for filename in filenames:
            config = {
                'configureTime': True,
                'convert_to_date': ['date_time'],
                'editable': True,
                'index': 0,
                'name': filename.lower(),
                'permissions': anonymous_permissions,
                'start_date': 'date_time',
            }
            self.generic_import(filename, config)
Beispiel #45
0
def geoserver_pre_save(instance, sender, **kwargs):
    """Send information to geoserver.

       The attributes sent include:

        * Title
        * Abstract
        * Name
        * Keywords
        * Metadata Links,
        * Point of Contact name and url

       Afterwards the bounding box is read back from GeoServer and copied
       onto ``instance``, and its thumbnail is refreshed (without saving).
    """
    url = ogc_server_settings.rest
    try:
        gs_catalog = Catalog(url, _user, _password)
        gs_resource = gs_catalog.get_resource(instance.name)
    except (EnvironmentError, FailedRequestError) as e:
        gs_resource = None
        # BUG FIX: the two literals were concatenated without a separating
        # space ('..."%s"to save...').
        msg = ('Could not connect to geoserver at "%s" '
               'to save information for layer "%s"' %
               (ogc_server_settings.LOCATION, instance.name.encode('utf-8')))
        # BUG FIX: logger.warn(msg, e) treated ``e`` as a %-format argument
        # for a message with no remaining placeholders, which raises a
        # formatting error inside the logging machinery.
        logger.warn('%s (%s)', msg, e)
        # If geoserver is not online, there is no need to continue
        return

    # If there is no resource returned it could mean one of two things:
    # a) There is a synchronization problem in geoserver
    # b) The unit tests are running and another geoserver is running in the
    # background.
    # For both cases it is sensible to stop processing the layer
    if gs_resource is None:
        logger.warn('Could not get geoserver resource for %s' % instance)
        return

    gs_resource.title = instance.title
    gs_resource.abstract = instance.abstract
    gs_resource.name = instance.name

    # Get metadata links
    metadata_links = []
    for link in instance.link_set.metadata():
        metadata_links.append((link.name, link.mime, link.url))

    gs_resource.metadata_links = metadata_links
    # gs_resource should only be saved if BACKEND_WRITE_ENABLED == True
    if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
        gs_catalog.save(gs_resource)

    gs_layer = gs_catalog.get_layer(instance.name)

    # Attribute the layer to its point of contact, when one is set.
    if instance.poc and instance.poc.user:
        gs_layer.attribution = str(instance.poc.user)
        profile = Profile.objects.get(user=instance.poc.user)
        # SITEURL ends with a slash; get_absolute_url() starts with one.
        gs_layer.attribution_link = (settings.SITEURL[:-1] +
                                     profile.get_absolute_url())
        # gs_layer should only be saved if BACKEND_WRITE_ENABLED == True
        if getattr(ogc_server_settings, "BACKEND_WRITE_ENABLED", True):
            gs_catalog.save(gs_layer)

    # Read information back from geoserver: bounding box, SRID, download
    # links (WMS, WCS or WFS and KML) and styles (SLD).
    # (This was a stray no-op string literal in the original; converted to
    # a comment.)
    gs_resource = gs_catalog.get_resource(instance.name)

    bbox = gs_resource.latlon_bbox

    # FIXME(Ariel): Correct srid setting below
    # self.srid = gs_resource.src

    # Set bounding box values
    instance.bbox_x0 = bbox[0]
    instance.bbox_x1 = bbox[1]
    instance.bbox_y0 = bbox[2]
    instance.bbox_y1 = bbox[3]

    instance.update_thumbnail(save=False)
Beispiel #46
0
def calculate_yield(db, schema):
    """Fetch per-gid yield aggregates and ensure the matching GeoServer store.

    If a store named ``<db>_<schema>_agareas`` is missing, the
    ``<schema>.agareas`` table is exported to a zipped shapefile and PUT to
    GeoServer's REST API first.  Then the average (over ensembles) of each
    gid's maximum ``gwad`` is read from ``<schema>.dssat``.

    :param db: PostgreSQL database name (also prefixes the store name)
    :param schema: schema containing the ``agareas`` and ``dssat`` tables
    :return: ``(sorted rows, storename)`` on success, or the caught
        exception object on failure.  NOTE(review): returning the exception
        forces callers to type-check the result -- consider re-raising.
    """

    try:
        conn = psycopg2.connect(
            "dbname={0} user={1} host={2} password={3}".format(
                db, cfg.connection['user'], cfg.connection['host'],
                cfg.connection['password']))
        cur = conn.cursor()
        storename = db + '_' + schema + '_agareas'

        # SSL validation is disabled here -- presumably a self-signed
        # GeoServer certificate; confirm before hardening.
        cat = Catalog(cfg.geoserver['rest_url'],
                      username=cfg.geoserver['user'],
                      password=cfg.geoserver['password'],
                      disable_ssl_certificate_validation=True)
        try:
            print('Check if the layer exists')
            something = cat.get_store(storename, cfg.geoserver['workspace'])
            if not something:
                print("No store")
                # Raising routes control into the publish path below.
                raise Exception
            else:

                print("Store exists")
        except Exception as e:
            # Store is missing (or the lookup failed): export the table and
            # publish it as a shapefile.
            print('Entering geoserver code')
            temp_dir = tempfile.mkdtemp()
            pg_sql = """SELECT * FROM {0}.agareas""".format(schema)

            export_pg_table(temp_dir, storename, cfg.connection['host'],
                            cfg.connection['user'], cfg.connection['password'],
                            db, pg_sql)
            target_zip = os.path.join(
                os.path.join(temp_dir, storename + '.zip'))

            # Zip every exported shapefile part (skip any pre-existing zip).
            with zipfile.ZipFile(target_zip, 'w') as pg_zip:
                for f in os.listdir(os.path.join(temp_dir)):
                    if '.zip' not in f:
                        f = os.path.join(temp_dir, f)
                        pg_zip.write(f, basename(f))

            rest_url = cfg.geoserver['rest_url']

            if rest_url[-1] != "/":
                rest_url = rest_url + '/'

            headers = {
                'Content-type': 'application/zip',
            }

            request_url = '{0}workspaces/{1}/datastores/{2}/file.shp'.format(
                rest_url, cfg.geoserver['workspace'],
                storename)  # Creating the rest url

            user = cfg.geoserver['user']
            password = cfg.geoserver['password']
            requests.put(
                request_url,
                verify=False,
                headers=headers,
                data=open(target_zip, 'rb'),
                auth=(user,
                      password))  # Creating the resource on the geoserver

            # NOTE(review): cleanup is disabled, so temp_dir leaks on every
            # publish -- confirm whether rmtree was commented out on purpose.
            if temp_dir is not None:
                if os.path.exists(temp_dir):
                    print('whooo')
                    #shutil.rmtree(temp_dir)

        # Average (over ensembles) of each gid's maximum gwad.
        sql = """SELECT gid,avg(max) as max  FROM(SELECT gid,ensemble,max(gwad) FROM {0}.dssat GROUP BY gid,ensemble ORDER BY gid,ensemble)  as foo GROUP BY gid""".format(
            schema)

        cur.execute(sql)
        data = cur.fetchall()

        data.sort()

        conn.close()

        return data, storename
    except Exception as e:
        print(e)
        return e
Beispiel #47
0
class UploaderBase(TestCase):

    settings_overrides = []

    @classmethod
    def setUpClass(cls):
        super(UploaderBase, cls).setUpClass()

        # don't accidentally delete anyone's layers
        if Layer.objects.all().count():
            if 'DELETE_LAYERS' not in os.environ:
                print
                print 'FAIL: There are layers in the test database'
                print 'Will not run integration tests unless `DELETE_LAYERS`'
                print 'Is specified as an environment variable'
                print
                raise Exception('FAIL, SEE ABOVE')

        # make a test_settings module that will apply our overrides
        test_settings = ['from geonode.settings import *']
        if os.path.exists('geonode/upload/tests/test_settings.py'):
            test_settings.append(
                'from geonode.upload.tests.test_settings import *')
        for so in cls.settings_overrides:
            test_settings.append('%s=%s' % so)
        with open('integration_settings.py', 'w') as fp:
            fp.write('\n'.join(test_settings))

        # runserver with settings
        args = [
            'python', 'manage.py', 'runserver',
            '--settings=integration_settings', '--verbosity=0'
        ]
        # see http://www.doughellmann.com/PyMOTW/subprocess/#process-groups-sessions
        cls._runserver = subprocess.Popen(args,
                                          stderr=open('test.log', 'w'),
                                          preexec_fn=os.setsid)

        # await startup
        cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        for i in range(5):
            try:
                cl.get_html('/')
                break
            except:
                time.sleep(.5)
        if cls._runserver.poll() is not None:
            raise Exception("Error starting server, check test.log")

    @classmethod
    def tearDownClass(cls):
        super(UploaderBase, cls).tearDownClass()

        # kill server process group
        os.killpg(cls._runserver.pid, signal.SIGKILL)
        if os.path.exists('integration_settings.py'):
            os.unlink('integration_settings.py')

    def setUp(self):
        super(UploaderBase, self).setUp()
        self._tempfiles = []
        self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        self.catalog = Catalog(GEOSERVER_URL + 'rest', GEOSERVER_USER,
                               GEOSERVER_PASSWD)
        # @todo - this is obviously the brute force approach - ideally,
        # these cases would be more declarative and delete only the things
        # they mess with
        for l in Layer.objects.all():
            try:
                l.delete()
            except:
                print 'unable to delete layer', l
        # and destroy anything left dangling on geoserver
        cat = Layer.objects.gs_catalog
        map(lambda name: cascading_delete(cat, name),
            [l.name for l in cat.get_layers()])

    def tearDown(self):
        super(UploaderBase, self).tearDown()
        map(os.unlink, self._tempfiles)

    def check_layer_geonode_page(self, path):
        """ Check that the final layer page render's correctly after
        an layer is uploaded """
        # the final url for uploader process. This does a redirect to
        # the final layer page in geonode
        resp, _ = self.client.get_html(path)
        self.assertTrue('content-type' in resp.headers)
        # if we don't get a content type of html back, thats how we
        # know there was an error.
        self.assertTrue(resp.headers['content-type'].startswith('text/html'))

    def check_layer_geoserver_caps(self, original_name):
        """ Check that a layer shows up in GeoServer's get
        capabilities document """
        # using owslib
        wms = get_wms(layer_name=original_name)
        self.assertTrue(original_name in wms.contents,
                        '%s is not in %s' % (original_name, wms.contents))

    def check_layer_geoserver_rest(self, original_name):
        """ Check that a layer shows up in GeoServer rest api after
        the uploader is done"""
        # using gsconfig to test the geoserver rest api.
        layer = self.catalog.get_layer(original_name)
        self.assertIsNotNone(layer is not None)

    def check_and_pass_through_timestep(self):
        raise Exception('not implemented')
        redirect_to = data['redirect_to']
        self.assertEquals(redirect_to, upload_step('time'))
        resp = self.client.make_request(upload_step('time'))
        self.assertEquals(resp.code, 200)
        data = {'csrfmiddlewaretoken': self.client.get_crsf_token()}
        resp = self.client.make_request(upload_step('time'), data)
        data = json.loads(resp.read())
        return resp, data

    def complete_raster_upload(self, file_path, resp, data):
        return self.complete_upload(file_path, resp, data, is_raster=True)

    def check_save_step(self, resp, data):
        """Verify the initial save step"""
        self.assertEquals(resp.code, 200)
        self.assertTrue(isinstance(data, dict))
        # make that the upload returns a success True key
        self.assertTrue(data['success'], 'expected success but got %s' % data)
        self.assertTrue('redirect_to' in data)

    def complete_upload(self, file_path, resp, data, is_raster=False):
        """Method to check if a layer was correctly uploaded to the
        GeoNode.

        arguments: file path, the django http response

           Checks to see if a layer is configured in Django
           Checks to see if a layer is configured in GeoServer
               checks the Rest API
               checks the get cap document """

        layer_name, ext = os.path.splitext(os.path.basename(file_path))

        self.check_save_step(resp, data)

        layer_page = self.finish_upload(data['redirect_to'], layer_name,
                                        is_raster)

        self.check_layer_complete(layer_page, layer_name)

    def finish_upload(self,
                      current_step,
                      layer_name,
                      is_raster=False,
                      skip_srs=False):

        if (not is_raster and _ALLOW_TIME_STEP):
            resp, data = self.check_and_pass_through_timestep()
            self.assertEquals(resp.code, 200)
            self.assertTrue(data['success'],
                            'expected success but got %s' % data)
            self.assertTrue('redirect_to' in data)
            current_step = data['redirect_to']
            self.wait_for_progress(data.get('progress'))

        if not is_raster and not skip_srs:
            self.assertEquals(current_step, upload_step('srs'))
            # if all is good, the srs step will redirect to the final page
            resp = self.client.get(current_step)
        else:
            self.assertEquals(current_step, upload_step('final'))
            resp = self.client.get(current_step)

        self.assertEquals(resp.code, 200)
        url = json.loads(resp.read())['url']
        # and the final page should redirect to tha layer page
        # @todo - make the check match completely (endswith at least)
        # currently working around potential 'orphaned' db tables
        self.assertTrue(layer_name in url,
                        'expected %s in URL, got %s' % (layer_name, url))
        return url

    def check_upload_model(self, original_name):
        # we can only test this if we're using the same DB as the test instance
        if not settings.OGC_SERVER['default']['OPTIONS']['DATASTORE']: return
        try:
            upload = Upload.objects.get(layer__name=original_name)
        except Upload.DoesNotExist:
            self.fail('expected to find Upload object for %s' % original_name)
        self.assertTrue(upload.complete)

    def check_layer_complete(self, layer_page, original_name):
        '''check everything to verify the layer is complete'''
        self.check_layer_geonode_page(layer_page)
        # @todo use the original_name
        # currently working around potential 'orphaned' db tables
        # this grabs the name from the url (it might contain a 0)
        original_name = os.path.basename(layer_page).split(':')[1]
        self.check_layer_geoserver_caps(original_name)
        self.check_layer_geoserver_rest(original_name)
        self.check_upload_model(original_name)

    def check_invalid_projection(self, layer_name, resp, data):
        """ Makes sure that we got the correct response from an layer
        that can't be uploaded"""
        if _ALLOW_TIME_STEP:
            resp, data = self.check_and_pass_through_timestep()
        self.assertTrue(resp.code, 200)
        self.assertTrue(data['success'])
        self.assertEquals(upload_step("srs"), data['redirect_to'])
        resp, soup = self.client.get_html(data['redirect_to'])
        # grab an h2 and find the name there as part of a message saying it's bad
        h2 = soup.find_all(['h2'])[0]
        self.assertTrue(str(h2).find(layer_name))

    def upload_folder_of_files(self, folder, final_check):

        mains = ('.tif', '.shp', '.zip')

        def is_main(_file):
            _, ext = os.path.splitext(_file)
            return (ext.lower() in mains)

        self.client.login()
        main_files = filter(is_main, os.listdir(folder))
        for main in main_files:
            # get the abs path to the file
            _file = os.path.join(folder, main)
            base, _ = os.path.splitext(_file)
            resp, data = self.client.upload_file(_file)
            self.wait_for_progress(data.get('progress'))
            final_check(base, resp, data)

    def upload_file(self, fname, final_check, check_name=None):
        self.client.login()
        if not check_name:
            check_name, _ = os.path.splitext(fname)
        resp, data = self.client.upload_file(fname)
        self.wait_for_progress(data.get('progress'))
        final_check(check_name, resp, data)

    def wait_for_progress(self, progress_url):
        if progress_url:
            resp = self.client.get(progress_url)
            assert resp.getcode() == 200, 'Invalid progress status code'
            raw_data = resp.read()
            json_data = json.loads(raw_data)
            # "COMPLETE" state means done
            if json_data.get('state', '') == 'RUNNING':
                time.sleep(0.1)
                self.wait_for_progress(progress_url)

    def temp_file(self, ext):
        fd, abspath = tempfile.mkstemp(ext)
        self._tempfiles.append(abspath)
        return fd, abspath

    def make_csv(self, *rows):
        fd, abspath = self.temp_file('.csv')
        fp = os.fdopen(fd, 'wb')
        out = csv.writer(fp)
        for r in rows:
            out.writerow(r)
        fp.close()
        return abspath
Beispiel #48
0
class wrap_geoserver:
    """Convenience wrapper around a gsconfig ``Catalog`` connection.

    On construction (unless ``easy=True``) the layers, stores, workspaces
    and styles of the target GeoServer are eagerly cached for fast
    membership checks by the ``create_*``/``publish_*``/``upload_*`` helpers.
    """

    def __init__(
        self, geoserver_name, username=username, password=password, easy=False
    ):
        # geoserver_name may be an alias into the REST mapping
        # or a literal REST URL.
        if geoserver_name in list(REST.keys()):
            self.path = REST[geoserver_name]
        else:
            self.path = geoserver_name
        self.wms = self.path.replace("rest/", "wms")

        self.name = geoserver_name
        self.catalog = Catalog(self.path, username, password)

        if not easy:
            self.layers = []
            self.layer_names = []

            for layer in self.catalog.get_layers():
                self.layers.append(layer)
                self.layer_names.append(layer.name)

            self.stores = [store for store in self.catalog.get_stores()]
            self.store_names = [store.name for store in self.stores]

            styles = []
            self.workspaces = []
            self.workspace_names = []

            for workspace in self.catalog.get_workspaces():
                styles = styles + self.catalog.get_styles(workspace)
                self.workspace_names.append(workspace._name)
                self.workspaces.append(workspace)

            self.styles = styles + [style for style in self.catalog.get_styles()]
            self.style_names = [style.name for style in self.styles]

    def unpack(self, workspace_name, store_type="datastore"):
        """Map each layer in the workspace's stores to its default style.

        :param workspace_name: workspace to inspect
        :param store_type: ``"datastore"`` or ``"coveragestore"``
        :raises ValueError: for any other *store_type* (previously this only
            printed a warning and then crashed on the unbound ``store_url``)
        :return: dict of ``{layer_name: default_style}``
        """
        layers_and_styles = {}
        features = []
        workspace = self.get_workspace(workspace_name)

        if store_type == "datastore":
            store_url = workspace.datastore_url
        elif store_type == "coveragestore":
            store_url = workspace.coveragestore_url
        else:
            # FIX: fail fast instead of continuing with store_url unbound
            raise ValueError("No correct store given: {}".format(store_type))

        # NOTE(review): this URL always targets /datastores/ even when
        # store_type == "coveragestore" -- confirm that is intended.
        for datastore in tqdm(get(store_url, "name")):
            url = "{}workspaces/{}/datastores/{}".format(
                self.path, workspace.name, datastore
            )
            features = features + get(url, between_quotes=True)

        for feature in features:
            layer_name = os.path.basename(feature).split(".")[0]
            self.get_layer(self.get_slug(workspace.name, layer_name))
            layers_and_styles[layer_name] = self.layer.default_style

        setattr(self, workspace_name + "_data", layers_and_styles)
        return layers_and_styles

    def get_layer(self, layer, easy=False):
        """Load a layer and (unless ``easy``) cache its resource/style data."""
        self.layer = self.catalog.get_layer(layer)

        if not easy:
            self.resource = self.layer.resource
            self.layer_name = self.layer.resource.name
            self.sld_name = self.layer.default_style.name
            self.sld_body = self.layer.default_style.sld_body
            self.layer_latlon_bbox = self.layer.resource.latlon_bbox
            self.layer_title = self.layer.resource.title
            self.layer_abstract = self.layer.resource.abstract

    def get_store(self, layer):
        # NOTE(review): the *layer* argument is ignored -- the store comes
        # from the previously loaded self.layer; call get_layer() first.
        self.store = self.layer.resource._store

    def get_resource(self):
        """Refresh ``self.resource`` from the current layer and store."""
        self.resource = self.catalog.get_resource(self.layer.name, self.store)

    def get_workspace(self, workspace_name):
        """Load a workspace by name, cache it, and return it."""
        self.workspace = self.catalog.get_workspace(workspace_name)
        self.workspace_name = self.workspace._name
        return self.workspace

    def write_abstract(self, data, load_resource=True):
        """Set the current resource's abstract and persist it."""
        if load_resource:
            self.get_resource()
        self.resource.abstract = data
        self.catalog.save(self.resource)

    def write_title(self, title):
        """Set the current resource's title and persist it."""
        self.resource.title = title
        self.catalog.save(self.resource)

    def get_connection_parameters(self):
        """Return connection parameters of the current resource's store."""
        self.get_resource()
        return self.resource.store.connection_parameters

    def create_workspace(self, workspace_name):
        """Create the workspace if missing, then select it as current."""
        workspace_exists = workspace_name in self.workspace_names

        if not workspace_exists:
            self.workspace = self.catalog.create_workspace(workspace_name)

        else:
            print("workspace already exists, using existing workspace")

        self.workspace = self.catalog.get_workspace(workspace_name)
        self.workspace_name = workspace_name

    def create_postgis_datastore(self, store_name, workspace_name, pg_data):
        """Get or create a PostGIS datastore using *pg_data* credentials."""
        try:
            self.store = self.catalog.get_store(store_name, self.workspace_name)
            print("store within workspace exists, using existing store")

        except Exception as e:
            print(e)

            ds = self.catalog.create_datastore(store_name, workspace_name)
            ds.connection_parameters.update(
                host=pg_data["host"],
                port=pg_data["port"],
                database=pg_data["database"],
                user=pg_data["username"],
                passwd=pg_data["password"],
                dbtype="postgis",
                schema="public",
            )

            self.save(ds)
            self.store = self.catalog.get_store(store_name, self.workspace_name)

        # FIX: previously only recorded on the create path
        self.store_name = store_name

    def publish_layer(
        self, layer_name, workspace_name, overwrite=False, epsg="3857", reload=False
    ):
        """Publish a featuretype from the current store, optionally replacing
        an existing layer of the same name."""
        layer_exists = layer_name in self.layer_names
        # if layer_name in self.workspace_layers[workspace_name]:
        slug = self.get_slug(workspace_name, layer_name)
        if overwrite and layer_exists:
            print("Layer exists, deleting layer")
            try:

                self.layer = self.catalog.get_layer(slug)
                self.delete(self.layer)
                self.reload()

                layer_exists = False

            except Exception as e:
                print(e)
                print("Layer does not exist in workspace")
                layer_exists = False

        if not layer_exists:

            feature_type = self.catalog.publish_featuretype(
                layer_name,
                self.store,
                "EPSG:{}".format(str(epsg)),
                srs="EPSG:{}".format(str(epsg)),
            )
            self.save(feature_type)
            self.feature_type = feature_type

        else:
            print("layer already exists, using existing layer")

        if reload:
            self.get_layer(slug)

        self.layer_name = layer_name

    def publish_layergroup(self, name, layers, styles=(), bounds=None, workspace=None):
        """Create and save a layer group from the given layers/styles."""
        layer_group = self.catalog.create_layergroup(
            name, layers, styles, bounds, workspace
        )
        self.save(layer_group)

    def save(self, save_object):
        """Persist any gsconfig object through the catalog."""
        return self.catalog.save(save_object)

    def close(self):
        """Drop the catalog reference."""
        self.catalog = None

    def delete(self, delete_object):
        """Delete any gsconfig object through the catalog."""
        self.catalog.delete(delete_object)

    def reload(self):
        """Ask GeoServer to reload its configuration."""
        self.catalog.reload()

    def upload_shapefile(self, layer_name, shapefile_path):
        """Create a featurestore in the current workspace from a shapefile."""
        path = shapefile_path.split(".shp")[0]
        shapefile = shapefile_and_friends(path)
        ft = self.catalog.create_featurestore(layer_name, shapefile, self.workspace)
        self.save(ft)

    def upload_sld(self, sld_name, workspace_name, sld, overwrite=True):
        """Upload an SLD style, trying SLD 1.1 first and falling back to 1.0."""
        style_exists = sld_name in self.style_names

        if overwrite and style_exists:
            print("Overwriting style")
            style = self.catalog.get_style(sld_name, workspace_name)
            self.delete(style)
            self.reload()
            style_exists = False

        if not style_exists:
            try:
                self.catalog.create_style(sld_name, sld, False, workspace_name, "sld11")
            except Exception as e:
                print(e)

                # retry as SLD 1.0 after removing the partial style
                style = self.catalog.get_style(sld_name, workspace_name)
                self.delete(style)
                self.reload()
                self.catalog.create_style(sld_name, sld, False, workspace_name, "sld10")
            self.style_name = sld_name

        else:
            if style_exists:
                print("Style already exists, using current style")
                self.style_name = sld_name

    def set_sld_for_layer(self, workspace_name=None, style_name=None, use_custom=False):
        """Assign a style (the current one, or a custom one) as the current
        layer's default style and save the layer."""
        if not use_custom:
            workspace_name = self.workspace_name
            style_name = self.style_name
            self.style_slug = self.get_slug(workspace_name, style_name)

        else:
            if workspace_name is None:
                self.style_slug = style_name
            else:
                self.style_slug = self.get_slug(workspace_name, style_name)

        self.style = self.catalog.get_style(self.style_slug)

        print("Setting {} for {}".format(self.style.name, self.layer.name))
        self.layer.default_style = self.style
        self.save(self.layer)

    def get_slug(self, workspace, name):
        """Return the ``workspace:name`` identifier."""
        return "{}:{}".format(workspace, name)

    def get_slug_data(self, slug):
        """Split a ``workspace:name`` slug into its two parts."""
        workspace_name = slug.split(":")[0]
        layer_name = slug.split(":")[1]
        return workspace_name, layer_name

    def get_sld(self, layer_slug=None):
        """Load a style's SLD body; defaults to ``self.layer_slug``.

        NOTE(review): ``self.layer_slug`` is never assigned in this class --
        calling without *layer_slug* will raise AttributeError; confirm.
        """
        if layer_slug is None:
            self.style = self.catalog.get_style(self.layer_slug)
        else:
            self.style = self.catalog.get_style(layer_slug)

        self.sld_body = self.style.sld_body
        return self.sld_body

    def get_layer_workspace(self, layer_name):
        """Return the workspace name that owns *layer_name*."""
        return self.catalog.get_layer(layer_name).resource.workspace.name
Beispiel #49
0
def _geoserver_info_provision(url):
    from geoserver.catalog import Catalog
    from geosk.mdtools.geoserver_extra import Settings
    cat = Catalog(url,
        username="******",
        password="******"
    )
    gs_settings = Settings(cat)
    print "GeoServer service url is {0}".format(cat.service_url)
    contact_country = os.getenv("SERVICEPROVIDER_COUNTRY", "")
    print "contact_country is {0}".format(contact_country)
    provider_url = os.getenv("SERVICEPROVIDER_SITE", "")
    print "provider_url is {0}".format(provider_url)
    contact_role = os.getenv("SERVICEPROVIDER_INDIVIDUALNAME", "")
    print "contact_role is {0}".format(contact_role)
    contact_city = os.getenv("SERVICEPROVIDER_CITY", "")
    print "contact_city is {0}".format(contact_city)
    contact_instructions = os.getenv("SERVICEPROVIDER_INSTRUCTIONS", "")
    print "contact_instructions is {0}".format(contact_instructions)
    contact_position = os.getenv("SERVICEPROVIDER_POSITIONNAME", "")
    print "contact_position is {0}".format(contact_position)
    contact_fax = os.getenv("SERVICEPROVIDER_FAX", "")
    print "contact_fax is {0}".format(contact_fax)
    node_title = os.getenv("SERVICEPROVIDER_NODETITLE", "")
    print "node_title is {0}".format(node_title)
    contact_hours = os.getenv("SERVICEPROVIDER_HOURS", "")
    print "contact_hours is {0}".format(contact_hours)
    node_name = os.getenv("SERVICEPROVIDER_NODENAME", "")
    print "node_name is {0}".format(node_name)
    node_abstract = os.getenv("SERVICEPROVIDER_NODEABSTRACT", "")
    print "node_abstract is {0}".format(node_abstract)
    contact_address = os.getenv("SERVICEPROVIDER_ADDRESS", "")
    print "contact_address is {0}".format(contact_address)
    contact_email = os.getenv("SERVICEPROVIDER_EMAIL", "")
    print "contact_email is {0}".format(contact_email)
    contact_url = os.getenv("SERVICEPROVIDER_SITE", "")
    print "contact_url is {0}".format(contact_url)
    contact_stateprovince = os.getenv("SERVICEPROVIDER_STATEPROVINCE", "")
    print "contact_stateprovince is {0}".format(contact_stateprovince)
    provider_name = os.getenv("SERVICEPROVIDER_NAME", "")
    print "provider_name is {0}".format(provider_name)
    contact_postalcode = os.getenv("SERVICEPROVIDER_POSTALCODE", "")
    print "contact_postalcode is {0}".format(contact_postalcode)
    contact_phone = os.getenv("SERVICEPROVIDER_PHONE", "")
    print "contact_phone is {0}".format(contact_phone)
    contact_name = os.getenv("SERVICEPROVIDER_NAME", "")
    print "contact_name is {0}".format(contact_name)
    node_keywords = os.getenv("SERVICEPROVIDER_NODEKEYWORDS", "")
    print "node_keywords is {0}".format(node_keywords)
    contact = {
        "contact": {
            "address": contact_address,
            "addressCity": contact_city,
            "addressCountry": contact_country,
            "addressPostalCode": contact_postalcode,
            "addressState": contact_stateprovince,
            "addressType": None,
            "contactEmail": contact_email,
            "contactFacsimile": contact_fax,
            "contactOrganization": provider_name,
            "contactPerson": contact_name,
            "contactPosition": contact_position,
            "contactVoice": contact_phone
        }
    }

    def get_service_json(service):
        # get current values
        service_base = gs_settings.get_service_config(service)
        service_base[service]['name'] = "{0} - {1}".format(
            node_name,
            service
        )
        service_base[service]['title'] = "{0} - {1}".format(
            node_title,
            service
        )
        service_base[service]['maintainer'] = provider_url if provider_url is not None and provider_url else os.environ['SITEURL']
        service_base[service]['abstrct'] = node_abstract if node_abstract is not None else '-'
        service_base[service]['onlineResource'] = os.environ['SITEURL']
        return service_base

    gs_settings.update_contact(contact)
    gs_settings.update_service('wms', get_service_json('wms'))
    gs_settings.update_service('wfs', get_service_json('wfs'))
    gs_settings.update_service('wcs', get_service_json('wcs'))
Beispiel #50
0
 def setUp(self):
     """Create a Catalog client against the local GeoServer REST endpoint."""
     self.cat = Catalog("http://localhost:8080/geoserver/rest")
Beispiel #51
0
def gs_slurp(ignore_errors=True,
             verbosity=1,
             console=None,
             owner=None,
             workspace=None):
    """Configure the layers available in GeoServer in GeoNode.

       It returns a list of dictionaries with the name of the layer,
       the result of the operation and the errors and traceback if it failed.
    """
    if console is None:
        console = open(os.devnull, 'w')

    if verbosity > 1:
        print >> console, "Inspecting the available layers in GeoServer ..."
    url = "%srest" % settings.GEOSERVER_BASE_URL
    cat = Catalog(url, _user, _password)
    if workspace is not None:
        workspace = cat.get_workspace(workspace)
    resources = cat.get_resources(workspace=workspace)
    number = len(resources)
    if verbosity > 1:
        msg = "Found %d layers, starting processing" % number
        print >> console, msg
    output = []
    for i, resource in enumerate(resources):
        name = resource.name
        store = resource.store
        workspace = store.workspace
        try:
            # Avoid circular import problem
            from geonode.layers.models import Layer
            layer, created = Layer.objects.get_or_create(
                name=name,
                defaults={
                    "workspace": workspace.name,
                    "store": store.name,
                    "storeType": store.resource_type,
                    "typename": "%s:%s" % (workspace.name, resource.name),
                    "title": resource.title or 'No title provided',
                    "abstract": resource.abstract or 'No abstract provided',
                    "owner": owner,
                    "uuid": str(uuid.uuid4())
                })

            layer.save()

        except Exception, e:
            if ignore_errors:
                status = 'failed'
                exception_type, error, traceback = sys.exc_info()
            else:
                if verbosity > 0:
                    msg = "Stopping process because --ignore-errors was not set and an error was found."
                    print >> sys.stderr, msg
                raise Exception('Failed to process %s' % resource.name,
                                e), None, sys.exc_info()[2]
        else:
            if created:
                layer.set_default_permissions()
                status = 'created'
            else:
                status = 'updated'

        msg = "[%s] Layer %s (%d/%d)" % (status, name, i + 1, number)
        info = {'name': name, 'status': status}
        if status == 'failed':
            info['traceback'] = traceback
            info['exception_type'] = exception_type
            info['error'] = error
        output.append(info)
        if verbosity > 0:
            print >> console, msg
Beispiel #52
0
class UploaderBase(GeoNodeLiveTestSupport):
    """Shared fixture/assertion helpers for GeoNode uploader live tests."""

    type = 'layer'

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        # remove the generated settings override, if present
        if os.path.exists('integration_settings.py'):
            os.unlink('integration_settings.py')

    def setUp(self):
        # await startup
        cl = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        for i in range(10):
            time.sleep(.2)
            try:
                cl.get_html('/', debug=False)
                break
            except BaseException:
                pass

        self.client = Client(GEONODE_URL, GEONODE_USER, GEONODE_PASSWD)
        self.catalog = Catalog(
            GEOSERVER_URL + 'rest',
            GEOSERVER_USER,
            GEOSERVER_PASSWD,
            retries=ogc_server_settings.MAX_RETRIES,
            backoff_factor=ogc_server_settings.BACKOFF_FACTOR)

        settings.DATABASES['default']['NAME'] = DB_NAME

        connections['default'].settings_dict['ATOMIC_REQUESTS'] = False
        connections['default'].connect()

        self._tempfiles = []

    def _post_teardown(self):
        pass

    def tearDown(self):
        connections.databases['default']['ATOMIC_REQUESTS'] = False

        for temp_file in self._tempfiles:
            os.unlink(temp_file)

        # Cleanup
        Upload.objects.all().delete()
        Layer.objects.all().delete()
        Map.objects.all().delete()
        Document.objects.all().delete()

        if settings.OGC_SERVER['default'].get("GEOFENCE_SECURITY_ENABLED",
                                              False):
            from geonode.security.utils import purge_geofence_all
            purge_geofence_all()

    def check_layer_geonode_page(self, path):
        """ Check that the final layer page render's correctly after
        an layer is uploaded """
        # the final url for uploader process. This does a redirect to
        # the final layer page in geonode
        resp, _ = self.client.get_html(path)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('content-type' in resp.headers)

    def check_layer_geoserver_caps(self, type_name):
        """ Check that a layer shows up in GeoServer's get
        capabilities document """
        # using owslib
        wms = get_wms(type_name=type_name,
                      username=GEOSERVER_USER,
                      password=GEOSERVER_PASSWD)
        ws, layer_name = type_name.split(':')
        self.assertTrue(layer_name in wms.contents,
                        '%s is not in %s' % (layer_name, wms.contents))

    def check_layer_geoserver_rest(self, layer_name):
        """ Check that a layer shows up in GeoServer rest api after
        the uploader is done"""
        # using gsconfig to test the geoserver rest api.
        layer = self.catalog.get_layer(layer_name)
        # FIX: was assertIsNotNone(layer is not None), which always passed
        self.assertIsNotNone(layer)

    def check_and_pass_through_timestep(self, redirect_to):
        """Advance the uploader past the time (or srs) step via CSRF POST."""
        time_step = upload_step('time')
        srs_step = upload_step('srs')
        # NOTE(review): when on the srs step, the first request's response
        # is discarded and the request is repeated below -- confirm intended
        if srs_step in redirect_to:
            resp = self.client.make_request(redirect_to)
        else:
            self.assertTrue(time_step in redirect_to)
        resp = self.client.make_request(redirect_to)
        token = self.client.get_csrf_token(True)
        self.assertEqual(resp.status_code, 200)
        resp = self.client.make_request(redirect_to,
                                        {'csrfmiddlewaretoken': token},
                                        ajax=True)
        return resp, resp.json()

    def complete_raster_upload(self, file_path, resp, data):
        """Raster variant of complete_upload."""
        return self.complete_upload(file_path, resp, data, is_raster=True)

    def check_save_step(self, resp, data):
        """Verify the initial save step"""
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(isinstance(data, dict))
        # make that the upload returns a success True key
        self.assertTrue(data['success'], 'expected success but got %s' % data)
        self.assertTrue('redirect_to' in data)

    def complete_upload(self, file_path, resp, data, is_raster=False):
        """Method to check if a layer was correctly uploaded to the
        GeoNode.

        arguments: file path, the django http response

           Checks to see if a layer is configured in Django
           Checks to see if a layer is configured in GeoServer
               checks the Rest API
               checks the get cap document """

        layer_name, ext = os.path.splitext(os.path.basename(file_path))

        if not isinstance(data, string_types):
            self.check_save_step(resp, data)

            layer_page = self.finish_upload(data['redirect_to'], layer_name,
                                            is_raster)

            self.check_layer_complete(layer_page, layer_name)

    def finish_upload(self,
                      current_step,
                      layer_name,
                      is_raster=False,
                      skip_srs=False):
        """Drive the uploader through its remaining steps; return the final
        layer page URL (or the last step URL if it could not be parsed)."""
        if not is_raster and _ALLOW_TIME_STEP:
            resp, data = self.check_and_pass_through_timestep(current_step)
            self.assertEqual(resp.status_code, 200)
            if not isinstance(data, string_types):
                if data['success']:
                    self.assertTrue(data['success'],
                                    'expected success but got %s' % data)
                    self.assertTrue('redirect_to' in data)
                    current_step = data['redirect_to']
                    self.wait_for_progress(data.get('progress'))

        if not is_raster and not skip_srs:
            self.assertTrue(upload_step('srs') in current_step)
            # if all is good, the srs step will redirect to the final page
            final_step = current_step.replace('srs', 'final')
            resp = self.client.make_request(final_step)
        else:
            self.assertTrue(upload_step('final') in current_step)
            resp = self.client.get(current_step)

        self.assertEqual(resp.status_code, 200)
        try:
            c = resp.json()
            url = c['url']
            url = unquote(url)
            # and the final page should redirect to the layer page
            # @todo - make the check match completely (endswith at least)
            # currently working around potential 'orphaned' db tables
            self.assertTrue(layer_name in url,
                            'expected %s in URL, got %s' % (layer_name, url))
            return url
        except BaseException:
            return current_step

    def check_upload_model(self, original_name):
        # we can only test this if we're using the same DB as the test instance
        if not settings.OGC_SERVER['default']['DATASTORE']:
            return
        upload = None
        try:
            upload = Upload.objects.filter(
                name__icontains=str(original_name)).last()
            # Making sure the Upload object is present on the DB and
            # the import session is COMPLETE
            if upload and not upload.complete:
                logger.warning("Upload not complete for Layer %s" %
                               original_name)
        except Upload.DoesNotExist:
            self.fail('expected to find Upload object for %s' % original_name)

    def check_layer_complete(self, layer_page, original_name):
        '''check everything to verify the layer is complete'''
        self.check_layer_geonode_page(layer_page)
        # @todo use the original_name
        # currently working around potential 'orphaned' db tables
        # this grabs the name from the url (it might contain a 0)
        type_name = os.path.basename(layer_page)
        layer_name = original_name
        try:
            layer_name = type_name.split(':')[1]
        except BaseException:
            pass

        # work around acl caching on geoserver side of things
        caps_found = False
        for i in range(10):
            time.sleep(.5)
            try:
                self.check_layer_geoserver_caps(type_name)
                caps_found = True
                # FIX: stop polling once the capabilities check succeeds
                break
            except BaseException:
                pass
        if not caps_found:
            logger.warning(
                "Could not recognize Layer %s on GeoServer WMS Capa" %
                original_name)
        self.check_layer_geoserver_rest(layer_name)
        self.check_upload_model(layer_name)

    def check_invalid_projection(self, layer_name, resp, data):
        """ Makes sure that we got the correct response from an layer
        that can't be uploaded"""
        # FIX: was assertTrue(resp.status_code, 200) -- 200 was only a
        # message argument, so the status was never actually checked
        self.assertEqual(resp.status_code, 200)
        if not isinstance(data, string_types):
            self.assertTrue(data['success'])
            srs_step = upload_step("srs")
            if "srs" in data['redirect_to']:
                self.assertTrue(srs_step in data['redirect_to'])
                resp, soup = self.client.get_html(data['redirect_to'])
                # grab an h2 and find the name there as part of a message saying it's
                # bad
                h2 = soup.find_all(['h2'])[0]
                self.assertTrue(str(h2).find(layer_name))

    def check_upload_complete(self, layer_name, resp, data):
        """ Makes sure that we got the correct response from an layer
        that can't be uploaded"""
        # FIX: was assertTrue(resp.status_code, 200) -- see above
        self.assertEqual(resp.status_code, 200)
        if not isinstance(data, string_types):
            self.assertTrue(data['success'])
            final_step = upload_step("final")
            if "final" in data['redirect_to']:
                self.assertTrue(final_step in data['redirect_to'])

    def upload_folder_of_files(self, folder, final_check, session_ids=None):
        """Upload every main dataset file in *folder*, collecting uploader
        session ids into *session_ids* when provided."""
        mains = ('.tif', '.shp', '.zip', '.asc')

        def is_main(_file):
            _, ext = os.path.splitext(_file)
            return (ext.lower() in mains)

        for main in filter(is_main, os.listdir(folder)):
            # get the abs path to the file
            _file = os.path.join(folder, main)
            base, _ = os.path.splitext(_file)
            resp, data = self.client.upload_file(_file)
            if session_ids is not None:
                if not isinstance(data, string_types) and data.get('url'):
                    session_id = re.search(r'.*id=(\d+)',
                                           data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
            if not isinstance(data, string_types):
                self.wait_for_progress(data.get('progress'))
            final_check(base, resp, data)

    def upload_file(self,
                    fname,
                    final_check,
                    check_name=None,
                    session_ids=None):
        """Upload one file, optionally collecting its uploader session id."""
        if not check_name:
            check_name, _ = os.path.splitext(fname)
        resp, data = self.client.upload_file(fname)
        if session_ids is not None:
            if not isinstance(data, string_types):
                if data.get('url'):
                    session_id = re.search(r'.*id=(\d+)',
                                           data.get('url')).group(1)
                    if session_id:
                        session_ids += [session_id]
        if not isinstance(data, string_types):
            self.wait_for_progress(data.get('progress'))
        final_check(check_name, resp, data)

    def wait_for_progress(self, progress_url):
        """Poll *progress_url* until the reported state leaves RUNNING."""
        if progress_url:
            resp = self.client.get(progress_url)
            assert resp.getcode() == 200, 'Invalid progress status code'
            json_data = resp.json()
            # "COMPLETE" state means done
            if json_data.get('state', '') == 'RUNNING':
                time.sleep(0.1)
                self.wait_for_progress(progress_url)

    def temp_file(self, ext):
        """Create a temp file with suffix *ext*; track it for cleanup."""
        fd, abspath = tempfile.mkstemp(ext)
        self._tempfiles.append(abspath)
        return fd, abspath

    def make_csv(self, *rows):
        """Write *rows* to a new temporary CSV file; return its path."""
        fd, abspath = self.temp_file('.csv')
        fp = os.fdopen(fd, 'wb')
        out = csv.writer(fp)
        for r in rows:
            out.writerow(r)
        fp.close()
        return abspath
Beispiel #53
0
class CatalogTests(unittest.TestCase):
    """Integration tests for gsconfig against a local GeoServer instance
    loaded with the standard sample data."""

    def setUp(self):
        self.cat = Catalog("http://localhost:8080/geoserver/rest")

    def testAbout(self):
        about_html = self.cat.about()
        self.assertTrue(
            '<html xmlns="http://www.w3.org/1999/xhtml"' in about_html)

    def testGSVersion(self):
        version = self.cat.gsversion()
        # FIX: raw string for the regex (avoids invalid escape sequences)
        pat = re.compile(r'\d\.\d(\.[\dx]|-SNAPSHOT)')
        self.assertTrue(pat.match('2.2.x'))
        self.assertTrue(pat.match('2.3.2'))
        self.assertTrue(pat.match('2.3-SNAPSHOT'))
        self.assertFalse(pat.match('2.3.y'))
        self.assertFalse(pat.match('233'))
        self.assertTrue(pat.match(version))

    def testWorkspaces(self):
        self.assertEqual(7, len(self.cat.get_workspaces()))
        # marking out test since geoserver default workspace is not consistent
        # self.assertEqual("cite", self.cat.get_default_workspace().name)
        self.assertEqual("topp", self.cat.get_workspace("topp").name)

    def testStores(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        self.assertEqual(9, len(self.cat.get_stores()))
        self.assertEqual(2, len(self.cat.get_stores(topp)))
        self.assertEqual(2, len(self.cat.get_stores(sf)))
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile", topp).name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("states_shapefile",
                         self.cat.get_store("states_shapefile").name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem", sf).name)
        self.assertEqual("sfdem", self.cat.get_store("sfdem").name)

    def testResources(self):
        topp = self.cat.get_workspace("topp")
        sf = self.cat.get_workspace("sf")
        states = self.cat.get_store("states_shapefile", topp)
        sfdem = self.cat.get_store("sfdem", sf)
        self.assertEqual(19, len(self.cat.get_resources()))
        self.assertEqual(1, len(self.cat.get_resources(states)))
        self.assertEqual(5, len(self.cat.get_resources(workspace=topp)))
        self.assertEqual(1, len(self.cat.get_resources(sfdem)))
        self.assertEqual(6, len(self.cat.get_resources(workspace=sf)))

        self.assertEqual("states",
                         self.cat.get_resource("states", states).name)
        self.assertEqual("states",
                         self.cat.get_resource("states", workspace=topp).name)
        self.assertEqual("states", self.cat.get_resource("states").name)
        states = self.cat.get_resource("states")

        fields = [
            states.title, states.abstract, states.native_bbox,
            states.latlon_bbox, states.projection, states.projection_policy
        ]

        self.assertFalse(None in fields, str(fields))
        self.assertFalse(len(states.keywords) == 0)
        self.assertFalse(len(states.attributes) == 0)
        self.assertTrue(states.enabled)

        self.assertEqual("sfdem", self.cat.get_resource("sfdem", sfdem).name)
        self.assertEqual("sfdem",
                         self.cat.get_resource("sfdem", workspace=sf).name)
        self.assertEqual("sfdem", self.cat.get_resource("sfdem").name)

    def testLayers(self):
        expected = set([
            "Arc_Sample", "Pk50095", "Img_Sample", "mosaic", "sfdem",
            "bugsites", "restricted", "streams", "archsites", "roads",
            "tasmania_roads", "tasmania_water_bodies",
            "tasmania_state_boundaries", "tasmania_cities", "states",
            "poly_landmarks", "tiger_roads", "poi", "giant_polygon"
        ])
        actual = set(l.name for l in self.cat.get_layers())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layer list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)

        states = self.cat.get_layer("states")

        # FIX: was assert_("states", states.name) -- the two-argument form
        # treats "states" as the expression and always passed
        self.assertEqual("states", states.name)
        self.assertTrue(isinstance(states.resource, ResourceInfo))
        self.assertEqual(set(s.name for s in states.styles),
                         set(['pophatch', 'polygon']))
        self.assertEqual(states.default_style.name, "population")

    def testLayerGroups(self):
        expected = set(["tasmania", "tiger-ny", "spearfish"])
        actual = set(l.name for l in self.cat.get_layergroups())
        missing = expected - actual
        extras = actual - expected
        message = "Actual layergroup list did not match expected! (Extras: %s) (Missing: %s)" % (
            extras, missing)
        self.assertTrue(len(expected ^ actual) == 0, message)

        tas = self.cat.get_layergroup("tasmania")

        # FIX: same two-argument assert_ misuse as in testLayers
        self.assertEqual("tasmania", tas.name)
        self.assertTrue(isinstance(tas, LayerGroup))
        self.assertEqual(tas.layers, [
            'tasmania_state_boundaries', 'tasmania_water_bodies',
            'tasmania_roads', 'tasmania_cities'
        ], tas.layers)
        self.assertEqual(tas.styles, [None, None, None, None], tas.styles)

    def testStyles(self):
        self.assertEqual(20, len(self.cat.get_styles()))
        self.assertEqual("population", self.cat.get_style("population").name)
        self.assertEqual("popshade.sld",
                         self.cat.get_style("population").filename)
        self.assertEqual("population",
                         self.cat.get_style("population").sld_name)
        self.assertTrue(self.cat.get_style('non-existing-style') is None)

    def testEscaping(self):
        # GSConfig is inconsistent about using exceptions vs. returning None
        # when a resource isn't found.
        # But the basic idea is that none of them should throw HTTP errors from
        # misconstructed URLS
        self.cat.get_style("best style ever")
        self.cat.get_workspace("best workspace ever")
        try:
            self.cat.get_store(workspace="best workspace ever",
                               name="best store ever")
            self.fail('expected exception')
        except FailedRequestError as fre:
            # NOTE(review): fre.message is a Python 2-only attribute
            self.assertEqual('No store found named: best store ever',
                             fre.message)
        try:
            self.cat.get_resource(workspace="best workspace ever",
                                  store="best store ever",
                                  name="best resource ever")
        except FailedRequestError as fre:
            self.assertEqual('No store found named: best store ever',
                             fre.message)
def _solr_envelope(resource):
    """Build a Solr spatial ENVELOPE(minX, maxX, maxY, minY) string from a
    resource's lat/lon bounding box, which gsconfig exposes as
    (minx, maxx, miny, maxy, crs)."""
    bbox = resource.latlon_bbox[:4]
    return 'ENVELOPE({0},{1},{2},{3})'.format(bbox[0], bbox[1],
                                              bbox[3], bbox[2])


def createDataStore(name, filename, format="shapefile"):
    """Create or update a GeoServer store for *filename* and register a layer.

    :param name: store (and layer) name to create on GeoServer
    :param filename: path to the shapefile or GeoTIFF on disk
    :param format: "shapefile" or "image" (GeoTIFF); any other value is a
        no-op that returns True
    :return: dict with keys "solr_geom", "msg" and "resource_type" for the
        supported formats, True otherwise
    """
    cat = Catalog("{0}/rest/".format(geoserver_connection), geoserver_username,
                  geoserver_password)
    ws = cat.get_workspace(workspace)
    msg = ""
    if format == "shapefile":
        shapefile = shapefile_and_friends(filename)
        try:
            cat.create_featurestore(name, shapefile, workspace)
        except ConflictingDataError as inst:
            # Store already exists: keep the message and update it below.
            msg = str(inst)
        resource = cat.get_resource(name, workspace=ws)
        # Declare the CRS first, then force reprojection to it.
        resource.projection = 'EPSG:4326'
        cat.save(resource)
        resource.projection_policy = 'REPROJECT_TO_DECLARED'
        cat.save(resource)
        resource.refresh()
        return {
            "solr_geom": _solr_envelope(resource),
            "msg": msg,
            "resource_type": resource.resource_type
        }
    elif format == "image":
        if cat.get_store(name):
            newcs = cat.get_store(name, workspace=ws)
            msg = "Geoserver datastore already existed. Update existing datastore."
        else:
            newcs = cat.create_coveragestore2(name, ws)
        newcs.type = "GeoTIFF"
        newcs.url = filename
        cat.save(newcs)
        # Register the coverage via the REST API, reprojecting to EPSG:4326.
        url = "{0}/rest/workspaces/{1}/coveragestores/{2}/coverages.json"
        url = url.format(geoserver_connection, ws.name, name)
        headers = {"Content-Type": "application/json"}
        coverageName = os.path.splitext(os.path.basename(filename))[0]
        postdata = {
            "coverage": {
                "nativeCoverageName": coverageName,
                "name": coverageName,
                'projectionPolicy': 'REPROJECT_TO_DECLARED',
                'srs': 'EPSG:4326'
            }
        }
        requests.post(url,
                      json.dumps(postdata),
                      headers=headers,
                      auth=(geoserver_username, geoserver_password))
        # Ask GeoServer to recalculate the native and lat/lon bounding boxes.
        # NOTE(review): the REST docs describe "recalculate" as a PUT on the
        # coverage; confirm POST is accepted by the target GeoServer version.
        resource = cat.get_resource(name, workspace=ws)
        url = "{0}/rest/workspaces/{1}/coveragestores/{2}/coverages/{2}?{3}"
        parameters = "recalculate=nativebbox,latlonbbox"
        url = url.format(geoserver_connection, ws.name, name, parameters)
        headers = {"Content-Type": "application/x-www-form-urlencoded"}
        requests.post(url,
                      headers=headers,
                      auth=(geoserver_username, geoserver_password))
        return {
            "solr_geom": _solr_envelope(resource),
            "msg": msg,
            "resource_type": resource.resource_type
        }
    return True
Beispiel #55
0
def _register_cascaded_layers(service, owner=None):
    """
    Register layers for a cascading WMS.

    Walks the remote service's WMS capabilities and, for each layer not yet
    cascaded, creates the GeoServer resource plus the local Layer and
    ServiceLayer records.

    :param service: Service model instance whose layers should be cascaded
    :param owner: optional owner assigned to newly created Layer records
    :return: HttpResponse with a JSON status payload
    """
    if service.type == 'WMS' or service.type == "OWS":
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      _user, _password)
        # Can we always assume that it is geonode?
        # Should cascading layers have a separate workspace?
        cascade_ws = cat.get_workspace(service.name)
        if cascade_ws is None:
            cascade_ws = cat.create_workspace(service.name, 'cascade')
        try:
            store = cat.get_store(service.name, cascade_ws)
        except Exception:
            # No store yet for this service: create the cascading WMS store.
            store = cat.create_wmsstore(service.name, cascade_ws)
            cat.save(store)
        wms = WebMapService(service.base_url)
        layers = list(wms.contents)

        count = 0
        for layer in layers:
            lyr = cat.get_resource(layer, store, cascade_ws)
            if lyr is None:
                if service.type in ["WMS", "OWS"]:
                    resource = cat.create_wmslayer(cascade_ws, store, layer)
                elif service.type == "WFS":
                    resource = cat.create_wfslayer(cascade_ws, store, layer)

                if resource:
                    bbox = resource.latlon_bbox
                    cascaded_layer, created = Layer.objects.get_or_create(
                        typename="%s:%s" % (cascade_ws.name, resource.name),
                        service=service,
                        defaults={
                            "name": resource.name,
                            "workspace": cascade_ws.name,
                            "store": store.name,
                            "storeType": store.resource_type,
                            "title": resource.title or 'No title provided',
                            "abstract": resource.abstract
                            or 'No abstract provided',
                            # BUG FIX: honour the owner argument instead of
                            # always storing None.
                            "owner": owner,
                            "uuid": str(uuid.uuid4()),
                            "bbox_x0": bbox[0],
                            "bbox_x1": bbox[1],
                            "bbox_y0": bbox[2],
                            "bbox_y1": bbox[3],
                        })

                    if created:
                        cascaded_layer.save()
                        if cascaded_layer is not None and cascaded_layer.bbox is None:
                            cascaded_layer._populate_from_gs(
                                gs_resource=resource)
                        cascaded_layer.set_default_permissions()

                        service_layer, created = ServiceLayer.objects.get_or_create(
                            service=service, typename=cascaded_layer.name)
                        service_layer.layer = cascaded_layer
                        # BUG FIX: trailing commas previously turned these
                        # two assignments into one-element tuples.
                        service_layer.title = cascaded_layer.title
                        service_layer.description = cascaded_layer.abstract
                        service_layer.styles = cascaded_layer.styles
                        service_layer.save()

                        count += 1
                    else:
                        logger.error(
                            "Resource %s from store %s could not be saved as layer"
                            % (layer, store.name))
        message = "%d Layers Registered" % count
        return_dict = {'status': 'ok', 'msg': message}
        return HttpResponse(json.dumps(return_dict),
                            mimetype='application/json',
                            status=200)
    elif service.type == 'WCS':
        return HttpResponse('Not Implemented (Yet)', status=501)
    else:
        return HttpResponse('Invalid Service Type', status=400)
Beispiel #56
0
 def __init__(self):
     """Initialize the base manager and attach a GeoServer catalog client."""
     models.Manager.__init__(self)
     # Point the catalog at the configured OGC server's REST endpoint.
     self.gs_catalog = Catalog(ogc_server_settings.rest, _user, _password)
Beispiel #57
0
def get_selected_raster(db, region, variable, date):
    """Return summary stats for a raster, publishing it to GeoServer on demand.

    Looks up the coverage store named ``<db>_<region>_<variable>_<date>``;
    if it does not exist, the raster row is extracted from PostGIS as a
    GeoTIFF and uploaded to GeoServer via the REST API.

    :param db: database name (also the first storename component)
    :param region: schema name holding the raster table
    :param variable: raster table name within the schema
    :param date: primary-key id (as a string) of the raster row
    :return: tuple ``(storename, mean, stddev, min, max)`` on success, or an
        error-description string on failure — callers must check the type.
    """
    conn = None
    try:
        conn = psycopg2.connect(
            "dbname={0} user={1} host={2} password={3}".format(
                db, cfg.connection['user'], cfg.connection['host'],
                cfg.connection['password']))
        cur = conn.cursor()

        storename = db + '_' + region + '_' + variable + '_' + date

        cat = Catalog(cfg.geoserver['rest_url'],
                      username=cfg.geoserver['user'],
                      password=cfg.geoserver['password'],
                      disable_ssl_certificate_validation=True)

        try:
            # Fast path: the coverage store already exists on GeoServer, so
            # only the summary statistics are needed.
            store = cat.get_store(storename, cfg.geoserver['workspace'])
            if not store:
                print("No store")
                # Deliberate control flow: jump to the publishing path below.
                raise Exception('no store named %s' % storename)
            mean, stddev, vmin, vmax = get_vic_summary(
                db, region, variable, date)
            return storename, mean, stddev, vmin, vmax
        except Exception:
            # Store missing (or lookup failed): extract the raster from
            # PostGIS and publish it as a new GeoTIFF coverage store.
            try:
                # NOTE: region/variable are interpolated as SQL identifiers
                # and must come from trusted configuration; the row id is
                # passed as a bound parameter to avoid SQL injection.
                sql = ("SELECT ST_AsGDALRaster(rast, 'GTiff') as tiff "
                       "FROM {0}.{1} WHERE id=%s").format(region, variable)
                cur.execute(sql, (date,))
                data = cur.fetchall()

                mean, stddev, vmin, vmax = get_vic_summary(
                    db, region, variable, date)

                rest_url = cfg.geoserver['rest_url']
                if rest_url[-1] != "/":
                    rest_url = rest_url + '/'

                headers = {
                    'Content-type': 'image/tiff',
                }

                # Upload the GeoTIFF bytes straight into a new coverage store.
                request_url = '{0}workspaces/{1}/coveragestores/{2}/file.geotiff'.format(
                    rest_url, cfg.geoserver['workspace'], storename)
                requests.put(
                    request_url,
                    verify=False,
                    headers=headers,
                    data=data[0][0],
                    auth=(cfg.geoserver['user'],
                          cfg.geoserver['password']))

                return storename, mean, stddev, vmin, vmax

            except Exception as er:
                return str(er) + ' This is while adding the raster layer.'

    except Exception as err:
        return str(err) + ' This is the generic one'
    finally:
        # Always release the database connection, whichever path was taken.
        if conn is not None:
            conn.close()
# Print a WMS GetMap preview URL for every resource in the 'shenzhen'
# workspace of a GeoServer instance (credentials redacted in this example).
from geoserver.catalog import Catalog
cat = Catalog("http://172.18.77.15:8089/geoserver/rest",
              username="******",
              password="******")
# GetMap template: workspace, layer, bbox (minx,miny,maxx,maxy) and SRS are
# substituted per resource in the loop below.
url = 'http://172.18.77.15:8089/geoserver/%s/wms?service=WMS&version=1.1.0&request=GetMap&layers=%s:%s&styles=&bbox=%s,%s,%s,%s&width=80&height=80&srs=%s&format=image/png'
rr = cat.get_resources(workspace=cat.get_workspace('shenzhen'))

# gsconfig native_bbox is ordered (minx, maxx, miny, maxy, crs), hence the
# [0],[2],[1],[3] reshuffle into WMS bbox order. When element [4] is not an
# 'EPSG:' code, fall back on a coordinate-magnitude heuristic: small values
# look like lon/lat (EPSG:4326), large ones like projected metres (EPSG:2309).
for r in rr:
    print r.name,url%(r.workspace.name,r.workspace.name,r.name,r.native_bbox[0],r.native_bbox[2],\
                r.native_bbox[1],r.native_bbox[3],r.native_bbox[4] if r.native_bbox[4] is not None \
                and r.native_bbox[4].startswith('EPSG') else ('EPSG:4326' if float(r.native_bbox[0])<10000 else 'EPSG:2309' ) )
Beispiel #59
0
#!/usr/bin/env python
'''
gsconfig is a python library for manipulating a GeoServer instance via the GeoServer RESTConfig API.

The project is distributed under a MIT License .
'''

__author__ = "David Winslow"
__copyright__ = "Copyright 2012-2015 Boundless, Copyright 2010-2012 OpenPlans"
__license__ = "MIT"

from geoserver.catalog import Catalog

# Connect to a local GeoServer using the default admin credentials.
cat = Catalog("http://localhost:8080/geoserver/rest", "admin", "geoserver")

# Bounding boxes to stamp onto every resource, in gsconfig order:
# (minx, maxx, miny, maxy, crs).
native_bbox = ['589434.856', '4914006.338', '609527.21', '4928063.398',
               'EPSG:26713']
latlon_bbox = ['-103.877', '44.371', '-103.622', '44.5', 'EPSG:4326']

# Overwrite both bounding boxes on each resource in the 'sf' workspace.
workspace = cat.get_workspace('sf')
for resource in cat.get_resources(workspace=workspace):
    resource.native_bbox = native_bbox
    resource.latlon_bbox = latlon_bbox
    cat.save(resource)
Beispiel #60
0
def _register_cascaded_service(url,
                               type,
                               name,
                               username,
                               password,
                               wms=None,
                               owner=None,
                               parent=None):
    """
    Register a service as cascading WMS.

    Creates a Service record from the remote capabilities document and, for
    WMS/OWS/WFS services, configures a cascading store on the local
    GeoServer instance.

    :param url: base URL of the remote service
    :param type: service type string ('WMS', 'OWS', 'WFS' or 'WCS'); the
        name shadows the builtin but is part of the public signature
    :param name: local name for the service / workspace / store
    :param username: remote service credentials (may be empty)
    :param password: remote service credentials (may be empty)
    :param wms: optional pre-built WebMapService client for *url*
    :param owner: owner of the new Service record
    :param parent: parent Service record, if any
    :return: HttpResponse with a JSON status payload
    """

    # A service registered under this URL already? Then report it and stop.
    try:
        service = Service.objects.get(base_url=url)
        return_dict = {}
        return_dict['service_id'] = service.pk
        return_dict['msg'] = "This is an existing Service"
        return HttpResponse(json.dumps(return_dict),
                            mimetype='application/json',
                            status=200)
    except Service.DoesNotExist:
        # BUG FIX: was a bare except that also swallowed real DB errors.
        # "Not found" is the expected case here; fall through and register.
        pass

    if wms is None:
        wms = WebMapService(url)
    # TODO: Make sure we are parsing all service level metadata
    # TODO: Handle for setting ServiceProfiletRole
    service = Service.objects.create(base_url=url,
                                     type=type,
                                     method='C',
                                     name=name,
                                     version=wms.identification.version,
                                     title=wms.identification.title,
                                     abstract=wms.identification.abstract,
                                     online_resource=wms.provider.url,
                                     owner=owner,
                                     parent=parent)

    service.keywords = ','.join(wms.identification.keywords)
    service.save()
    service.set_default_permissions()

    if type in ['WMS', 'OWS']:
        # Register the Service with GeoServer to be cascaded
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      _user, _password)
        cascade_ws = cat.get_workspace(name)
        if cascade_ws is None:
            cascade_ws = cat.create_workspace(name,
                                              "http://geonode.org/cascade")

        # TODO: Make sure there isn't an existing store with that name, and
        # deal with it if there is

        try:
            cascade_store = cat.get_store(name, cascade_ws)
        except Exception:
            # Store lookup failed: create the cascading WMS store.
            cascade_store = cat.create_wmsstore(name, cascade_ws, username,
                                                password)
            cascade_store.capabilitiesURL = url
            cascade_store.type = "WMS"
            cat.save(cascade_store)
        available_resources = cascade_store.get_resources(available=True)

    elif type == 'WFS':
        # Register the Service with GeoServer to be cascaded
        cat = Catalog(settings.OGC_SERVER['default']['LOCATION'] + "rest",
                      _user, _password)
        # Can we always assume that it is geonode?
        cascade_ws = cat.get_workspace(settings.CASCADE_WORKSPACE)
        if cascade_ws is None:
            cascade_ws = cat.create_workspace(settings.CASCADE_WORKSPACE,
                                              "http://geonode.org/cascade")

        try:
            wfs_ds = cat.get_store(name, cascade_ws)
        except Exception:
            # No existing datastore: create one backed by the remote WFS.
            wfs_ds = cat.create_datastore(name, cascade_ws)
            connection_params = {
                "WFSDataStoreFactory:MAXFEATURES": "0",
                "WFSDataStoreFactory:TRY_GZIP": "true",
                "WFSDataStoreFactory:PROTOCOL": "false",
                "WFSDataStoreFactory:LENIENT": "true",
                "WFSDataStoreFactory:TIMEOUT": "3000",
                "WFSDataStoreFactory:BUFFER_SIZE": "10",
                "WFSDataStoreFactory:ENCODING": "UTF-8",
                "WFSDataStoreFactory:WFS_STRATEGY": "nonstrict",
                "WFSDataStoreFactory:GET_CAPABILITIES_URL": url,
            }
            if username and password:
                connection_params["WFSDataStoreFactory:USERNAME"] = username
                connection_params["WFSDataStoreFactory:PASSWORD"] = password

            wfs_ds.connection_parameters = connection_params
            cat.save(wfs_ds)
        available_resources = wfs_ds.get_resources(available=True)

        # Save the Service record
        service, created = Service.objects.get_or_create(type=type,
                                                         method='C',
                                                         base_url=url,
                                                         name=name,
                                                         owner=owner)
        service.save()
        service.set_default_permissions()
    elif type == 'WCS':
        return HttpResponse('Not Implemented (Yet)', status=501)
    else:
        return HttpResponse('Invalid Method / Type combo: ' +
                            'Only Cascaded WMS, WFS and WCS supported',
                            mimetype="text/plain",
                            status=400)

    message = "Service %s registered" % service.name
    return_dict = [{
        'status': 'ok',
        'msg': message,
        'service_id': service.pk,
        'service_name': service.name,
        'service_title': service.title,
        'available_layers': available_resources
    }]

    if settings.USE_QUEUE:
        # Create a layer import job
        WebServiceHarvestLayersJob.objects.get_or_create(service=service)
    else:
        _register_cascaded_layers(service)
    return HttpResponse(json.dumps(return_dict),
                        mimetype='application/json',
                        status=200)