def testUnicodeUrl(self):
    """Verify geoserver.support.url handles unicode and plain segments."""
    base = self.cat.service_url
    # A unicode segment must be percent-encoded as UTF-8.
    segments = ['workspaces', 'test', 'datastores',
                u'operaci\xf3n_repo', 'featuretypes.xml']
    built = url(base=base, seg=segments)
    self.assertEqual(
        built,
        base + "/workspaces/test/datastores/operaci%C3%B3n_repo/featuretypes.xml")
    # Plain ASCII segments pass through unchanged.
    segments = ['workspaces', 'test', 'datastores', 'test-repo',
                'featuretypes.xml']
    built = url(base=base, seg=segments)
    self.assertEqual(
        built,
        base + "/workspaces/test/datastores/test-repo/featuretypes.xml")
def get_style(self, name):
    """Look up a style by name; returns None when the lookup fails."""
    try:
        dom = self.get_xml(url(self.service_url, ["styles", name + ".xml"]))
        return Style(self, dom.find("name").text)
    except FailedRequestError:
        # Missing style (or any failed request) maps to None.
        return None
def _create_coveragestore_external(self, name, url_filepath, workspace=None,
                                   overwrite=False, extension="geotiff"):
    """Create a coverage store referencing an external file on the server.

    PUTs the file reference to .../coveragestores/<name>/external.<extension>.
    """
    if not overwrite:
        try:
            self.get_store(name, workspace)
        except FailedRequestError:
            # Name is free -- nothing to do.
            pass
        else:
            err = "There is already a store named " + name
            if workspace:
                err += " in " + str(workspace)
            raise ConflictingDataError(err)
    if workspace is None:
        workspace = self.get_default_workspace()
    headers = {"Content-type": "text/plain", "Accept": "application/xml"}
    cs_url = url(self.service_url,
                 ["workspaces", workspace.name, "coveragestores", name,
                  "external." + extension],
                 {"configure": "first", "coverageName": name})
    headers, response = self.http.request(cs_url, "PUT", url_filepath, headers)
    self._cache.clear()
    if headers.status != 201:
        raise UploadError(response)
def mosaic_granules(self, coverage, store, filter=None, limit=None):
    """Fetch the granule index of an imagemosaic coverage as parsed JSON."""
    params = {}
    if filter is not None:
        params["filter"] = filter
    if limit is not None:
        params["limit"] = limit
    # GET /workspaces/<ws>/coveragestores/<name>/coverages/<coverage>/index/granules.json
    cs_url = url(self.service_url,
                 ["workspaces", store.workspace.name,
                  "coveragestores", store.name,
                  "coverages", coverage, "index/granules.json"],
                 params)
    headers = {"Content-type": "application/json", "Accept": "application/json"}
    headers, response = self.http.request(cs_url, "GET", None, headers)
    self._cache.clear()
    return json.loads(response, object_hook=_decode_dict)
def create_style(self, name, data, overwrite=False):
    """Create (POST) or replace (PUT) an SLD style on the server.

    Raises ConflictingDataError when the name is taken and overwrite is
    False, and UploadError on any non-2xx response.
    """
    # Idiom fix: use `not overwrite` rather than `overwrite == False`.
    if not overwrite and self.get_style(name) is not None:
        raise ConflictingDataError("There is already a style named %s" % name)
    headers = {
        "Content-type": "application/vnd.ogc.sld+xml",
        "Accept": "application/xml"
    }
    if overwrite:
        # Replace the existing style document in place.
        style_url = url(self.service_url, ["styles", name + ".sld"])
        headers, response = self.http.request(style_url, "PUT", data, headers)
    else:
        # Create a new style entry, named via the query string.
        style_url = url(self.service_url, ["styles"], dict(name=name))
        headers, response = self.http.request(style_url, "POST", data, headers)
    self._cache.clear()
    # Same bounds as the original `status < 200 or status > 299`, written
    # as a single chained comparison.
    if not 200 <= headers.status <= 299:
        raise UploadError(response)
def get_layergroup(self, name=None):
    """Return the named LayerGroup, or None when the request fails."""
    try:
        dom = self.get_xml(url(self.service_url, ["layergroups", name + ".xml"]))
        return LayerGroup(self, dom.find("name").text)
    except FailedRequestError:
        return None
def publish_featuretype(self, name, store, native_crs, srs=None, jdbc_virtual_table=None):
    """Publish a featuretype from data in an existing store.

    Raises ValueError when native_crs is None; returns the refreshed
    FeatureType after POSTing it to the store's featuretypes endpoint.
    """
    # @todo native_srs doesn't seem to get detected, even when in the DB
    # metadata (at least for postgis in geometry_columns) and then there
    # will be a misconfigured layer
    if native_crs is None:
        raise ValueError("must specify native_crs")
    srs = srs or native_crs
    feature_type = FeatureType(self, store.workspace, store, name)
    # `name` lives on the FeatureType base class, so push it (and the CRS
    # fields, which have no xml properties) through the `dirty` dict.
    feature_type.dirty['name'] = name
    feature_type.dirty['srs'] = srs
    feature_type.dirty['nativeCRS'] = native_crs
    feature_type.enabled = True
    feature_type.title = name
    headers = {"Content-type": "application/xml", "Accept": "application/xml"}
    if jdbc_virtual_table is not None:
        feature_type.metadata = {'JDBC_VIRTUAL_TABLE': jdbc_virtual_table}
    # Fix: removed the dead `resource_url = store.resource_url` assignment
    # that was unconditionally overwritten here.
    resource_url = url(self.service_url,
                       ["workspaces", store.workspace.name,
                        "datastores", store.name, "featuretypes.json"],
                       dict())
    headers, response = self.http.request(resource_url, "POST",
                                          feature_type.message(), headers)
    feature_type.fetch()
    return feature_type
def create_coveragestore3(self, name, data_url, workspace=None, overwrite=False):
    """Register an external GeoTIFF as a coverage store; returns HTTP status."""
    if not overwrite:
        try:
            self.get_store(name, workspace)
        except FailedRequestError:
            pass  # name is available
        else:
            err = "There is already a store named " + name
            if workspace:
                err += " in " + str(workspace)
            raise ConflictingDataError(err)
    if workspace is None:
        workspace = self.get_default_workspace()
    headers = {"Content-type": "text/plain", "Accept": "application/xml"}
    ext = "geotiff"
    cs_url = url(self.service_url,
                 ["workspaces", workspace.name, "coveragestores", name,
                  "external." + ext],
                 {"configure": "first", "coverageName": name})
    headers, response = self.http.request(cs_url, "PUT", data_url, headers)
    self._cache.clear()
    if headers.status != 201:
        raise UploadError(response)
    return headers.status
def href(self):
    """REST URL for this layer group, workspace-qualified when applicable."""
    query = {'name': self.name}
    parts = ['layergroups']
    if self.workspace is not None:
        # Accept either a workspace object or a bare name string.
        ws = getattr(self.workspace, 'name', self.workspace)
        parts = ["workspaces", ws] + parts
    return url(self.catalog.service_url, parts, query)
def add_data_to_store(self, store, name, data, workspace=None, overwrite = False, charset = None):
    """Upload a shapefile bundle into an existing datastore.

    `data` is either a path to a zip archive or a dict of files to
    bundle. Raises UploadError on a non-201 response.
    """
    if isinstance(store, basestring):
        store = self.get_store(store, workspace=workspace)
    if workspace is not None:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, "Specified store (%s) is not in specified workspace (%s)!" % (store, workspace)
    else:
        workspace = store.workspace.name
    store = store.name

    if isinstance(data, dict):
        # Bundle loose files into a temporary zip archive.
        bundle = prepare_upload_bundle(name, data)
    else:
        bundle = data

    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset

    headers = {'Content-Type': 'application/zip', 'Accept': 'application/xml'}
    upload_url = url(self.service_url,
                     ["workspaces", workspace, "datastores", store, "file.shp"],
                     params)
    # Bug fix: the archive must be opened in *binary* mode (it is a zip,
    # not text) and the handle must be closed afterwards.
    message = open(bundle, 'rb')
    try:
        headers, response = self.http.request(upload_url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        message.close()
        unlink(bundle)
def get_resources(self):
    """List every FeatureType published from this datastore."""
    res_url = url(self.catalog.service_url,
                  ["workspaces", self.workspace.name,
                   "datastores", self.name, "featuretypes.xml"])
    dom = self.catalog.get_xml(res_url)
    return [featuretype_from_index(self.catalog, self.workspace, self, node)
            for node in dom.findall("featureType")]
def get_styles(self, workspace=None):
    """Return all styles, optionally restricted to one workspace."""
    if workspace is None:
        path = "styles.xml"
    else:
        path = "workspaces/{0}/styles.xml".format(_name(workspace))
    dom = self.get_xml(url(self.service_url, [path]))
    return [Style(self, node.find('name').text)
            for node in dom.findall("style")]
def create_layer(self): """ Constructs the layer details and creates it in the geoserver. If the layer already exists then return the pre-existing layer. Layer "existence" is based entirely on the layer's name -- it must be unique @returns geoserver layer """ # If the layer already exists in Geoserver then return it # pdb.set_trace() layer = self.geoserver.get_layer(self.name) layer_workspace_name = None if layer: layer_workspace_name = str(layer.resource._workspace).replace(' ','').split('@')[0] if not layer or (layer_workspace_name and layer_workspace_name != self.workspace_name): #Construct layer creation request. feature_type_url = url(self.geoserver.service_url, [ "workspaces", self.store.workspace.name, "datastores", self.store.name, "featuretypes" ]) data = { "featureType": { "name": self.name, "nativeName": self.data.table_name() } } request_headers = {"Content-type": "application/json"} response_headers, response = self.geoserver.http.request( feature_type_url, "POST", json.dumps(data), request_headers ) if not 200 <= response_headers.status < 300: raise Exception(toolkit._("Geoserver layer creation failed: %i -- %s") % (response_headers.status, response)) layer = self.geoserver.get_layer(self.name) # Add the layer's name to the file resource self.file_resource.update({"layer_name": self.name}) self.file_resource = toolkit.get_action("resource_update")({"user": self.username}, self.file_resource) # Return the layer return layer
def _build_href(self, extension, create=False):
    """Build the REST href for this style; create URLs carry the name in the query."""
    parts = ["styles"]
    query = {}
    if create:
        query['name'] = self.name
    else:
        parts.append(self.name + extension)
    if self.workspace is not None:
        ws = getattr(self.workspace, 'name', self.workspace)
        parts = ["workspaces", ws] + parts
    return url(self.catalog.service_url, parts, query)
def get_layers(self, resource=None):
    """Return all layers, optionally filtered to those backed by `resource`."""
    if isinstance(resource, basestring):
        resource = self.get_resource(resource)
    dom = self.get_xml(url(self.service_url, ["layers.xml"]))
    layers = [Layer(self, node.find("name").text)
              for node in dom.findall("layer")]
    if resource is not None:
        layers = [lyr for lyr in layers if lyr.resource.href == resource.href]
    # TODO: Filter by style
    return layers
def get_layergroups(self, workspace=None):
    """Return all layer groups, optionally scoped to one workspace."""
    wks_name = None
    parts = ['layergroups.xml']
    if workspace is not None:
        wks_name = _name(workspace)
        parts = ['workspaces', wks_name] + parts
    dom = self.get_xml(url(self.service_url, parts))
    return [LayerGroup(self, node.find("name").text, wks_name)
            for node in dom.findall("layerGroup")]
def get_resources(self):
    """List every Coverage published from this coverage store."""
    res_url = url(self.catalog.service_url,
                  ["workspaces", self.workspace.name,
                   "coveragestores", self.name, "coverages.xml"])
    dom = self.catalog.get_xml(res_url)
    return [coverage_from_index(self.catalog, self.workspace, self, node)
            for node in dom.findall("coverage")]
def _create_coveragestore(self, name, data, workspace=None, overwrite=False, external=False):
    """Upload (or, when ``external``, reference) raster data as a coverage store."""
    if not overwrite:
        try:
            self.get_store(name, workspace)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
        else:
            err = "There is already a store named " + name
            if workspace:
                err += " in " + str(workspace)
            raise ConflictingDataError(err)
    if workspace is None:
        workspace = self.get_default_workspace()

    archive = None
    ext = "geotiff"
    content_type = "text/plain" if external else "image/tiff"
    store_type = "external." if external else "file."
    headers = {"Content-type": content_type, "Accept": "application/xml"}

    payload = data
    if not external:
        if isinstance(data, dict):
            archive = prepare_upload_bundle(name, data)
            payload = open(archive, 'rb')
            if "tfw" in data:
                # application/archive makes the server crash with a 500;
                # application/zip is widely reported to work and tested ok.
                headers['Content-type'] = 'application/zip'
                ext = "worldimage"
        elif isinstance(data, basestring):
            payload = open(data, 'rb')

    cs_url = url(self.service_url,
                 ["workspaces", workspace.name, "coveragestores", name,
                  store_type + ext],
                 {"configure": "first", "coverageName": name})
    try:
        headers, response = self.http.request(cs_url, "PUT", payload, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(payload, "close"):
            payload.close()
        if archive is not None:
            unlink(archive)
def get_layergroup(self, name=None, workspace=None):
    """Fetch one layer group by name; None when the lookup fails."""
    parts = ["layergroups", name + ".xml"]
    if workspace is not None:
        parts = ['workspaces', _name(workspace)] + parts
    try:
        group = self.get_xml(url(self.service_url, parts))
        ws_el = group.find("workspace")
        # Element truthiness (requires child elements) is intentionally
        # preserved from the original: an empty <workspace/> counts as absent.
        wks_name = ws_el.find("name").text if ws_el else None
        return LayerGroup(self, group.find("name").text, wks_name)
    except FailedRequestError:
        return None
def create_coveragestore(self, name, data, workspace=None, overwrite=False):
    """Upload raster data (GeoTIFF or worldimage bundle) as a new coverage store."""
    if not overwrite:
        try:
            self.get_store(name, workspace)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
        else:
            err = "There is already a store named " + name
            if workspace:
                err += " in " + str(workspace)
            raise ConflictingDataError(err)
    if workspace is None:
        workspace = self.get_default_workspace()

    headers = {"Content-type": "image/tiff", "Accept": "application/xml"}
    archive = None
    ext = "geotiff"
    if isinstance(data, dict):
        archive = prepare_upload_bundle(name, data)
        payload = open(archive, 'rb')
        if "tfw" in data:
            # application/archive makes the server crash with a 500;
            # application/zip is widely reported to work and tested ok.
            headers['Content-type'] = 'application/zip'
            ext = "worldimage"
    elif isinstance(data, basestring):
        payload = open(data, 'rb')
    else:
        payload = data

    cs_url = url(self.service_url,
                 ["workspaces", workspace.name, "coveragestores", name,
                  "file." + ext])
    try:
        headers, response = self.http.request(cs_url, "PUT", payload, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(payload, "close"):
            payload.close()
        if archive is not None:
            unlink(archive)
def get_resources(self, name=None):
    """Return featuretypes in this datastore; one by name, or all when name is None."""
    res_url = url(self.catalog.service_url,
                  ["workspaces", self.workspace.name,
                   "datastores", self.name, "featuretypes.xml"])
    dom = self.catalog.get_xml(res_url)

    def build(node):
        return featuretype_from_index(self.catalog, self.workspace, self, node)

    nodes = dom.findall("featureType")
    if name is None:
        return [build(n) for n in nodes]
    # Single-resource lookup: None when no featuretype matches.
    for node in nodes:
        if node.findtext("name") == name:
            return build(node)
    return None
def testUnicodeUrl(self):
    """geoserver.support.url must percent-encode unicode path segments."""
    service = self.cat.service_url
    # A segment with a non-ASCII character is UTF-8 percent-encoded.
    result = url(base=service,
                 seg=['workspaces', 'test', 'datastores',
                      u'operaci\xf3n_repo', 'featuretypes.xml'])
    self.assertEqual(
        result,
        service + "/workspaces/test/datastores/operaci%C3%B3n_repo/featuretypes.xml")
    # Ordinary byte strings pass through untouched.
    result = url(base=service,
                 seg=['workspaces', 'test', 'datastores', 'test-repo',
                      'featuretypes.xml'])
    self.assertEqual(
        result,
        service + "/workspaces/test/datastores/test-repo/featuretypes.xml")
def get_layergroups(self, workspace=None):
    """List layer groups, optionally scoped to a single workspace."""
    wks_name = None
    segments = ['layergroups.xml']
    if workspace is not None:
        wks_name = _name(workspace)
        segments = ['workspaces', wks_name] + segments
    dom = self.get_xml(url(self.service_url, segments))
    return [LayerGroup(self, element.find("name").text, wks_name)
            for element in dom.findall("layerGroup")]
def mosaic_coverage_schema(self, coverage, store):
    """Fetch the index schema of an imagemosaic coverage as parsed JSON."""
    # GET /workspaces/<ws>/coveragestores/<name>/coverages/<coverage>/index.json
    schema_url = url(self.service_url,
                     ["workspaces", store.workspace.name,
                      "coveragestores", store.name,
                      "coverages", coverage, "index.json"],
                     dict())
    headers = {"Content-type": "application/json", "Accept": "application/json"}
    headers, response = self.http.request(schema_url, "GET", None, headers)
    self._cache.clear()
    return json.loads(response, object_hook=_decode_dict)
def harvest_externalgranule(self, data, store):
    """Harvest an externally-stored granule into an existing imagemosaic."""
    # POST /workspaces/<ws>/coveragestores/<name>/external.imagemosaic
    cs_url = url(self.service_url,
                 ["workspaces", store.workspace.name,
                  "coveragestores", store.name, "external.imagemosaic"],
                 dict())
    headers = {"Content-type": "text/plain", "Accept": "application/xml"}
    headers, response = self.http.request(cs_url, "POST", data, headers)
    self._cache.clear()
    # GeoServer answers 202 Accepted for harvest requests.
    if headers.status != 202:
        raise UploadError(response)
def add_data_to_store(self, store, name, data, workspace=None,
                      overwrite=False, charset=None):
    """Upload a shapefile bundle (zip path or dict of files) into an
    existing datastore via PUT .../file.shp."""
    if isinstance(store, basestring):
        store = self.get_store(store, workspace=workspace)
    if workspace is not None:
        workspace = _name(workspace)
        assert store.workspace.name == workspace, "Specified store (%s) is not in specified workspace (%s)!" % (
            store, workspace)
    else:
        workspace = store.workspace.name
    store = store.name
    if isinstance(data, dict):
        # Bundle loose files into a temporary zip archive.
        bundle = prepare_upload_bundle(name, data)
    else:
        bundle = data
    params = dict()
    if overwrite:
        params["update"] = "overwrite"
    if charset is not None:
        params["charset"] = charset
    # Force a predictable server-side filename and shapefile target.
    params["filename"] = "{}.zip".format(name)
    params["target"] = "shp"
    # params["configure"] = "all"
    headers = {
        'Content-Type': 'application/zip',
        'Accept': 'application/xml'
    }
    upload_url = url(
        self.service_url,
        ["workspaces", workspace, "datastores", store, "file.shp"], params)
    try:
        # The whole archive is read into memory before the PUT.
        with open(bundle, "rb") as f:
            data = f.read()
            headers, response = self.http.request(upload_url, "PUT", data, headers)
            self._cache.clear()
            if headers.status != 201:
                raise UploadError(response)
    finally:
        # NOTE(review): the bundle is deliberately left on disk (unlink is
        # commented out) -- presumably to avoid deleting a caller-owned
        # zip. Confirm whether temporary bundles should be cleaned up.
        # os.unlink(bundle)
        pass
def mosaic_delete_granule(self, coverage, store, granule_id):
    """Delete one granule of an existing imagemosaic coverage.

    Raises FailedRequestError when the server does not answer 200.
    """
    # DELETE /workspaces/<ws>/coveragestores/<name>/coverages/<coverage>/index/granules/<granule_id>.json
    # Bug fix: ".json" must be appended to the granule id, not passed as a
    # separate path segment (which produced ".../granules/<id>/.json").
    cs_url = url(self.service_url,
                 ["workspaces", store.workspace.name,
                  "coveragestores", store.name,
                  "coverages", coverage,
                  "index/granules", granule_id + ".json"],
                 dict())
    headers = {"Content-type": "application/json", "Accept": "application/json"}
    headers, response = self.http.request(cs_url, "DELETE", None, headers)
    self._cache.clear()
    if headers.status != 200:
        raise FailedRequestError(response)
def get_layergroup(self, name=None, workspace=None):
    """Fetch a single layer group by name; None when the request fails."""
    segments = ["layergroups", name + ".xml"]
    if workspace is not None:
        segments = ['workspaces', _name(workspace)] + segments
    try:
        group = self.get_xml(url(self.service_url, segments))
    except FailedRequestError:
        return None
    ws_node = group.find("workspace")
    # Element truthiness (has children) intentionally preserved here, so an
    # empty <workspace/> is treated the same as a missing one.
    wks_name = ws_node.find("name").text if ws_node else None
    return LayerGroup(self, group.find("name").text, wks_name)
def add_granule(self, data, store, workspace=None):
    """Add (harvest) a granule into an existing imagemosaic store."""
    ext = os.path.splitext(data)[-1]
    if ext == ".zip":
        # Upload the archive itself.
        endpoint = "file.imagemosaic"  # avoid shadowing builtin `type`
        upload_data = open(data, 'rb')
        headers = {"Content-type": "application/zip", "Accept": "application/xml"}
    else:
        # Reference a file already present on the server.
        endpoint = "external.imagemosaic"
        upload_data = data if data.startswith("file:") else "file:{data}".format(data=data)
        headers = {"Content-type": "text/plain", "Accept": "application/xml"}

    if isinstance(store, basestring):
        store_name = store
        workspace_name = workspace
    else:
        store_name = store.name
        workspace_name = store.workspace.name
    if workspace_name is None:
        raise ValueError("Must specify workspace")

    cs_url = url(self.service_url,
                 ["workspaces", workspace_name, "coveragestores", store_name,
                  endpoint],
                 dict())
    try:
        headers, response = self.http.request(cs_url, "POST", upload_data, headers)
        if headers.status != 202:
            raise UploadError(response)
    finally:
        if hasattr(upload_data, "close"):
            upload_data.close()
    self._cache.clear()
    return "Added granule"
def add_granule(self, data, store, workspace=None):
    """Add (harvest) a granule into an existing imagemosaic store."""
    ext = os.path.splitext(data)[-1]
    if ext == ".zip":
        endpoint = "file.imagemosaic"
        upload_data = open(data, 'rb')
        headers = {"Content-type": "application/zip", "Accept": "application/xml"}
    else:
        endpoint = "external.imagemosaic"
        if data.startswith("file:"):
            upload_data = data
        else:
            upload_data = "file:{data}".format(data=data)
        headers = {"Content-type": "text/plain", "Accept": "application/xml"}

    params = dict()
    if isinstance(store, basestring):
        store_name = store
        workspace_name = workspace
    else:
        store_name = store.name
        workspace_name = store.workspace.name
    if workspace_name is None:
        raise ValueError("Must specify workspace")

    cs_url = url(self.service_url,
                 ["workspaces", workspace_name, "coveragestores", store_name,
                  endpoint])
    try:
        response = self.request(method='post', url=cs_url, headers=headers,
                                data=upload_data, params=params)
        if response.status_code != 202:
            raise UploadError('{0} - "{1}"'.format(response.status_code, response.text))
    finally:
        if getattr(upload_data, "close", None) is not None:
            upload_data.close()
    self._cache.clear()
    return "Added granule"
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a shapefile-backed datastore by uploading a zip bundle."""
    if not overwrite:
        try:
            self.get_store(name, workspace)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
        else:
            err = "There is already a store named " + name
            if workspace:
                err += " in " + str(workspace)
            raise ConflictingDataError(err)
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    params = dict()
    if charset is not None:
        params['charset'] = charset
    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = url(self.service_url,
                 ["workspaces", workspace, "datastores", name, "file.shp"],
                 params)
    headers = {"Content-type": "application/zip", "Accept": "application/xml"}

    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data
    payload = open(archive, 'rb')
    try:
        headers, response = self.http.request(ds_url, "PUT", payload, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        payload.close()
        unlink(archive)
def list_granules(self, coverage, store, workspace=None, filter=None, limit=None, offset=None):
    """List granules of an imagemosaic coverage as parsed JSON."""
    params = {}
    if filter is not None:
        params['filter'] = filter
    if limit is not None:
        params['limit'] = limit
    if offset is not None:
        params['offset'] = offset

    if isinstance(store, basestring):
        store_name = store
        workspace_name = workspace
    else:
        store_name = store.name
        workspace_name = store.workspace.name
    if workspace_name is None:
        raise ValueError("Must specify workspace")

    # GET /workspaces/<ws>/coveragestores/<name>/coverages/<coverage>/index/granules.json
    cs_url = url(self.service_url,
                 ["workspaces", workspace_name, "coveragestores", store_name,
                  "coverages", coverage, "index/granules.json"])
    headers = {"Content-type": "application/json", "Accept": "application/json"}
    response = self.request(method='get', url=cs_url, headers=headers,
                            params=params)
    if response.status_code != 200:
        raise FailedRequestError('{0} - "{1}"'.format(
            response.status_code, response.text))
    self._cache.clear()
    return response.json(object_hook=_decode_dict)
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a shapefile-backed datastore by uploading a zip bundle."""
    if not overwrite:
        # In this variant get_store returns None when the name is free.
        if self.get_store(name, workspace) is not None:
            err = "There is already a store named " + name
            if workspace:
                err += " in " + str(workspace)
            raise ConflictingDataError(err)
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)

    params = dict()
    if charset is not None:
        params['charset'] = charset
    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = url(self.service_url,
                 ["workspaces", workspace, "datastores", name, "file.shp"])
    headers = {"Content-type": "application/zip", "Accept": "application/xml"}

    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data
    payload = open(archive, 'rb')
    try:
        response = self.request(method='put', url=ds_url, headers=headers,
                                data=payload, params=params)
        self._cache.clear()
        if response.status_code != 201:
            raise UploadError('{0} - "{1}"'.format(response.status_code, response.text))
    finally:
        payload.close()
        os.unlink(archive)
def create_layer(self): """ Constructs the layer details and creates it in the geoserver. If the layer already exists then return the pre-existing layer. Layer "existence" is based entirely on the layer's name -- it must be unique @returns geoserver layer """ # If the layer already exists in Geoserver then return it layer = self.geoserver.get_layer(self.name) if not layer: #Construct layer creation request. feature_type_url = url(self.geoserver.service_url, [ "workspaces", self.store.workspace.name, "datastores", self.store.name, "featuretypes" ]) data = { "featureType": { "name": self.name, "nativeName": self.data.table_name() } } request_headers = {"Content-type": "application/json"} response_headers, response = self.geoserver.http.request( feature_type_url, "POST", json.dumps(data), request_headers) if not 200 <= response_headers.status < 300: raise Exception( toolkit._("Geoserver layer creation failed: %i -- %s") % (response_headers.status, response)) layer = self.geoserver.get_layer(self.name) # Add the layer's name to the file resource self.file_resource.update({"layer_name": self.name}) self.file_resource = toolkit.get_action("resource_update")( { "user": self.username }, self.file_resource) # Return the layer return layer
def create_imagemosaic(self, name, data, configure=None, workspace=None, overwrite=False, charset=None):
    """Create an imagemosaic coverage store by uploading a zip archive
    (path string) or a readable file object to .../file.imagemosaic."""
    if not overwrite:
        try:
            store = self.get_store(name, workspace)
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)
    params = dict()
    if charset is not None:
        params['charset'] = charset
    if configure is not None:
        # NOTE(review): any non-None `configure` forces "none" (skip auto
        # configuration) -- the value actually passed is discarded. Confirm
        # whether callers expect 'first'/'all' to be honoured here.
        params['configure'] = "none"
    # PUT /workspaces/<ws>/coveragestores/<name>/file.imagemosaic?configure=none
    cs_url = url(self.service_url, [
        "workspaces", workspace, "coveragestores", name, "file.imagemosaic"
    ], params)
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }
    if isinstance(data, basestring):
        message = open(data, 'rb')
    else:
        message = data
    try:
        headers, response = self.http.request(cs_url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(message, "close"):
            message.close()
def get_resources(self, name=None):
    """Return coverages in this store; a single one by name, or all."""
    res_url = url(self.catalog.service_url,
                  ["workspaces", self.workspace.name,
                   "coveragestores", self.name, "coverages.xml"])
    dom = self.catalog.get_xml(res_url)

    def build(node):
        return coverage_from_index(self.catalog, self.workspace, self, node)

    nodes = dom.findall("coverage")
    if name is None:
        return [build(n) for n in nodes]
    # Single-coverage lookup: None when nothing matches.
    for node in nodes:
        if node.findtext("name") == name:
            return build(node)
    return None
def create_coveragestore(self, name, data, workspace=None, overwrite=False):
    """Upload raster data (GeoTIFF or worldimage bundle) into a new coverage store."""
    if not overwrite:
        try:
            self.get_store(name, workspace)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
        else:
            err = "There is already a store named " + name
            if workspace:
                err += " in " + str(workspace)
            raise ConflictingDataError(err)
    if workspace is None:
        workspace = self.get_default_workspace()

    headers = {"Content-type": "image/tiff", "Accept": "application/xml"}
    archive = None
    ext = "geotiff"
    if isinstance(data, dict):
        archive = prepare_upload_bundle(name, data)
        payload = open(archive, 'rb')
        if "tfw" in data:
            # worldimage bundles must be sent zipped
            headers['Content-type'] = 'application/zip'
            ext = "worldimage"
    elif isinstance(data, basestring):
        payload = open(data, 'rb')
    else:
        payload = data

    cs_url = url(self.service_url,
                 ["workspaces", workspace.name, "coveragestores", name,
                  "file." + ext])
    try:
        headers, response = self.http.request(cs_url, "PUT", payload, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        if hasattr(payload, "close"):
            payload.close()
        if archive is not None:
            unlink(archive)
def list_granules(self, coverage, store, workspace=None, filter=None, limit=None, offset=None):
    """List granules of an imagemosaic coverage as parsed JSON."""
    params = {}
    if filter is not None:
        params['filter'] = filter
    if limit is not None:
        params['limit'] = limit
    if offset is not None:
        params['offset'] = offset

    if isinstance(store, basestring):
        store_name = store
        workspace_name = workspace
    else:
        store_name = store.name
        workspace_name = store.workspace.name
    if workspace_name is None:
        raise ValueError("Must specify workspace")

    # GET /workspaces/<ws>/coveragestores/<name>/coverages/<coverage>/index/granules.json
    cs_url = url(self.service_url,
                 ["workspaces", workspace_name, "coveragestores", store_name,
                  "coverages", coverage, "index/granules.json"],
                 params)
    headers = {"Content-type": "application/json", "Accept": "application/json"}
    headers, response = self.http.request(cs_url, "GET", None, headers)
    if headers.status != 200:
        raise FailedRequestError(response)
    self._cache.clear()
    return json.loads(response, object_hook=_decode_dict)
def _build_href(self, extension='', create=False, with_name=True):
    """Build this style's REST href; create URLs put the name in the query."""
    query = {}
    if create:
        parts = ["styles" + extension]
        if with_name:
            query['name'] = self.name
    else:
        parts = ["styles"]
        if with_name:
            parts.append(self.name + extension)
    if self.workspace is not None:
        ws = getattr(self.workspace, 'name', self.workspace)
        parts = ["workspaces", ws] + parts
    return url(self.catalog.service_url, parts, query)
def publish_featuretype(self, name, store, native_crs, srs=None, keywords=None, title=None, abstract=None, nativeName=None, jdbc_virtual_table=None):
    """Publish a featuretype from data in an existing store.

    Raises ValueError for a missing or EPSG:0 CRS, and a generic
    Exception when the server answers with a 4xx/5xx status.
    """
    # @todo native_srs doesn't seem to get detected, even when in the DB
    # metadata (at least for postgis in geometry_columns) and then there
    # will be a misconfigured layer
    if native_crs is None:
        raise ValueError("must specify native_crs")
    if native_crs == "EPSG:0":
        raise ValueError("CRS was set as EPSG:0! Check that the Input object is specified with a valid CRS")
    srs = srs or native_crs
    feature_type = FeatureType(self, store.workspace, store, name)
    # `name` lives on the FeatureType base class, so push it (and the other
    # fields without xml properties) through the `dirty` dict.
    feature_type.dirty['name'] = name
    feature_type.dirty['srs'] = srs
    feature_type.dirty['nativeCRS'] = native_crs
    feature_type.dirty["nativeName"] = nativeName if nativeName else name
    if title:
        feature_type.dirty['title'] = title
    else:
        feature_type.title = name
    if abstract:
        feature_type.dirty['abstract'] = abstract
    if keywords:
        feature_type.dirty['keywords'] = keywords
    feature_type.enabled = True
    headers = {"Content-type": "application/xml", "Accept": "application/xml"}
    if jdbc_virtual_table is not None:
        feature_type.metadata = {'JDBC_VIRTUAL_TABLE': jdbc_virtual_table}
    # Fix: removed the dead `resource_url = store.resource_url` assignment
    # that was unconditionally overwritten here.
    resource_url = url(self.service_url,
                       ["workspaces", store.workspace.name,
                        "datastores", store.name, "featuretypes.json"],
                       dict())
    headers, response = self.http.request(resource_url, "POST",
                                          feature_type.message(), headers)
    if int(headers['status']) >= 400:
        raise Exception("publish feature failed.\nresponse headers:{}\nreason:{}".format(headers, response))
    feature_type.fetch()
    return feature_type
def get_resources(self, name=None):
    """Return featuretypes in this store; a single one by name, or all."""
    dom = self.catalog.get_xml(
        url(self.catalog.service_url,
            ["workspaces", self.workspace.name,
             "datastores", self.name, "featuretypes.xml"]))

    def build(node):
        return featuretype_from_index(self.catalog, self.workspace, self, node)

    if name is not None:
        # Single lookup: None when nothing matches.
        for node in dom.findall("featureType"):
            if node.findtext("name") == name:
                return build(node)
        return None
    return [build(node) for node in dom.findall("featureType")]
def mosaic_coverages(self, store):
    '''Return every coverage configured in the given coverage store.'''
    # GET /workspaces/<ws>/coveragestores/<name>/coverages.json
    coverages_url = url(self.service_url, [
        "workspaces",
        store.workspace.name,
        "coveragestores",
        store.name,
        "coverages.json"
    ], dict())
    request_headers = {
        "Content-type": "application/json",
        "Accept": "application/json"
    }
    response_headers, body = self.http.request(
        coverages_url, "GET", None, request_headers)
    if response_headers.status != 200:
        raise FailedRequestError(body)
    self._cache.clear()
    return json.loads(body, object_hook=_decode_dict)
def harvest_uploadgranule(self, data, store):
    '''Harvest (upload) a granule archive into an existing imagemosaic.

    data -- path to the zip archive holding the granule
    store -- the target coverage store

    Raises UploadError unless the server answers 202 (Accepted).
    '''
    # POST /workspaces/<ws>/coveragestores/<name>/file.imagemosaic
    cs_url = url(self.service_url,
                 ["workspaces", store.workspace.name,
                  "coveragestores", store.name, "file.imagemosaic"],
                 dict())
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }
    # a context manager replaces the manual try/finally + hasattr-close
    # dance: open() always returns a closeable object
    with open(data, 'rb') as message:
        headers, response = self.http.request(cs_url, "POST", message, headers)
        self._cache.clear()
        if headers.status != 202:
            raise UploadError(response)
def publish_featuretype(self, name, store, native_crs, srs=None, jdbc_virtual_table=None):
    """Publish a featuretype from data in an existing store.

    name -- published layer name (also used as the layer title)
    store -- datastore object the data lives in
    native_crs -- CRS of the source data (required)
    srs -- declared SRS; defaults to native_crs
    jdbc_virtual_table -- optional JDBC virtual table (SQL view) definition

    Returns the new FeatureType; raises ValueError when native_crs is None.
    """
    # @todo native_srs doesn't seem to get detected, even when in the DB
    # metadata (at least for postgis in geometry_columns) and then there
    # will be a misconfigured layer
    if native_crs is None:
        raise ValueError("must specify native_crs")
    srs = srs or native_crs
    feature_type = FeatureType(self, store.workspace, store, name)
    # because name is in the FeatureType base class, work around that
    # and hack in these others that don't have xml properties
    feature_type.dirty['name'] = name
    feature_type.dirty['srs'] = srs
    feature_type.dirty['nativeCRS'] = native_crs
    feature_type.enabled = True
    feature_type.title = name
    headers = {
        "Content-type": "application/xml",
        "Accept": "application/xml"
    }
    if jdbc_virtual_table is not None:
        feature_type.metadata = {'JDBC_VIRTUAL_TABLE': jdbc_virtual_table}
    # fix: removed the dead `resource_url = store.resource_url` assignment
    # that was immediately overwritten by the line below
    resource_url = url(self.service_url, [
        "workspaces",
        store.workspace.name,
        "datastores",
        store.name,
        "featuretypes.json"
    ])
    self.request(method='post', url=resource_url, headers=headers,
                 data=feature_type.message())
    feature_type.fetch()
    return feature_type
def mosaic_coverages(self, store):
    """Return all coverages defined in the given coverage store."""
    # GET /workspaces/<ws>/coveragestores/<name>/coverages.json
    coverages_url = url(self.service_url, [
        "workspaces",
        store.workspace.name,
        "coveragestores",
        store.name,
        "coverages.json"
    ])
    request_headers = {
        "Content-type": "application/json",
        "Accept": "application/json"
    }
    response = self.request(method='get', url=coverages_url,
                            headers=request_headers)
    if response.status_code != 200:
        raise FailedRequestError('{0} - "{1}"'.format(
            response.status_code, response.text))
    self._cache.clear()
    return response.json(object_hook=_decode_dict)
def create_featurestore(self, name, data, workspace=None, overwrite=False, charset=None):
    """Create a shapefile-backed datastore by uploading a zip archive.

    name -- datastore name
    data -- dict of shapefile parts (bundled into a zip here) or a path
        to an already-built zip archive
    workspace -- target workspace (defaults to the catalog default)
    overwrite -- when False, raise ConflictingDataError if a store with
        this name already exists
    charset -- optional charset query parameter

    Raises UploadError unless the server answers 201 (Created).
    NOTE(review): the archive is unlinked afterwards even when the caller
    supplied it as a ready-made zip path — behavior preserved from the
    original; confirm callers expect their file to be deleted.
    """
    if not overwrite:
        try:
            # fix: the returned store object was bound to an unused local
            self.get_store(name, workspace)
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)
    params = dict()
    if charset is not None:
        params['charset'] = charset
    # PUT /workspaces/<ws>/datastores/<ds>/file.shp
    ds_url = url(self.service_url,
                 ["workspaces", workspace, "datastores", name, "file.shp"],
                 params)
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }
    if isinstance(data, dict):
        logger.debug('Data is NOT a zipfile')
        archive = prepare_upload_bundle(name, data)
    else:
        logger.debug('Data is a zipfile')
        archive = data
    try:
        # context manager closes the stream even if the request raises
        with open(archive, 'rb') as message:
            headers, response = self.http.request(ds_url, "PUT", message, headers)
            self._cache.clear()
            if headers.status != 201:
                raise UploadError(response)
    finally:
        unlink(archive)
def mosaic_granules(self, coverage, store, filter=None, limit=None):
    '''Return the granules of an existing imagemosaic coverage, optionally
    restricted by a CQL filter and/or a result limit.'''
    query = dict()
    if filter is not None:
        query['filter'] = filter
    if limit is not None:
        query['limit'] = limit
    # GET /workspaces/<ws>/coveragestores/<name>/coverages/<coverage>/index/granules.json
    granules_url = url(self.service_url, [
        "workspaces",
        store.workspace.name,
        "coveragestores",
        store.name,
        "coverages",
        coverage,
        "index/granules.json"
    ], query)
    request_headers = {
        "Content-type": "application/json",
        "Accept": "application/json"
    }
    response_headers, body = self.http.request(
        granules_url, "GET", None, request_headers)
    self._cache.clear()
    return json.loads(body, object_hook=_decode_dict)
def settings(self):
    """Fetch the server's global settings.

    Returns a dict mapping each section name ('settings', 'jai',
    'coverageAccess') to a list of (name, value) tuples; one level of
    nested elements is flattened into 'parent/child' names.

    Raises Exception when the server does not answer 200.
    """
    settings = {}
    settings_url = url(self.catalog.service_url, ['settings.xml'])
    headers, response = self.http.request(settings_url, 'GET')
    if headers.status != 200:
        raise Exception('Settings listing failed - %s, %s' % (headers,response))
    dom = XML(response)
    sections = ['settings', 'jai','coverageAccess']
    for section in sections:
        params = []
        node = dom.find(section)
        if node is not None: #it will be none if the catalog does not support this operation
            for entry in node:
                if len(entry) == 0:
                    # leaf element: record its tag and text directly
                    params.append((entry.tag, entry.text))
                else:
                    # container element: flatten children as parent/child
                    for subentry in entry:
                        params.append((entry.tag + '/' + subentry.tag, subentry.text))
        # NOTE(review): an unsupported section yields an empty list here —
        # confirm this placement (vs. inside the `if`) against upstream
        settings[section] = params
    return settings
def create_imagemosaic(self, name, data, configure=None, workspace=None, overwrite=False, charset=None):
    """Create an imagemosaic coverage store by uploading a zip archive.

    name -- store name
    data -- path to a zip archive, or an already-open file-like object
    configure -- optional GeoServer configure mode (e.g. "none", "first",
        "all"); when None the query parameter is omitted and the server
        default applies
    workspace -- target workspace (defaults to the catalog default)
    overwrite -- when False, raise ConflictingDataError if the store exists
    charset -- optional charset query parameter

    Raises UploadError unless the server answers 201 (Created).
    """
    if not overwrite:
        try:
            # fix: the returned store object was bound to an unused local
            self.get_store(name, workspace)
            msg = "There is already a store named " + name
            if workspace:
                msg += " in " + str(workspace)
            raise ConflictingDataError(msg)
        except FailedRequestError:
            # we don't really expect that every layer name will be taken
            pass
    if workspace is None:
        workspace = self.get_default_workspace()
    workspace = _name(workspace)
    params = dict()
    if charset is not None:
        params['charset'] = charset
    if configure is not None:
        # fix: pass the caller's configure mode through — the original
        # always sent "none", silently ignoring the requested value
        params['configure'] = configure
    # PUT /workspaces/<ws>/coveragestores/<name>/file.imagemosaic?configure=...
    cs_url = url(self.service_url,
                 ["workspaces", workspace, "coveragestores", name, "file.imagemosaic"],
                 params)
    headers = {
        "Content-type": "application/zip",
        "Accept": "application/xml"
    }
    if isinstance(data, basestring):
        message = open(data, 'rb')
    else:
        message = data
    try:
        headers, response = self.http.request(cs_url, "PUT", message, headers)
        self._cache.clear()
        if headers.status != 201:
            raise UploadError(response)
    finally:
        # data may be a caller-supplied object without close(); only close
        # when possible (preserved from the original)
        if hasattr(message, "close"):
            message.close()
def delete_granule(self, coverage, store, granule_id, workspace=None):
    '''Delete a single granule of an existing imagemosaic.

    coverage -- coverage name inside the store
    store -- coverage store object, or its name as a string
    granule_id -- id of the granule to remove
    workspace -- workspace name; required when store is given as a string

    Raises ValueError when the workspace cannot be determined and
    FailedRequestError when the server does not answer 200.
    '''
    params = dict()
    workspace_name = workspace
    if isinstance(store, basestring):
        store_name = store
    else:
        # a store object carries its own workspace
        store_name = store.name
        workspace_name = store.workspace.name
    if workspace_name is None:
        raise ValueError("Must specify workspace")
    # DELETE /workspaces/<ws>/coveragestores/<name>/coverages/<coverage>/index/granules/<granule_id>.json
    cs_url = url(
        self.service_url,
        [
            "workspaces",
            workspace_name,
            "coveragestores",
            store_name,
            "coverages",
            coverage,
            "index/granules",
            # fix: ".json" must be appended to the id, not passed as its own
            # segment — url() joins segments with "/", so the original built
            # .../granules/<granule_id>/.json, which the server rejects
            granule_id + ".json"
        ],
        params
    )
    headers = {
        "Content-type": "application/json",
        "Accept": "application/json"
    }
    headers, response = self.http.request(cs_url, "DELETE", None, headers)
    if headers.status != 200:
        raise FailedRequestError(response)
    self._cache.clear()
    return "Deleted granule"
def update(self, settings):
    """Push modified global settings back to the server.

    settings -- dict in the shape returned by settings(): section name ->
        list of (name, value) tuples, where a 'parent/child' name becomes
        a nested element under a shared parent.

    Raises Exception when the server does not answer 200.
    """
    root = ET.Element('global')
    for section in settings:
        params = settings[section]
        element = ET.SubElement(root, section)
        for name, value in params:
            if '/' in name:
                # nested setting: rebuild the parent/child structure
                name, subname = name.split('/')
                subelement = element.find(name)
                if subelement is None:
                    # create the parent once; later siblings reuse it
                    subelement = ET.SubElement(element, name)
                subsubelement = ET.SubElement(subelement, subname)
                # unicode(): Python 2 coercion of arbitrary values to text
                subsubelement.text = unicode(value)
            else:
                subelement = ET.SubElement(element, name)
                subelement.text = unicode(value)
    xml = ET.tostring(root)
    settings_url = url(self.catalog.service_url, ['settings.xml'])
    headers = {'Content-type': 'text/xml'}
    headers, response = self.http.request(settings_url, 'PUT', xml, headers = headers)
    if headers.status != 200:
        raise Exception('Settings update failed - %s, %s' % (headers,response))
def get_layers(self, resource=None):
    """Prefix the layer name with ws name"""
    # Original code from gsconfig
    if isinstance(resource, basestring):
        resource = self.get_resource(resource)
    description = self.get_xml(url(self.service_url, ["layers.xml"]))
    candidates = [
        BaseLayer(self, node.find("name").text)
        for node in description.findall("layer")
    ]
    if resource is not None:
        candidates = [lyr for lyr in candidates
                      if lyr.resource.href == resource.href]
    # Start patch: group layers by bare name so duplicates can be
    # disambiguated with a workspace prefix
    grouped = {}
    for lyr in candidates:
        grouped.setdefault(lyr.name, []).append(lyr)
    result = []
    # Prefix all names
    for bare_name, group in grouped.items():
        if len(group) == 1:
            only = group[0]
            only.name = self.get_namespaced_name(only.name)
            result.append(only)
        else:
            resources = self._get_res(group[0].name)
            for idx, lyr in enumerate(group):
                lyr.name = "%s:%s" % (resources[idx].workspace.name, lyr.name)
                result.append(lyr)
    return result
def href(self):
    """URL of this resource's XML representation; an explicitly set
    _href takes precedence over the computed catalog path."""
    if self._href:
        return self._href
    return url(self.catalog.service_url, [
        "workspaces",
        self.workspace.name,
        self.url_part_stores,
        self.store.name,
        self.url_part_types,
        self.name + ".xml"
    ])
def reload(self):
    """POST to the server's reload endpoint and flush the local cache."""
    response = self.http.request(url(self.service_url, ['reload']), "POST")
    self._cache.clear()
    return response